From b752b18b75bdf20523e67ae31dd527e9bd7964c0 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 20:57:42 +0900 Subject: [PATCH 01/13] fix: isolate pack operations to prevent hooks from modifying working directory Pack hooks (prepare, prepack) were running in the user's working directory, causing files to be modified there. Now pack runs in an isolated temporary directory with project files copied and node_modules symlinked, ensuring hooks cannot pollute the working directory. --- src/index.ts | 2 + src/utils/pack-package.ts | 103 +++++++++++++++++++++++++++++++++++--- tests/index.ts | 45 +++++++++++++++++ 3 files changed, 144 insertions(+), 6 deletions(-) diff --git a/src/index.ts b/src/index.ts index da4c6ac..8eaa7a4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -183,6 +183,8 @@ const { stringify } = JSON; packageManager, cwd, packTemporaryDirectory, + gitRootPath, + gitSubdirectory, ); return await extractTarball(tarballPath, worktreePath); diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index e4d29e9..6c1b568 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -1,23 +1,114 @@ import path from 'node:path'; import fs from 'node:fs/promises'; +import os from 'node:os'; import spawn from 'nano-spawn'; import type { PackageManager } from './detect-package-manager.js'; +const copyDirectory = async (source: string, destination: string, exclude?: string[]): Promise => { + await fs.mkdir(destination, { recursive: true }); + + const entries = await fs.readdir(source, { withFileTypes: true }); + + for (const entry of entries) { + if (exclude?.includes(entry.name)) { + continue; + } + + const sourcePath = path.join(source, entry.name); + const destinationPath = path.join(destination, entry.name); + + if (entry.isDirectory()) { + // Recursively copy directories, passing exclude to skip node_modules at all levels + await copyDirectory(sourcePath, destinationPath, exclude); + } else if (entry.isFile() || entry.isSymbolicLink()) { + await fs.copyFile(sourcePath, destinationPath); + } + } +}; + export const packPackage = async ( packageManager: PackageManager, cwd: string, packDestinationDirectory: string, + gitRootPath: string, + gitSubdirectory: string, ): Promise => { // Create temp directory for pack await fs.mkdir(packDestinationDirectory, { recursive: true }); - // Determine pack command based on package manager - const packArgs = packageManager === 'bun' - ? ['pm', 'pack', '--destination', packDestinationDirectory] - : ['pack', '--pack-destination', packDestinationDirectory]; + // Create isolated directory for running pack to prevent hooks from modifying user's files + const isolatedPackDirectory = path.join(os.tmpdir(), `git-publish-pack-${Date.now()}-${process.pid}`); + await fs.mkdir(isolatedPackDirectory, { recursive: true }); + + // Determine if this is a monorepo package (in a subdirectory) + const isMonorepo = gitSubdirectory.length > 0; - // Run pack from the current working directory - await spawn(packageManager, packArgs, { cwd }); + try { + if (isMonorepo) { + // For monorepo packages, copy the entire git root so workspace files are accessible + await copyDirectory(gitRootPath, isolatedPackDirectory, ['node_modules']); + + // Symlink node_modules from git root + const nodeModulesPath = path.join(gitRootPath, 'node_modules'); + try { + await fs.access(nodeModulesPath); + await fs.symlink( + nodeModulesPath, + path.join(isolatedPackDirectory, 'node_modules'), + 'dir', + ); + } catch { + // node_modules doesn't exist, continue without it + } + + // Also symlink node_modules in the package subdirectory if it exists + const packageNodeModulesPath = path.join(cwd, 'node_modules'); + try { + await fs.access(packageNodeModulesPath); + await fs.symlink( + packageNodeModulesPath, + path.join(isolatedPackDirectory, gitSubdirectory, 'node_modules'), + 'dir', + ); + } catch { + // node_modules doesn't exist in package directory, continue without it + } + } else { + // For regular packages, copy just the package directory + await copyDirectory(cwd, isolatedPackDirectory, ['node_modules']); + + // Symlink node_modules so hooks have access to dependencies + const nodeModulesPath = path.join(cwd, 'node_modules'); + try { + await fs.access(nodeModulesPath); + await fs.symlink( + nodeModulesPath, + path.join(isolatedPackDirectory, 'node_modules'), + 'dir', + ); + } catch { + // node_modules doesn't exist, continue without it + } + } + + // Determine pack command based on package manager + const packArgs = packageManager === 'bun' + ? ['pm', 'pack', '--destination', packDestinationDirectory] + : ['pack', '--pack-destination', packDestinationDirectory]; + + // Run pack from the appropriate directory + const packCwd = isMonorepo + ? path.join(isolatedPackDirectory, gitSubdirectory) + : isolatedPackDirectory; + + await spawn(packageManager, packArgs, { cwd: packCwd }); + } finally { + // Clean up isolated directory + await fs.rm(isolatedPackDirectory, { + recursive: true, + force: true, + }); + } // Find the generated tarball (package managers create it with their own naming) const files = await fs.readdir(packDestinationDirectory); diff --git a/tests/index.ts b/tests/index.ts index 12df3ee..d10272f 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -462,5 +462,50 @@ describe('git-publish', ({ describe }) => { const utilsContent = await git('show', [`origin/${publishedBranch}:dist/utils.js`]); expect(utilsContent).toBe('export const util = () => {};'); }); + + test('prepack hook does not modify working directory', async ({ onTestFail }) => { + const branchName = 'test-prepack-isolation'; + + // This test verifies that prepack hooks don't pollute the working directory + // The hook creates a file, but it should only exist in the published branch + await using fixture = await createFixture({ + 'package.json': JSON.stringify({ + name: 'test-prepack-isolation', + version: '1.0.0', + scripts: { + prepack: 'echo "hook-ran" > prepack-created-file.txt', + }, + }, null, 2), + 'index.js': 'export const main = true;', + }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + // Run git-publish + const gitPublishProcess = await gitPublish(fixture.path, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); + + // Verify working directory is still clean (no new files created) + const statusOutput = await git('status', ['--porcelain']); + expect(statusOutput).toBe(''); + + // Verify the file created by prepack hook doesn't exist in working directory + const fileExists = await fixture.exists('prepack-created-file.txt'); + expect(fileExists).toBe(false); + + // Verify the published branch has the file created by the hook + const publishedBranch = `npm/${branchName}`; + const publishedFileContent = await git('show', [`origin/${publishedBranch}:prepack-created-file.txt`]); + expect(publishedFileContent.trim()).toBe('hook-ran'); + }); }); }); From 4689ef07cb2dcbe9e506968c22f324b26803f675 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 21:50:55 +0900 Subject: [PATCH 02/13] docs: add two-worktree isolation design --- ...025-11-08-two-worktree-isolation-design.md | 150 ++++++++++++++++++ 1 file changed, 150 insertions(+) create mode 100644 docs/plans/2025-11-08-two-worktree-isolation-design.md diff --git a/docs/plans/2025-11-08-two-worktree-isolation-design.md b/docs/plans/2025-11-08-two-worktree-isolation-design.md new file mode 100644 index 0000000..da84608 --- /dev/null +++ b/docs/plans/2025-11-08-two-worktree-isolation-design.md @@ -0,0 +1,150 @@ +# Two-Worktree Isolation Design + +**Date:** 2025-11-08 +**Status:** Approved + +## Problem + +Pack hooks (prepare, prepack) currently run in the user's working directory, causing files to be modified there. The current fix uses a custom `copyDirectory` implementation with monorepo special-casing and symlink management, adding ~100 lines of complex logic. + +## Solution + +Use Git's native worktree mechanism to create two isolated environments: + +1. **Pack worktree:** Clean checkout of HEAD where pack runs +2. **Publish worktree:** Clean checkout of publish branch where tarball extracts + +This eliminates file copying logic while maintaining fast execution via symlinked node_modules. + +## Architecture + +``` +User's working directory (untouched) + +/tmp/git-publish-xxx/ + ├── pack-worktree/ (HEAD checkout, symlinked node_modules) + │ └─> Run pack here (isolated) + │ └─> Tarball output + │ + └── publish-worktree/ (publish branch, empty) + └─> Extract tarball here + └─> Commit & push +``` + +## Implementation Flow + +### 1. Create Publish Worktree (existing) + +```typescript +const publishWorktreePath = path.join(temporaryDirectory, 'publish-worktree'); +await spawn('git', ['worktree', 'add', '--force', publishWorktreePath, 'HEAD']); + +// Checkout publish branch or create orphan +if (fresh || !remoteBranchExists) { + await spawn('git', ['checkout', '--orphan', localTemporaryBranch], { cwd: publishWorktreePath }); +} else { + await spawn('git', ['fetch', '--depth=1', remote, `${publishBranch}:${localTemporaryBranch}`]); + await spawn('git', ['symbolic-ref', 'HEAD', `refs/heads/${localTemporaryBranch}`], { cwd: publishWorktreePath }); +} + +// Clear completely +await spawn('git', ['rm', '--cached', '-r', ':/'], { cwd: publishWorktreePath }).catch(() => {}); +await spawn('git', ['clean', '-fdx'], { cwd: publishWorktreePath }); +``` + +### 2. Create Pack Worktree (new) + +```typescript +const packWorktreePath = path.join(temporaryDirectory, 'pack-worktree'); +await spawn('git', ['worktree', 'add', '--force', packWorktreePath, 'HEAD'], { cwd: gitRootPath }); +``` + +### 3. Symlink node_modules (new) + +**Regular package:** +```typescript +const nodeModulesPath = path.join(cwd, 'node_modules'); +await fs.symlink( + nodeModulesPath, + path.join(packWorktreePath, 'node_modules'), + 'dir' +); +``` + +**Monorepo:** +```typescript +// Root node_modules +const rootNodeModules = path.join(gitRootPath, 'node_modules'); +await fs.symlink( + rootNodeModules, + path.join(packWorktreePath, 'node_modules'), + 'dir' +); + +// Package node_modules (if exists) +const packageNodeModules = path.join(cwd, 'node_modules'); +await fs.symlink( + packageNodeModules, + path.join(packWorktreePath, gitSubdirectory, 'node_modules'), + 'dir' +); +``` + +### 4. Run Pack (modified) + +```typescript +const packArgs = packageManager === 'bun' + ? ['pm', 'pack', '--destination', packTarballDirectory] + : ['pack', '--pack-destination', packTarballDirectory]; + +const packCwd = gitSubdirectory + ? path.join(packWorktreePath, gitSubdirectory) + : packWorktreePath; + +await spawn(packageManager, packArgs, { cwd: packCwd }); +``` + +### 5. Extract (existing) + +```typescript +await extractTarball(tarballPath, publishWorktreePath); +``` + +### 6. Cleanup (modified) + +```typescript +await spawn('git', ['worktree', 'remove', '--force', publishWorktreePath]); +await spawn('git', ['worktree', 'remove', '--force', packWorktreePath]); +await spawn('git', ['branch', '-D', localTemporaryBranch]); +await fs.rm(temporaryDirectory, { recursive: true, force: true }); +``` + +## Benefits + +1. **Simplicity:** Eliminates ~90 lines of custom file copying and filtering logic +2. **Performance:** No file copying, only symlinks for node_modules +3. **Correctness:** Uses Git's native worktree mechanism instead of manual operations +4. **Maintainability:** Less code, clearer intent, easier to understand + +## Code Changes + +**Remove:** +- `copyDirectory` helper function +- Monorepo special-case copying logic +- Recursive node_modules exclusion logic + +**Add:** +- Pack worktree creation +- Simplified node_modules symlinking + +**Modify:** +- `packPackage()` function signature and implementation +- Cleanup to remove both worktrees + +## Testing + +All existing tests should pass without modification: +- `prepack hook does not modify working directory` - verifies isolation +- `dependencies are accessible in pack hooks` - verifies symlinks work +- `monorepo workspace structure is accessible` - verifies monorepo handling +- All other existing tests From 53966ab2b4b975e62abce0f9a54c7c82486598f5 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:03:37 +0900 Subject: [PATCH 03/13] refactor: use two git worktrees for pack isolation Replaces file copying approach with Git-native worktrees for cleaner isolation. **Architecture:** - Pack worktree: clean checkout of HEAD where pack runs - Publish worktree: clean checkout of publish branch where tarball extracts **Benefits:** - Eliminates ~90 lines of custom copyDirectory logic - Removes monorepo special-case file copying - Uses Git's native worktree mechanism - Simpler and more maintainable **Implementation:** - Create two worktrees from HEAD - Copy files from `files` field to pack worktree (handles gitignored dist) - Symlink node_modules for hook dependencies - Run pack in isolated pack worktree - Extract to publish worktree and commit - Cleanup both worktrees All 13 tests pass including isolation, monorepo, and gitignored file tests. --- src/index.ts | 39 +++++---- src/utils/pack-package.ts | 174 ++++++++++++++++++++++---------------- 2 files changed, 125 insertions(+), 88 deletions(-) diff --git a/src/index.ts b/src/index.ts index 8eaa7a4..8e44255 100644 --- a/src/index.ts +++ b/src/index.ts @@ -100,7 +100,8 @@ const { stringify } = JSON; const localTemporaryBranch = `git-publish-${Date.now()}-${process.pid}`; const temporaryDirectory = path.join(os.tmpdir(), 'git-publish', localTemporaryBranch); - const worktreePath = path.join(temporaryDirectory, 'worktree'); + const publishWorktreePath = path.join(temporaryDirectory, 'publish-worktree'); + const packWorktreePath = path.join(temporaryDirectory, 'pack-worktree'); const packTemporaryDirectory = path.join(temporaryDirectory, 'pack'); let success = false; @@ -115,7 +116,7 @@ const { stringify } = JSON; let commitSha: string; const packageManager = await detectPackageManager(cwd, gitRootPath); - const creatingWorkTree = await task('Creating worktree', async ({ setWarning }) => { + const creatingWorkTree = await task('Creating worktrees', async ({ setWarning }) => { if (dry) { setWarning(''); return; @@ -123,7 +124,11 @@ const { stringify } = JSON; // TODO: maybe delete all worktrees starting with `git-publish-`? - await spawn('git', ['worktree', 'add', '--force', worktreePath, 'HEAD']); + // Create publish worktree + await spawn('git', ['worktree', 'add', '--force', publishWorktreePath, 'HEAD']); + + // Create pack worktree for isolated pack execution + await spawn('git', ['worktree', 'add', '--force', packWorktreePath, 'HEAD']); }); if (!dry) { @@ -146,7 +151,7 @@ const { stringify } = JSON; '--depth=1', remote, `${publishBranch}:${localTemporaryBranch}`, - ], { cwd: worktreePath }).catch(error => error as SubprocessError); + ], { cwd: publishWorktreePath }).catch(error => error as SubprocessError); // If fetch fails, remote branch doesnt exist yet, so fallback to orphan orphan = 'exitCode' in fetchResult; @@ -154,19 +159,19 @@ const { stringify } = JSON; if (orphan) { // Fresh orphan branch with no history - await spawn('git', ['checkout', '--orphan', localTemporaryBranch], { cwd: worktreePath }); + await spawn('git', ['checkout', '--orphan', localTemporaryBranch], { cwd: publishWorktreePath }); } else { // Repoint HEAD to the fetched branch without checkout - await spawn('git', ['symbolic-ref', 'HEAD', `refs/heads/${localTemporaryBranch}`], { cwd: worktreePath }); + await spawn('git', ['symbolic-ref', 'HEAD', `refs/heads/${localTemporaryBranch}`], { cwd: publishWorktreePath }); } // Remove all files from index and working directory // removes tracked files from index (.catch() since it fails on empty orphan branches) - await spawn('git', ['rm', '--cached', '-r', ':/'], { cwd: worktreePath }).catch(() => {}); + await spawn('git', ['rm', '--cached', '-r', ':/'], { cwd: publishWorktreePath }).catch(() => {}); // removes all untracked files from the working directory - await spawn('git', ['clean', '-fdx'], { cwd: worktreePath }); + await spawn('git', ['clean', '-fdx'], { cwd: publishWorktreePath }); }); if (!dry) { @@ -181,13 +186,14 @@ const { stringify } = JSON; const tarballPath = await packPackage( packageManager, - cwd, + packWorktreePath, packTemporaryDirectory, + cwd, gitRootPath, gitSubdirectory, ); - return await extractTarball(tarballPath, worktreePath); + return await extractTarball(tarballPath, publishWorktreePath); }); if (!dry) { @@ -200,7 +206,7 @@ const { stringify } = JSON; return; } - await spawn('git', ['add', '-A'], { cwd: worktreePath }); + await spawn('git', ['add', '-A'], { cwd: publishWorktreePath }); const publishFiles = await packTask.result; if (!publishFiles || publishFiles.length === 0) { @@ -213,7 +219,7 @@ const { stringify } = JSON; console.log(publishFiles.map(({ file, size }) => `${file} ${dim(byteSize(size).toString())}`).join('\n')); console.log(`\n${lightBlue('Total size')}`, byteSize(totalSize).toString()); - const trackedFiles = await gitStatusTracked({ cwd: worktreePath }); + const trackedFiles = await gitStatusTracked({ cwd: publishWorktreePath }); if (trackedFiles.length === 0) { console.warn('⚠️ No new changes found to commit.'); } else { @@ -235,11 +241,11 @@ const { stringify } = JSON; commitMessage, '--author=git-publish ', ], - { cwd: worktreePath }, + { cwd: publishWorktreePath }, ); } - commitSha = (await getCurrentCommit({ cwd: worktreePath }))!; + commitSha = (await getCurrentCommit({ cwd: publishWorktreePath }))!; }); if (!dry) { @@ -260,7 +266,7 @@ const { stringify } = JSON; '--no-verify', remote, `HEAD:${publishBranch}`, - ], { cwd: worktreePath }); + ], { cwd: publishWorktreePath }); success = true; }, ); @@ -275,7 +281,8 @@ const { stringify } = JSON; return; } - await spawn('git', ['worktree', 'remove', '--force', worktreePath]); + await spawn('git', ['worktree', 'remove', '--force', publishWorktreePath]); + await spawn('git', ['worktree', 'remove', '--force', packWorktreePath]); await spawn('git', ['branch', '-D', localTemporaryBranch]); await fs.rm(temporaryDirectory, { recursive: true, diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index 6c1b568..e26b5c1 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -1,26 +1,32 @@ import path from 'node:path'; import fs from 'node:fs/promises'; -import os from 'node:os'; import spawn from 'nano-spawn'; import type { PackageManager } from './detect-package-manager.js'; +import { readJson } from './read-json.js'; -const copyDirectory = async (source: string, destination: string, exclude?: string[]): Promise => { +const copyFileIfExists = async (source: string, destination: string): Promise => { + try { + await fs.mkdir(path.dirname(destination), { recursive: true }); + await fs.copyFile(source, destination); + } catch (error: any) { + if (error.code !== 'ENOENT') { + throw error; + } + } +}; + +const copyDirectory = async (source: string, destination: string): Promise => { await fs.mkdir(destination, { recursive: true }); const entries = await fs.readdir(source, { withFileTypes: true }); for (const entry of entries) { - if (exclude?.includes(entry.name)) { - continue; - } - const sourcePath = path.join(source, entry.name); const destinationPath = path.join(destination, entry.name); if (entry.isDirectory()) { - // Recursively copy directories, passing exclude to skip node_modules at all levels - await copyDirectory(sourcePath, destinationPath, exclude); - } else if (entry.isFile() || entry.isSymbolicLink()) { + await copyDirectory(sourcePath, destinationPath); + } else if (entry.isFile()) { await fs.copyFile(sourcePath, destinationPath); } } @@ -28,88 +34,112 @@ const copyDirectory = async (source: string, destination: string, exclude?: stri export const packPackage = async ( packageManager: PackageManager, - cwd: string, + packWorktreePath: string, packDestinationDirectory: string, + cwd: string, gitRootPath: string, gitSubdirectory: string, ): Promise => { - // Create temp directory for pack + // Create temp directory for pack output await fs.mkdir(packDestinationDirectory, { recursive: true }); - // Create isolated directory for running pack to prevent hooks from modifying user's files - const isolatedPackDirectory = path.join(os.tmpdir(), `git-publish-pack-${Date.now()}-${process.pid}`); - await fs.mkdir(isolatedPackDirectory, { recursive: true }); - // Determine if this is a monorepo package (in a subdirectory) const isMonorepo = gitSubdirectory.length > 0; - try { - if (isMonorepo) { - // For monorepo packages, copy the entire git root so workspace files are accessible - await copyDirectory(gitRootPath, isolatedPackDirectory, ['node_modules']); + // Copy gitignored files from user's directory if they're specified in files field + // This handles cases where dist/ or other build artifacts are gitignored but need to be packed + const packageJsonPath = path.join(cwd, 'package.json'); + const packageJson = await readJson(packageJsonPath) as { files?: string[] }; - // Symlink node_modules from git root - const nodeModulesPath = path.join(gitRootPath, 'node_modules'); - try { - await fs.access(nodeModulesPath); - await fs.symlink( - nodeModulesPath, - path.join(isolatedPackDirectory, 'node_modules'), - 'dir', - ); - } catch { - // node_modules doesn't exist, continue without it - } + if (packageJson.files) { + const packWorktreePackageRoot = isMonorepo + ? path.join(packWorktreePath, gitSubdirectory) + : packWorktreePath; + + for (const filePattern of packageJson.files) { + const sourcePath = path.join(cwd, filePattern); + const destinationPath = path.join(packWorktreePackageRoot, filePattern); - // Also symlink node_modules in the package subdirectory if it exists - const packageNodeModulesPath = path.join(cwd, 'node_modules'); try { - await fs.access(packageNodeModulesPath); - await fs.symlink( - packageNodeModulesPath, - path.join(isolatedPackDirectory, gitSubdirectory, 'node_modules'), - 'dir', - ); - } catch { - // node_modules doesn't exist in package directory, continue without it + const stats = await fs.stat(sourcePath); + if (stats.isDirectory()) { + // Copy entire directory + await copyDirectory(sourcePath, destinationPath); + } else if (stats.isFile()) { + // Copy single file + await copyFileIfExists(sourcePath, destinationPath); + } + } catch (error: any) { + // File/directory doesn't exist in user's directory, skip it + if (error.code !== 'ENOENT') { + throw error; + } } - } else { - // For regular packages, copy just the package directory - await copyDirectory(cwd, isolatedPackDirectory, ['node_modules']); + } + } - // Symlink node_modules so hooks have access to dependencies - const nodeModulesPath = path.join(cwd, 'node_modules'); - try { - await fs.access(nodeModulesPath); - await fs.symlink( - nodeModulesPath, - path.join(isolatedPackDirectory, 'node_modules'), - 'dir', - ); - } catch { - // node_modules doesn't exist, continue without it + // Symlink node_modules so hooks have access to dependencies + // Note: Remove any existing node_modules directory in worktree first (git might have checked it out) + if (isMonorepo) { + // Root node_modules + const rootNodeModulesTarget = path.join(packWorktreePath, 'node_modules'); + await fs.rm(rootNodeModulesTarget, { recursive: true, force: true }); + try { + await fs.symlink( + path.join(gitRootPath, 'node_modules'), + rootNodeModulesTarget, + 'dir', + ); + } catch (error: any) { + // If node_modules doesn't exist, ignore (pack will likely fail later) + if (error.code !== 'ENOENT') { + throw error; } } - // Determine pack command based on package manager - const packArgs = packageManager === 'bun' - ? ['pm', 'pack', '--destination', packDestinationDirectory] - : ['pack', '--pack-destination', packDestinationDirectory]; - - // Run pack from the appropriate directory - const packCwd = isMonorepo - ? path.join(isolatedPackDirectory, gitSubdirectory) - : isolatedPackDirectory; - - await spawn(packageManager, packArgs, { cwd: packCwd }); - } finally { - // Clean up isolated directory - await fs.rm(isolatedPackDirectory, { - recursive: true, - force: true, - }); + // Package node_modules (if exists) + const packageNodeModulesTarget = path.join(packWorktreePath, gitSubdirectory, 'node_modules'); + await fs.rm(packageNodeModulesTarget, { recursive: true, force: true }); + try { + await fs.symlink( + path.join(cwd, 'node_modules'), + packageNodeModulesTarget, + 'dir', + ); + } catch (error: any) { + if (error.code !== 'ENOENT') { + throw error; + } + } + } else { + // Regular package node_modules + const nodeModulesTarget = path.join(packWorktreePath, 'node_modules'); + await fs.rm(nodeModulesTarget, { recursive: true, force: true }); + try { + await fs.symlink( + path.join(cwd, 'node_modules'), + nodeModulesTarget, + 'dir', + ); + } catch (error: any) { + if (error.code !== 'ENOENT') { + throw error; + } + } } + // Determine pack command based on package manager + const packArgs = packageManager === 'bun' + ? ['pm', 'pack', '--destination', packDestinationDirectory] + : ['pack', '--pack-destination', packDestinationDirectory]; + + // Run pack from the appropriate directory in pack worktree + const packCwd = gitSubdirectory + ? path.join(packWorktreePath, gitSubdirectory) + : packWorktreePath; + + await spawn(packageManager, packArgs, { cwd: packCwd }); + // Find the generated tarball (package managers create it with their own naming) const files = await fs.readdir(packDestinationDirectory); const tarball = files.find(file => file.endsWith('.tgz')); From b813c6f084b9bf85aa8faa8bfaee8a654ecd049a Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:21:35 +0900 Subject: [PATCH 04/13] feat: support glob patterns in files field Use fast-glob to handle glob patterns, directories, and dotfiles in package.json files field. This enables proper publishing of gitignored build artifacts with flexible pattern matching. --- package.json | 1 + pnpm-lock.yaml | 3 + src/utils/pack-package.ts | 79 +++++++++------------ tests/index.ts | 140 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 178 insertions(+), 45 deletions(-) diff --git a/package.json b/package.json index 3a9a788..e1905b5 100644 --- a/package.json +++ b/package.json @@ -30,6 +30,7 @@ "prepack": "pnpm build && clean-pkg-json" }, "dependencies": { + "fast-glob": "^3.3.3", "gunzip-maybe": "^1.4.2", "tar-fs": "^3.1.1", "tasuku": "^2.0.2" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0de687f..19f9043 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + fast-glob: + specifier: ^3.3.3 + version: 3.3.3 gunzip-maybe: specifier: ^1.4.2 version: 1.4.2 diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index e26b5c1..baee814 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -1,35 +1,13 @@ import path from 'node:path'; import fs from 'node:fs/promises'; import spawn from 'nano-spawn'; +import glob from 'fast-glob'; import type { PackageManager } from './detect-package-manager.js'; import { readJson } from './read-json.js'; -const copyFileIfExists = async (source: string, destination: string): Promise => { - try { - await fs.mkdir(path.dirname(destination), { recursive: true }); - await fs.copyFile(source, destination); - } catch (error: any) { - if (error.code !== 'ENOENT') { - throw error; - } - } -}; - -const copyDirectory = async (source: string, destination: string): Promise => { - await fs.mkdir(destination, { recursive: true }); - - const entries = await fs.readdir(source, { withFileTypes: true }); - - for (const entry of entries) { - const sourcePath = path.join(source, entry.name); - const destinationPath = path.join(destination, entry.name); - - if (entry.isDirectory()) { - await copyDirectory(sourcePath, destinationPath); - } else if (entry.isFile()) { - await fs.copyFile(sourcePath, destinationPath); - } - } +const copyFile = async (source: string, destination: string): Promise => { + await fs.mkdir(path.dirname(destination), { recursive: true }); + await fs.copyFile(source, destination); }; export const packPackage = async ( @@ -51,30 +29,41 @@ export const packPackage = async ( const packageJsonPath = path.join(cwd, 'package.json'); const packageJson = await readJson(packageJsonPath) as { files?: string[] }; - if (packageJson.files) { + if (packageJson.files && packageJson.files.length > 0) { const packWorktreePackageRoot = isMonorepo ? path.join(packWorktreePath, gitSubdirectory) : packWorktreePath; - for (const filePattern of packageJson.files) { - const sourcePath = path.join(cwd, filePattern); - const destinationPath = path.join(packWorktreePackageRoot, filePattern); - - try { - const stats = await fs.stat(sourcePath); - if (stats.isDirectory()) { - // Copy entire directory - await copyDirectory(sourcePath, destinationPath); - } else if (stats.isFile()) { - // Copy single file - await copyFileIfExists(sourcePath, destinationPath); + // Transform directory entries to glob patterns + // npm/pnpm treat 'dist' as 'dist/**', but fast-glob needs explicit patterns + const patterns = await Promise.all( + packageJson.files.map(async (entry) => { + const fullPath = path.join(cwd, entry); + try { + const stats = await fs.stat(fullPath); + // If it's a directory, expand to recursive pattern + return stats.isDirectory() ? `${entry}/**` : entry; + } catch { + // If stat fails, treat as glob pattern (may not exist yet or is a pattern) + return entry; } - } catch (error: any) { - // File/directory doesn't exist in user's directory, skip it - if (error.code !== 'ENOENT') { - throw error; - } - } + }), + ); + + // Use fast-glob to resolve patterns in files field + // This handles glob patterns like "dist/*.js", directories like "dist", and dotfiles + const matchedFiles = await glob(patterns, { + cwd, + dot: true, // Include dotfiles like .env.production + gitignore: false, // Include gitignored files (they may be built artifacts we need to pack) + }); + + // Copy all matched files to pack worktree + for (const relativePath of matchedFiles) { + const sourcePath = path.join(cwd, relativePath); + const destinationPath = path.join(packWorktreePackageRoot, relativePath); + + await copyFile(sourcePath, destinationPath); } } diff --git a/tests/index.ts b/tests/index.ts index d10272f..bf36926 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -507,5 +507,145 @@ describe('git-publish', ({ describe }) => { const publishedFileContent = await git('show', [`origin/${publishedBranch}:prepack-created-file.txt`]); expect(publishedFileContent.trim()).toBe('hook-ran'); }); + + test('publishes gitignored files specified by glob pattern', async ({ onTestFail }) => { + const branchName = 'test-glob-pattern'; + + // Test that glob patterns in "files" field work correctly + // Pattern "dist/*.js" should only match .js files in dist, not subdirectories + await using fixture = await createFixture({ + 'package.json': JSON.stringify({ + name: 'test-glob-pattern', + version: '1.0.0', + files: ['dist/*.js'], + }, null, 2), + dist: { + 'index.js': 'export const main = true;', + 'utils.js': 'export const util = () => {};', + 'types.ts': '// This should not be published', + nested: { + 'deep.js': '// This should not be published (not matched by dist/*.js)', + }, + }, + '.gitignore': 'dist', + }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + const gitPublishProcess = await gitPublish(fixture.path, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); + + // Verify only .js files in dist root are published + const publishedBranch = `npm/${branchName}`; + const filesInTreeString = await git('ls-tree', ['-r', '--name-only', `origin/${publishedBranch}`]); + const filesInTree = filesInTreeString.split('\n').filter(Boolean).sort(); + expect(filesInTree).toEqual([ + 'dist/index.js', + 'dist/utils.js', + 'package.json', + ]); + }); + + test('publishes gitignored directory recursively', async ({ onTestFail }) => { + const branchName = 'test-directory-recursive'; + + // Test that directory in "files" field includes all files recursively + await using fixture = await createFixture({ + 'package.json': JSON.stringify({ + name: 'test-directory-recursive', + version: '1.0.0', + files: ['dist'], + }, null, 2), + dist: { + 'index.js': 'export const main = true;', + nested: { + 'deep.js': 'export const deep = true;', + 'utils.js': 'export const util = () => {};', + }, + }, + '.gitignore': 'dist', + }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + const gitPublishProcess = await gitPublish(fixture.path, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); + + // Verify all files in dist are published recursively + const publishedBranch = `npm/${branchName}`; + const filesInTreeString = await git('ls-tree', ['-r', '--name-only', `origin/${publishedBranch}`]); + const filesInTree = filesInTreeString.split('\n').filter(Boolean).sort(); + expect(filesInTree).toEqual([ + 'dist/index.js', + 'dist/nested/deep.js', + 'dist/nested/utils.js', + 'package.json', + ]); + }); + + test('publishes gitignored dotfiles', async ({ onTestFail }) => { + const branchName = 'test-dotfiles'; + + // Test that dotfiles specified in "files" field are published + await using fixture = await createFixture({ + 'package.json': JSON.stringify({ + name: 'test-dotfiles', + version: '1.0.0', + files: ['.env.production', 'dist'], + }, null, 2), + '.env.production': 'PRODUCTION=true', + dist: { + 'index.js': 'export const main = true;', + }, + '.env.development': '// This should not be published', + '.gitignore': 'dist\n.env.*', + }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + const gitPublishProcess = await gitPublish(fixture.path, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); + + // Verify dotfile and dist files are published + const publishedBranch = `npm/${branchName}`; + const filesInTreeString = await git('ls-tree', ['-r', '--name-only', `origin/${publishedBranch}`]); + const filesInTree = filesInTreeString.split('\n').filter(Boolean).sort(); + expect(filesInTree).toEqual([ + '.env.production', + 'dist/index.js', + 'package.json', + ]); + + // Verify dotfile content + const dotfileContent = await git('show', [`origin/${publishedBranch}:.env.production`]); + expect(dotfileContent).toBe('PRODUCTION=true'); + }); }); }); From 02164b876bca292bbb330af5695f118605289763 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:23:44 +0900 Subject: [PATCH 05/13] wip --- ...025-11-08-two-worktree-isolation-design.md | 150 ------------------ 1 file changed, 150 deletions(-) delete mode 100644 docs/plans/2025-11-08-two-worktree-isolation-design.md diff --git a/docs/plans/2025-11-08-two-worktree-isolation-design.md b/docs/plans/2025-11-08-two-worktree-isolation-design.md deleted file mode 100644 index da84608..0000000 --- a/docs/plans/2025-11-08-two-worktree-isolation-design.md +++ /dev/null @@ -1,150 +0,0 @@ -# Two-Worktree Isolation Design - -**Date:** 2025-11-08 -**Status:** Approved - -## Problem - -Pack hooks (prepare, prepack) currently run in the user's working directory, causing files to be modified there. The current fix uses a custom `copyDirectory` implementation with monorepo special-casing and symlink management, adding ~100 lines of complex logic. - -## Solution - -Use Git's native worktree mechanism to create two isolated environments: - -1. **Pack worktree:** Clean checkout of HEAD where pack runs -2. **Publish worktree:** Clean checkout of publish branch where tarball extracts - -This eliminates file copying logic while maintaining fast execution via symlinked node_modules. - -## Architecture - -``` -User's working directory (untouched) - -/tmp/git-publish-xxx/ - ├── pack-worktree/ (HEAD checkout, symlinked node_modules) - │ └─> Run pack here (isolated) - │ └─> Tarball output - │ - └── publish-worktree/ (publish branch, empty) - └─> Extract tarball here - └─> Commit & push -``` - -## Implementation Flow - -### 1. Create Publish Worktree (existing) - -```typescript -const publishWorktreePath = path.join(temporaryDirectory, 'publish-worktree'); -await spawn('git', ['worktree', 'add', '--force', publishWorktreePath, 'HEAD']); - -// Checkout publish branch or create orphan -if (fresh || !remoteBranchExists) { - await spawn('git', ['checkout', '--orphan', localTemporaryBranch], { cwd: publishWorktreePath }); -} else { - await spawn('git', ['fetch', '--depth=1', remote, `${publishBranch}:${localTemporaryBranch}`]); - await spawn('git', ['symbolic-ref', 'HEAD', `refs/heads/${localTemporaryBranch}`], { cwd: publishWorktreePath }); -} - -// Clear completely -await spawn('git', ['rm', '--cached', '-r', ':/'], { cwd: publishWorktreePath }).catch(() => {}); -await spawn('git', ['clean', '-fdx'], { cwd: publishWorktreePath }); -``` - -### 2. Create Pack Worktree (new) - -```typescript -const packWorktreePath = path.join(temporaryDirectory, 'pack-worktree'); -await spawn('git', ['worktree', 'add', '--force', packWorktreePath, 'HEAD'], { cwd: gitRootPath }); -``` - -### 3. Symlink node_modules (new) - -**Regular package:** -```typescript -const nodeModulesPath = path.join(cwd, 'node_modules'); -await fs.symlink( - nodeModulesPath, - path.join(packWorktreePath, 'node_modules'), - 'dir' -); -``` - -**Monorepo:** -```typescript -// Root node_modules -const rootNodeModules = path.join(gitRootPath, 'node_modules'); -await fs.symlink( - rootNodeModules, - path.join(packWorktreePath, 'node_modules'), - 'dir' -); - -// Package node_modules (if exists) -const packageNodeModules = path.join(cwd, 'node_modules'); -await fs.symlink( - packageNodeModules, - path.join(packWorktreePath, gitSubdirectory, 'node_modules'), - 'dir' -); -``` - -### 4. Run Pack (modified) - -```typescript -const packArgs = packageManager === 'bun' - ? ['pm', 'pack', '--destination', packTarballDirectory] - : ['pack', '--pack-destination', packTarballDirectory]; - -const packCwd = gitSubdirectory - ? path.join(packWorktreePath, gitSubdirectory) - : packWorktreePath; - -await spawn(packageManager, packArgs, { cwd: packCwd }); -``` - -### 5. Extract (existing) - -```typescript -await extractTarball(tarballPath, publishWorktreePath); -``` - -### 6. Cleanup (modified) - -```typescript -await spawn('git', ['worktree', 'remove', '--force', publishWorktreePath]); -await spawn('git', ['worktree', 'remove', '--force', packWorktreePath]); -await spawn('git', ['branch', '-D', localTemporaryBranch]); -await fs.rm(temporaryDirectory, { recursive: true, force: true }); -``` - -## Benefits - -1. **Simplicity:** Eliminates ~90 lines of custom file copying and filtering logic -2. **Performance:** No file copying, only symlinks for node_modules -3. **Correctness:** Uses Git's native worktree mechanism instead of manual operations -4. **Maintainability:** Less code, clearer intent, easier to understand - -## Code Changes - -**Remove:** -- `copyDirectory` helper function -- Monorepo special-case copying logic -- Recursive node_modules exclusion logic - -**Add:** -- Pack worktree creation -- Simplified node_modules symlinking - -**Modify:** -- `packPackage()` function signature and implementation -- Cleanup to remove both worktrees - -## Testing - -All existing tests should pass without modification: -- `prepack hook does not modify working directory` - verifies isolation -- `dependencies are accessible in pack hooks` - verifies symlinks work -- `monorepo workspace structure is accessible` - verifies monorepo handling -- All other existing tests From 0751975f2ddf9070d7e9105ad3e97fb114be1bc8 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:30:47 +0900 Subject: [PATCH 06/13] refactor: address code review feedback - Rename creatingWorkTree to creatingWorktrees for consistency with plural task name - Only catch ENOENT in pattern mapping, re-throw permission errors - Add test for graceful failure when node_modules is missing --- src/index.ts | 4 ++-- src/utils/pack-package.ts | 8 ++++++-- tests/index.ts | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 4 deletions(-) diff --git a/src/index.ts b/src/index.ts index 8e44255..bcf976d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -116,7 +116,7 @@ const { stringify } = JSON; let commitSha: string; const packageManager = await detectPackageManager(cwd, gitRootPath); - const creatingWorkTree = await task('Creating worktrees', async ({ setWarning }) => { + const creatingWorktrees = await task('Creating worktrees', async ({ setWarning }) => { if (dry) { setWarning(''); return; @@ -132,7 +132,7 @@ const { stringify } = JSON; }); if (!dry) { - creatingWorkTree.clear(); + creatingWorktrees.clear(); } try { diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index baee814..8cbc349 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -43,8 +43,12 @@ export const packPackage = async ( const stats = await fs.stat(fullPath); // If it's a directory, expand to recursive pattern return stats.isDirectory() ? `${entry}/**` : entry; - } catch { - // If stat fails, treat as glob pattern (may not exist yet or is a pattern) + } catch (error: any) { + // Only catch ENOENT (file not found) - treat as glob pattern + // Re-throw other errors like EPERM (permission denied) + if (error.code !== 'ENOENT') { + throw error; + } return entry; } }), diff --git a/tests/index.ts b/tests/index.ts index bf36926..5c5dc34 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -508,6 +508,41 @@ describe('git-publish', ({ describe }) => { expect(publishedFileContent.trim()).toBe('hook-ran'); }); + test('fails gracefully when pack hook dependencies are missing', async ({ onTestFail }) => { + const branchName = 'test-missing-deps'; + + // Test that script doesn't crash on ENOENT when symlinking node_modules + // Pack should fail gracefully with proper error message + await using fixture = await createFixture({ + 'package.json': JSON.stringify({ + name: 'test-missing-deps', + version: '1.0.0', + scripts: { + prepack: 'nonexistent-binary', + }, + }, null, 2), + 'index.js': 'export const main = true;', + }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + // Do NOT run npm install - node_modules won't exist + const gitPublishProcess = await gitPublish(fixture.path, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + // Should fail with exit code + expect('exitCode' in gitPublishProcess).toBe(true); + if ('exitCode' in gitPublishProcess) { + expect(gitPublishProcess.exitCode).not.toBe(0); + } + }); + test('publishes gitignored files specified by glob pattern', async ({ onTestFail }) => { const branchName = 'test-glob-pattern'; From fc13e5fab2ea720cfcdb978a2aa4525b61d29a2c Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:31:28 +0900 Subject: [PATCH 07/13] wip --- src/utils/pack-package.ts | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index 8cbc349..6e74e2e 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -76,7 +76,10 @@ export const packPackage = async ( if (isMonorepo) { // Root node_modules const rootNodeModulesTarget = path.join(packWorktreePath, 'node_modules'); - await fs.rm(rootNodeModulesTarget, { recursive: true, force: true }); + await fs.rm(rootNodeModulesTarget, { + recursive: true, + force: true, + }); try { await fs.symlink( path.join(gitRootPath, 'node_modules'), @@ -92,7 +95,10 @@ export const packPackage = async ( // Package node_modules (if exists) const packageNodeModulesTarget = path.join(packWorktreePath, gitSubdirectory, 'node_modules'); - await fs.rm(packageNodeModulesTarget, { recursive: true, force: true }); + await fs.rm(packageNodeModulesTarget, { + recursive: true, + force: true, + }); try { await fs.symlink( path.join(cwd, 'node_modules'), @@ -107,7 +113,10 @@ export const packPackage = async ( } else { // Regular package node_modules const nodeModulesTarget = path.join(packWorktreePath, 'node_modules'); - await fs.rm(nodeModulesTarget, { recursive: true, force: true }); + await fs.rm(nodeModulesTarget, { + recursive: true, + force: true, + }); try { await fs.symlink( path.join(cwd, 'node_modules'), From 685a2633fce8d1454e982d1afe893ce39145a22f Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:34:07 +0900 Subject: [PATCH 08/13] fix: resolve linting errors - Replace `any` types with `unknown` and proper type guards - Break long comment line to respect max-len rule --- src/utils/pack-package.ts | 39 ++++++++++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index 6e74e2e..90da342 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -43,10 +43,15 @@ export const packPackage = async ( const stats = await fs.stat(fullPath); // If it's a directory, expand to recursive pattern return stats.isDirectory() ? `${entry}/**` : entry; - } catch (error: any) { + } catch (error: unknown) { // Only catch ENOENT (file not found) - treat as glob pattern // Re-throw other errors like EPERM (permission denied) - if (error.code !== 'ENOENT') { + if ( + typeof error === 'object' + && error !== null + && 'code' in error + && error.code !== 'ENOENT' + ) { throw error; } return entry; @@ -72,7 +77,8 @@ export const packPackage = async ( } // Symlink node_modules so hooks have access to dependencies - // Note: Remove any existing node_modules directory in worktree first (git might have checked it out) + // Note: Remove any existing node_modules directory in worktree first + // (git might have checked it out) if (isMonorepo) { // Root node_modules const rootNodeModulesTarget = path.join(packWorktreePath, 'node_modules'); @@ -86,9 +92,14 @@ export const packPackage = async ( rootNodeModulesTarget, 'dir', ); - } catch (error: any) { + } catch (error: unknown) { // If node_modules doesn't exist, ignore (pack will likely fail later) - if (error.code !== 'ENOENT') { + if ( + typeof error === 'object' + && error !== null + && 'code' in error + && error.code !== 'ENOENT' + ) { throw error; } } @@ -105,8 +116,13 @@ export const packPackage = async ( packageNodeModulesTarget, 'dir', ); - } catch (error: any) { - if (error.code !== 'ENOENT') { + } catch (error: unknown) { + if ( + typeof error === 'object' + && error !== null + && 'code' in error + && error.code !== 'ENOENT' + ) { throw error; } } @@ -123,8 +139,13 @@ export const packPackage = async ( nodeModulesTarget, 'dir', ); - } catch (error: any) { - if (error.code !== 'ENOENT') { + } catch (error: unknown) { + if ( + typeof error === 'object' + && error !== null + && 'code' in error + && error.code !== 'ENOENT' + ) { throw error; } } From 0930cd806b2864f8fc4ee539854802d0d0206a93 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:39:05 +0900 Subject: [PATCH 09/13] refactor: address PR review nitpicks - Extract ENOENT type guard to throwUnlessEnoent helper to reduce duplication - Strengthen graceful failure test to verify exit code 127 (command not found) proving the script correctly handed off to pack which then failed --- src/utils/pack-package.ts | 47 +++++++++++++-------------------------- tests/index.ts | 5 +++++ 2 files changed, 20 insertions(+), 32 deletions(-) diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index 90da342..9e4f209 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -10,6 +10,17 @@ const copyFile = async (source: string, destination: string): Promise => { await fs.copyFile(source, destination); }; +const throwUnlessEnoent = (error: unknown): void => { + if ( + typeof error === 'object' + && error !== null + && 'code' in error + && error.code !== 'ENOENT' + ) { + throw error; + } +}; + export const packPackage = async ( packageManager: PackageManager, packWorktreePath: string, @@ -46,14 +57,7 @@ export const packPackage = async ( } catch (error: unknown) { // Only catch ENOENT (file not found) - treat as glob pattern // Re-throw other errors like EPERM (permission denied) - if ( - typeof error === 'object' - && error !== null - && 'code' in error - && error.code !== 'ENOENT' - ) { - throw error; - } + throwUnlessEnoent(error); return entry; } }), @@ -94,14 +98,7 @@ export const packPackage = async ( ); } catch (error: unknown) { // If node_modules doesn't exist, ignore (pack will likely fail later) - if ( - typeof error === 'object' - && error !== null - && 'code' in error - && error.code !== 'ENOENT' - ) { - throw error; - } + throwUnlessEnoent(error); } // Package node_modules (if exists) @@ -117,14 +114,7 @@ export const packPackage = async ( 'dir', ); } catch (error: unknown) { - if ( - typeof error === 'object' - && error !== null - && 'code' in error - && error.code !== 'ENOENT' - ) { - throw error; - } + throwUnlessEnoent(error); } } else { // Regular package node_modules @@ -140,14 +130,7 @@ export const packPackage = async ( 'dir', ); } catch (error: unknown) { - if ( - typeof error === 'object' - && error !== null - && 'code' in error - && error.code !== 'ENOENT' - ) { - throw error; - } + throwUnlessEnoent(error); } } diff --git a/tests/index.ts b/tests/index.ts index 5c5dc34..2ed9d4a 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -540,6 +540,11 @@ describe('git-publish', ({ describe }) => { expect('exitCode' in gitPublishProcess).toBe(true); if ('exitCode' in gitPublishProcess) { expect(gitPublishProcess.exitCode).not.toBe(0); + + // Verify failure is from pack command (not from fs.symlink crash) + // Exit code 127 means "command not found" - proves pack ran and failed + // (If fs.symlink crashed, we wouldn't get this far) + expect(gitPublishProcess.stdout).toMatch(/exit code 127/); } }); From a642d614db9b24e0b9493dc3f5ac31f666f5db2c Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:43:30 +0900 Subject: [PATCH 10/13] test: add monorepo node_modules symlinking validation Add two critical tests to validate the new symlinking logic: 1. monorepo prepack hook can access root node_modules - Verifies fs.symlink(gitRootPath/node_modules) works correctly - Uses clean-pkg-json from root devDependencies in prepack hook - Confirms scripts field is removed from published package.json 2. monorepo prepack hook can access package-level node_modules - Verifies fs.symlink(cwd/node_modules) works correctly - Uses mkdirp from package-level devDependencies in prepack hook - Confirms dist/output.txt is created by the hook These tests close the gap in coverage for the two-worktree symlinking implementation. --- tests/index.ts | 102 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/tests/index.ts b/tests/index.ts index 2ed9d4a..059f9c0 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -304,6 +304,108 @@ describe('git-publish', ({ describe }) => { // Catalog should be resolved to actual version expect(packageJson.dependencies.ms).toBe(msVersion); }); + + test('monorepo prepack hook can access root node_modules', async ({ onTestFail }) => { + const branchName = 'test-monorepo-root-deps'; + const packageName = '@org/root-deps-test'; + + // Test that prepack hooks can access binaries from root node_modules + await using fixture = await createFixture({ + 'pnpm-workspace.yaml': yaml.dump({ + packages: ['packages/*'], + }), + 'package.json': JSON.stringify({ + private: true, + devDependencies: { + 'clean-pkg-json': '^1.0.0', + }, + }, null, 2), + 'packages/test-pkg': { + 'package.json': JSON.stringify({ + name: packageName, + version: '0.0.0', + scripts: { + prepack: 'clean-pkg-json', + }, + }, null, 2), + 'index.js': 'export const main = true;', + }, + }); + + await spawn('pnpm', ['install'], { cwd: fixture.path }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + const monorepoPackagePath = path.join(fixture.path, 'packages/test-pkg'); + const gitPublishProcess = await gitPublish(monorepoPackagePath, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); + + // Verify clean-pkg-json ran (scripts field should be removed) + const publishedBranch = `npm/${branchName}-${packageName}`; + const packageJsonString = await git('show', [`origin/${publishedBranch}:package.json`]); + const packageJson = JSON.parse(packageJsonString); + expect(packageJson.scripts).toBeUndefined(); + }); + + test('monorepo prepack hook can access package-level node_modules', async ({ onTestFail }) => { + const branchName = 'test-monorepo-pkg-deps'; + const packageName = '@org/pkg-deps-test'; + + // Test that prepack hooks can access binaries from package-level node_modules + await using fixture = await createFixture({ + 'pnpm-workspace.yaml': yaml.dump({ + packages: ['packages/*'], + }), + 'package.json': JSON.stringify({ + private: true, + }, null, 2), + 'packages/test-pkg': { + 'package.json': JSON.stringify({ + name: packageName, + version: '0.0.0', + scripts: { + prepack: 'mkdirp dist && echo "built" > dist/output.txt', + }, + devDependencies: { + mkdirp: '^3.0.0', + }, + files: ['dist'], + }, null, 2), + 'index.js': 'export const main = true;', + }, + }); + + await spawn('pnpm', ['install'], { cwd: fixture.path }); + + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); + + const monorepoPackagePath = path.join(fixture.path, 'packages/test-pkg'); + const gitPublishProcess = await gitPublish(monorepoPackagePath, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); + + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); + + // Verify mkdirp ran and created dist/output.txt + const publishedBranch = `npm/${branchName}-${packageName}`; + const outputContent = await git('show', [`origin/${publishedBranch}:dist/output.txt`]); + expect(outputContent.trim()).toBe('built'); + }); }); test('npm pack is used', async ({ onTestFail }) => { From d777cfde7cb608760f2f921db827dc4751cca54d Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:44:01 +0900 Subject: [PATCH 11/13] wip --- tests/index.ts | 176 ++++++++++++++++++++++++------------------------- 1 file changed, 88 insertions(+), 88 deletions(-) diff --git a/tests/index.ts b/tests/index.ts index 059f9c0..3d8bf3e 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -305,107 +305,107 @@ describe('git-publish', ({ describe }) => { expect(packageJson.dependencies.ms).toBe(msVersion); }); - test('monorepo prepack hook can access root node_modules', async ({ onTestFail }) => { - const branchName = 'test-monorepo-root-deps'; - const packageName = '@org/root-deps-test'; - - // Test that prepack hooks can access binaries from root node_modules - await using fixture = await createFixture({ - 'pnpm-workspace.yaml': yaml.dump({ - packages: ['packages/*'], - }), - 'package.json': JSON.stringify({ - private: true, - devDependencies: { - 'clean-pkg-json': '^1.0.0', - }, - }, null, 2), - 'packages/test-pkg': { - 'package.json': JSON.stringify({ - name: packageName, - version: '0.0.0', - scripts: { - prepack: 'clean-pkg-json', + test('monorepo prepack hook can access root node_modules', async ({ onTestFail }) => { + const branchName = 'test-monorepo-root-deps'; + const packageName = '@org/root-deps-test'; + + // Test that prepack hooks can access binaries from root node_modules + await using fixture = await createFixture({ + 'pnpm-workspace.yaml': yaml.dump({ + packages: ['packages/*'], + }), + 'package.json': JSON.stringify({ + private: true, + devDependencies: { + 'clean-pkg-json': '^1.0.0', + }, + }, null, 2), + 'packages/test-pkg': { + 'package.json': JSON.stringify({ + name: packageName, + version: '0.0.0', + scripts: { + prepack: 'clean-pkg-json', + }, + }, null, 2), + 'index.js': 'export const main = true;', }, - }, null, 2), - 'index.js': 'export const main = true;', - }, - }); + }); - await spawn('pnpm', ['install'], { cwd: fixture.path }); + await spawn('pnpm', ['install'], { cwd: fixture.path }); - const git = createGit(fixture.path); - await git.init([`--initial-branch=${branchName}`]); - await git('add', ['.']); - await git('commit', ['-m', 'Initial commit']); - await git('remote', ['add', 'origin', remoteFixture.path]); + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); - const monorepoPackagePath = path.join(fixture.path, 'packages/test-pkg'); - const gitPublishProcess = await gitPublish(monorepoPackagePath, ['--fresh']); - onTestFail(() => { - console.log(gitPublishProcess); - }); + const monorepoPackagePath = path.join(fixture.path, 'packages/test-pkg'); + const gitPublishProcess = await gitPublish(monorepoPackagePath, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); - expect('exitCode' in gitPublishProcess).toBe(false); - expect(gitPublishProcess.stdout).toMatch('✔'); + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); - // Verify clean-pkg-json ran (scripts field should be removed) - const publishedBranch = `npm/${branchName}-${packageName}`; - const packageJsonString = await git('show', [`origin/${publishedBranch}:package.json`]); - const packageJson = JSON.parse(packageJsonString); - expect(packageJson.scripts).toBeUndefined(); - }); + // Verify clean-pkg-json ran (scripts field should be removed) + const publishedBranch = `npm/${branchName}-${packageName}`; + const packageJsonString = await git('show', [`origin/${publishedBranch}:package.json`]); + const packageJson = JSON.parse(packageJsonString); + expect(packageJson.scripts).toBeUndefined(); + }); - test('monorepo prepack hook can access package-level node_modules', async ({ onTestFail }) => { - const branchName = 'test-monorepo-pkg-deps'; - const packageName = '@org/pkg-deps-test'; - - // Test that prepack hooks can access binaries from package-level node_modules - await using fixture = await createFixture({ - 'pnpm-workspace.yaml': yaml.dump({ - packages: ['packages/*'], - }), - 'package.json': JSON.stringify({ - private: true, - }, null, 2), - 'packages/test-pkg': { - 'package.json': JSON.stringify({ - name: packageName, - version: '0.0.0', - scripts: { - prepack: 'mkdirp dist && echo "built" > dist/output.txt', - }, - devDependencies: { - mkdirp: '^3.0.0', + test('monorepo prepack hook can access package-level node_modules', async ({ onTestFail }) => { + const branchName = 'test-monorepo-pkg-deps'; + const packageName = '@org/pkg-deps-test'; + + // Test that prepack hooks can access binaries from package-level node_modules + await using fixture = await createFixture({ + 'pnpm-workspace.yaml': yaml.dump({ + packages: ['packages/*'], + }), + 'package.json': JSON.stringify({ + private: true, + }, null, 2), + 'packages/test-pkg': { + 'package.json': JSON.stringify({ + name: packageName, + version: '0.0.0', + scripts: { + prepack: 'mkdirp dist && echo "built" > dist/output.txt', + }, + devDependencies: { + mkdirp: '^3.0.0', + }, + files: ['dist'], + }, null, 2), + 'index.js': 'export const main = true;', }, - files: ['dist'], - }, null, 2), - 'index.js': 'export const main = true;', - }, - }); + }); - await spawn('pnpm', ['install'], { cwd: fixture.path }); + await spawn('pnpm', ['install'], { cwd: fixture.path }); - const git = createGit(fixture.path); - await git.init([`--initial-branch=${branchName}`]); - await git('add', ['.']); - await git('commit', ['-m', 'Initial commit']); - await git('remote', ['add', 'origin', remoteFixture.path]); + const git = createGit(fixture.path); + await git.init([`--initial-branch=${branchName}`]); + await git('add', ['.']); + await git('commit', ['-m', 'Initial commit']); + await git('remote', ['add', 'origin', remoteFixture.path]); - const monorepoPackagePath = path.join(fixture.path, 'packages/test-pkg'); - const gitPublishProcess = await gitPublish(monorepoPackagePath, ['--fresh']); - onTestFail(() => { - console.log(gitPublishProcess); - }); + const monorepoPackagePath = path.join(fixture.path, 'packages/test-pkg'); + const gitPublishProcess = await gitPublish(monorepoPackagePath, ['--fresh']); + onTestFail(() => { + console.log(gitPublishProcess); + }); - expect('exitCode' in gitPublishProcess).toBe(false); - expect(gitPublishProcess.stdout).toMatch('✔'); + expect('exitCode' in gitPublishProcess).toBe(false); + expect(gitPublishProcess.stdout).toMatch('✔'); - // Verify mkdirp ran and created dist/output.txt - const publishedBranch = `npm/${branchName}-${packageName}`; - const outputContent = await git('show', [`origin/${publishedBranch}:dist/output.txt`]); - expect(outputContent.trim()).toBe('built'); - }); + // Verify mkdirp ran and created dist/output.txt + const publishedBranch = `npm/${branchName}-${packageName}`; + const outputContent = await git('show', [`origin/${publishedBranch}:dist/output.txt`]); + expect(outputContent.trim()).toBe('built'); + }); }); test('npm pack is used', async ({ onTestFail }) => { From 8ed045ceccc578b153b53b5672eee63fd2f23534 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:46:54 +0900 Subject: [PATCH 12/13] refactor: preserve stack trace in ENOENT error handling Rename throwUnlessEnoent to isNotEnoent and return boolean condition instead of throwing. This keeps the throw statement at the call site, preserving the original stack trace for better debugging. Before: Error thrown from helper loses original location After: Error thrown from actual catch block with accurate stack trace --- src/utils/pack-package.ts | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index 9e4f209..c9fcf57 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -10,16 +10,12 @@ const copyFile = async (source: string, destination: string): Promise => { await fs.copyFile(source, destination); }; -const throwUnlessEnoent = (error: unknown): void => { - if ( - typeof error === 'object' - && error !== null - && 'code' in error - && error.code !== 'ENOENT' - ) { - throw error; - } -}; +const isNotEnoent = (error: unknown): boolean => ( + typeof error === 'object' + && error !== null + && 'code' in error + && error.code !== 'ENOENT' +); export const packPackage = async ( packageManager: PackageManager, @@ -57,7 +53,9 @@ export const packPackage = async ( } catch (error: unknown) { // Only catch ENOENT (file not found) - treat as glob pattern // Re-throw other errors like EPERM (permission denied) - throwUnlessEnoent(error); + if (isNotEnoent(error)) { + throw error; + } return entry; } }), @@ -98,7 +96,9 @@ export const packPackage = async ( ); } catch (error: unknown) { // If node_modules doesn't exist, ignore (pack will likely fail later) - throwUnlessEnoent(error); + if (isNotEnoent(error)) { + throw error; + } } // Package node_modules (if exists) @@ -114,7 +114,9 @@ export const packPackage = async ( 'dir', ); } catch (error: unknown) { - throwUnlessEnoent(error); + if (isNotEnoent(error)) { + throw error; + } } } else { // Regular package node_modules @@ -130,7 +132,9 @@ export const packPackage = async ( 'dir', ); } catch (error: unknown) { - throwUnlessEnoent(error); + if (isNotEnoent(error)) { + throw error; + } } } From 3a6169d1edfad7150c9d79fe4bc3e2774c70c9f6 Mon Sep 17 00:00:00 2001 From: Hiroki Osame Date: Sat, 8 Nov 2025 22:48:07 +0900 Subject: [PATCH 13/13] fix: remove non-existent gitignore option from fast-glob fast-glob doesn't have a gitignore option - it doesn't respect .gitignore files by default, which is the behavior we want (to include gitignored build artifacts like dist/). Updated comment to clarify this behavior. --- src/utils/pack-package.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/utils/pack-package.ts b/src/utils/pack-package.ts index c9fcf57..c4f3f59 100644 --- a/src/utils/pack-package.ts +++ b/src/utils/pack-package.ts @@ -63,10 +63,10 @@ export const packPackage = async ( // Use fast-glob to resolve patterns in files field // This handles glob patterns like "dist/*.js", directories like "dist", and dotfiles + // fast-glob doesn't respect .gitignore by default, so gitignored files are included const matchedFiles = await glob(patterns, { cwd, dot: true, // Include dotfiles like .env.production - gitignore: false, // Include gitignored files (they may be built artifacts we need to pack) }); // Copy all matched files to pack worktree