From d584de4ec2529ac1a9c284803d04166a306dcbe5 Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 08:38:07 -0700 Subject: [PATCH 01/10] Add stats-based delta admission Add opt-in stats-v1 delta admission using persistent repository delta stats, JSONL decision logging, and calibration scripts. Keep empty admission mode on the historical behavior while letting stats-v1 use write-mass and read-pressure gates. Co-authored-by: Codex --- api.go | 37 ++ api_test.go | 69 ++ cmd/zoekt-git-index/main.go | 4 + cmd/zoekt-sourcegraph-indexserver/index.go | 7 + .../index_integration_test.go | 17 + .../index_test.go | 40 ++ cmd/zoekt-sourcegraph-indexserver/main.go | 25 + .../main_test.go | 32 + gitindex/index.go | 602 ++++++++++++++++++ gitindex/index_test.go | 410 ++++++++++++ index/builder.go | 1 + index/builder_test.go | 39 ++ .../analyze-query-stats-stacked-vs-clean.sh | 208 ++++++ scripts/delta-admission-one-step.sh | 241 +++++++ scripts/delta-admission-sequential.sh | 254 ++++++++ scripts/query-stats-stacked-vs-clean.sh | 167 +++++ 16 files changed, 2153 insertions(+) create mode 100755 scripts/analyze-query-stats-stacked-vs-clean.sh create mode 100755 scripts/delta-admission-one-step.sh create mode 100755 scripts/delta-admission-sequential.sh create mode 100755 scripts/query-stats-stacked-vs-clean.sh diff --git a/api.go b/api.go index 0ed06decf..228986d28 100644 --- a/api.go +++ b/api.go @@ -645,6 +645,43 @@ type Repository struct { // FileTombstones is a set of file paths that should be ignored across all branches // in this shard. FileTombstones map[string]struct{} `json:",omitempty"` + + // DeltaStats contains advisory statistics used by experimental delta-index + // admission rules. Missing or stale stats must not affect correctness. + DeltaStats *RepositoryDeltaStats `json:",omitempty"` +} + +// RepositoryDeltaStats records approximate live and physical index statistics +// for deciding whether a future delta build is likely to be cheap enough. 
+type RepositoryDeltaStats struct { + // LiveIndexedBytes approximates bytes that would contribute to shard sizing + // for the current live repository: file names plus indexed content bytes. + LiveIndexedBytes uint64 + + // LiveDocumentCount is the number of live, deduplicated indexed documents. + LiveDocumentCount uint64 + + // LivePathCount is the number of distinct live file paths. + LivePathCount uint64 + + // PhysicalIndexedBytes approximates bytes physically present across stacked + // shards, including stale documents hidden by tombstones. + PhysicalIndexedBytes uint64 + + // PhysicalDocumentCount is the number of physical documents across stacked + // shards, including stale documents hidden by tombstones. + PhysicalDocumentCount uint64 + + // TombstonePathCount is the number of paths currently tombstoned in old shards. + TombstonePathCount uint64 + + // DeltaLayerCount is the number of accepted delta generations since the last + // full rebuild. + DeltaLayerCount uint64 + + // LastFullIndexTimeUnix is the Unix timestamp of the last full rebuild when + // known. It is zero for old indexes where the value cannot be recovered. 
+ LastFullIndexTimeUnix int64 } func (r *Repository) UnmarshalJSON(data []byte) error { diff --git a/api_test.go b/api_test.go index 55e0be5d8..1a64e6218 100644 --- a/api_test.go +++ b/api_test.go @@ -349,6 +349,37 @@ func TestRepositoryMergeMutable(t *testing.T) { t.Fatalf("got different LineFragmentTemplate, %s vs %s", a.LineFragmentTemplate, b.LineFragmentTemplate) } }) + t.Run("different DeltaStats", func(t *testing.T) { + a := a + a.DeltaStats = &RepositoryDeltaStats{ + LiveIndexedBytes: 10, + LiveDocumentCount: 1, + LivePathCount: 1, + PhysicalIndexedBytes: 10, + PhysicalDocumentCount: 1, + } + b := a + b.DeltaStats = &RepositoryDeltaStats{ + LiveIndexedBytes: 20, + LiveDocumentCount: 2, + LivePathCount: 2, + PhysicalIndexedBytes: 30, + PhysicalDocumentCount: 3, + TombstonePathCount: 1, + DeltaLayerCount: 1, + } + + mutated, err := a.MergeMutable(&b) + if err != nil { + t.Fatalf("got err %v", err) + } + if mutated { + t.Fatalf("want mutated=false, got true") + } + if reflect.DeepEqual(a.DeltaStats, b.DeltaStats) { + t.Fatalf("MergeMutable should ignore advisory DeltaStats") + } + }) t.Run("all same", func(t *testing.T) { b := a mutated, err := a.MergeMutable(&b) @@ -364,6 +395,44 @@ func TestRepositoryMergeMutable(t *testing.T) { }) } +func TestRepositoryDeltaStatsJSON(t *testing.T) { + repo := Repository{ + ID: 1, + Name: "repo", + DeltaStats: &RepositoryDeltaStats{ + LiveIndexedBytes: 11, + LiveDocumentCount: 2, + LivePathCount: 2, + PhysicalIndexedBytes: 19, + PhysicalDocumentCount: 3, + TombstonePathCount: 1, + DeltaLayerCount: 2, + LastFullIndexTimeUnix: 123, + }, + } + + blob, err := json.Marshal(repo) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + + var round Repository + if err := json.Unmarshal(blob, &round); err != nil { + t.Fatalf("Unmarshal: %v", err) + } + if !reflect.DeepEqual(repo.DeltaStats, round.DeltaStats) { + t.Fatalf("DeltaStats round trip mismatch: got %+v, want %+v", round.DeltaStats, repo.DeltaStats) + } + + var old Repository 
+ if err := json.Unmarshal([]byte(`{"ID":1,"Name":"repo"}`), &old); err != nil { + t.Fatalf("Unmarshal old metadata: %v", err) + } + if old.DeltaStats != nil { + t.Fatalf("missing DeltaStats should decode as nil, got %+v", old.DeltaStats) + } +} + func TestMonthsSince1970(t *testing.T) { tests := []struct { name string diff --git a/cmd/zoekt-git-index/main.go b/cmd/zoekt-git-index/main.go index b8beb78da..c8f0d0216 100644 --- a/cmd/zoekt-git-index/main.go +++ b/cmd/zoekt-git-index/main.go @@ -45,6 +45,8 @@ func run() int { "It also affects name if the indexed repository is under this directory.") isDelta := flag.Bool("delta", false, "whether we should use delta build") deltaShardNumberFallbackThreshold := flag.Uint64("delta_threshold", 0, "upper limit on the number of preexisting shards that can exist before attempting a delta build (0 to disable fallback behavior)") + deltaAdmissionMode := flag.String("delta_admission_mode", "", "experimental delta admission mode (empty or stats-v1)") + deltaAdmissionLogJSON := flag.String("delta_admission_log_json", "", "append experimental delta admission decisions as JSON lines to this file") languageMap := flag.String("language_map", "", "a mapping between a language and its ctags processor (a:0,b:3).") cpuProfile := flag.String("cpu_profile", "", "write cpu profile to `file`") @@ -132,6 +134,8 @@ func run() int { Branches: branches, RepoDir: dir, DeltaShardNumberFallbackThreshold: *deltaShardNumberFallbackThreshold, + DeltaAdmissionMode: *deltaAdmissionMode, + DeltaAdmissionLogPath: *deltaAdmissionLogJSON, } if _, err := gitindex.IndexGitRepo(gitOpts); err != nil { diff --git a/cmd/zoekt-sourcegraph-indexserver/index.go b/cmd/zoekt-sourcegraph-indexserver/index.go index 36f55331b..69989c643 100644 --- a/cmd/zoekt-sourcegraph-indexserver/index.go +++ b/cmd/zoekt-sourcegraph-indexserver/index.go @@ -93,6 +93,10 @@ type indexArgs struct { // before attempting a delta build. 
DeltaShardNumberFallbackThreshold uint64 + // DeltaAdmissionMode controls experimental delta admission behavior in + // zoekt-git-index. Empty preserves current behavior. + DeltaAdmissionMode string + // ShardMerging is true if we want zoekt-git-index to respect compound shards. ShardMerging bool } @@ -402,6 +406,9 @@ func indexRepo(ctx context.Context, gitDir string, sourcegraph Sourcegraph, o *i if o.UseDelta { args = append(args, "-delta") args = append(args, "-delta_threshold", strconv.FormatUint(o.DeltaShardNumberFallbackThreshold, 10)) + if o.DeltaAdmissionMode != "" { + args = append(args, "-delta_admission_mode", o.DeltaAdmissionMode) + } } if len(o.LanguageMap) > 0 { diff --git a/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go b/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go index fc89a4598..c546dc3a5 100644 --- a/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go +++ b/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go @@ -326,6 +326,23 @@ func gitIndexOptionsForTest(args *indexArgs, repoDir string) gitindex.Options { BranchPrefix: "refs/heads/", Branches: branches, DeltaShardNumberFallbackThreshold: args.DeltaShardNumberFallbackThreshold, + DeltaAdmissionMode: args.DeltaAdmissionMode, + } +} + +func TestGitIndexOptionsForTestDoesNotResolveHEADAlias(t *testing.T) { + args := &indexArgs{ + IndexOptions: IndexOptions{ + Name: "test/repo", + Branches: []zoekt.RepositoryBranch{ + {Name: "HEAD", Version: "deadbeef"}, + }, + }, + } + + opts := gitIndexOptionsForTest(args, "/tmp/repo.git") + if opts.ResolveHEADToBranch { + t.Fatal("Sourcegraph synthetic bare repos should not resolve HEAD inside gitindex") } } diff --git a/cmd/zoekt-sourcegraph-indexserver/index_test.go b/cmd/zoekt-sourcegraph-indexserver/index_test.go index 45b4fc355..8832a2aa2 100644 --- a/cmd/zoekt-sourcegraph-indexserver/index_test.go +++ b/cmd/zoekt-sourcegraph-indexserver/index_test.go @@ -24,6 +24,7 @@ import ( "github.com/sourcegraph/zoekt" 
configv1 "github.com/sourcegraph/zoekt/cmd/zoekt-sourcegraph-indexserver/grpc/protos/sourcegraph/zoekt/configuration/v1" + "github.com/sourcegraph/zoekt/gitindex" "github.com/sourcegraph/zoekt/internal/ctags" ) @@ -616,6 +617,45 @@ func TestIndex(t *testing.T) { "-delta -delta_threshold 22 -file_limit 1048576 -parallelism 4 -index /data/index -require_ctags -large_file foo -large_file bar " + "$TMPDIR/test%2Frepo.git", }, + }, { + name: "delta stats-v1 admission", + args: indexArgs{ + Incremental: true, + IndexDir: "/data/index", + UseDelta: true, + DeltaAdmissionMode: gitindex.DeltaAdmissionModeStatsV1, + IndexOptions: IndexOptions{ + Name: "test/repo", + CloneURL: "http://api.test/.internal/git/test/repo", + Branches: []zoekt.RepositoryBranch{ + {Name: "HEAD", Version: "deadbeef"}, + }, + TenantID: 1, + }, + DeltaShardNumberFallbackThreshold: 22, + }, + mockRepositoryMetadata: &zoekt.Repository{ + Name: "test/repo", + Branches: []zoekt.RepositoryBranch{ + {Name: "HEAD", Version: "oldhead"}, + }, + }, + want: []string{ + "git -c init.defaultBranch=nonExistentBranchBB0FOFCH32 init --bare $TMPDIR/test%2Frepo.git", + "git -C $TMPDIR/test%2Frepo.git config --add http.extraHeader X-Sourcegraph-Actor-UID: internal", + "git -C $TMPDIR/test%2Frepo.git config --add http.extraHeader X-Sourcegraph-Tenant-ID: 1", + "git -C $TMPDIR/test%2Frepo.git -c protocol.version=2 fetch --depth=1 --no-tags --filter=blob:limit=1048577 http://api.test/.internal/git/test/repo deadbeef oldhead", + "git -C $TMPDIR/test%2Frepo.git update-ref HEAD deadbeef", + "git -C $TMPDIR/test%2Frepo.git config zoekt.archived 0", + "git -C $TMPDIR/test%2Frepo.git config zoekt.fork 0", + "git -C $TMPDIR/test%2Frepo.git config zoekt.latestCommitDate 1", + "git -C $TMPDIR/test%2Frepo.git config zoekt.name test/repo", + "git -C $TMPDIR/test%2Frepo.git config zoekt.priority 0", + "git -C $TMPDIR/test%2Frepo.git config zoekt.public 0", + "git -C $TMPDIR/test%2Frepo.git config zoekt.repoid 0", + "git -C 
$TMPDIR/test%2Frepo.git config zoekt.tenantID 1", + "zoekt-git-index -submodules=false -incremental -branches HEAD -delta -delta_threshold 22 -delta_admission_mode stats-v1 -file_limit 1048576 -index /data/index -disable_ctags $TMPDIR/test%2Frepo.git", + }, }} for _, tc := range cases { diff --git a/cmd/zoekt-sourcegraph-indexserver/main.go b/cmd/zoekt-sourcegraph-indexserver/main.go index c218ba0a7..334539ba6 100644 --- a/cmd/zoekt-sourcegraph-indexserver/main.go +++ b/cmd/zoekt-sourcegraph-indexserver/main.go @@ -56,6 +56,7 @@ import ( "github.com/sourcegraph/zoekt" configv1 "github.com/sourcegraph/zoekt/cmd/zoekt-sourcegraph-indexserver/grpc/protos/sourcegraph/zoekt/configuration/v1" indexserverv1 "github.com/sourcegraph/zoekt/cmd/zoekt-sourcegraph-indexserver/grpc/protos/zoekt/indexserver/v1" + "github.com/sourcegraph/zoekt/gitindex" "github.com/sourcegraph/zoekt/grpc/defaults" "github.com/sourcegraph/zoekt/grpc/grpcutil" "github.com/sourcegraph/zoekt/grpc/internalerrs" @@ -228,6 +229,10 @@ type Server struct { // before attempting a delta build. deltaShardNumberFallbackThreshold uint64 + // deltaAdmissionMode controls experimental delta admission behavior. Empty + // preserves current behavior. 
+ deltaAdmissionMode string + // repositoriesSkipSymbolsCalculationAllowList is an allowlist for repositories that // we skip calculating symbols metadata for during builds repositoriesSkipSymbolsCalculationAllowList map[string]struct{} @@ -626,6 +631,7 @@ func (s *Server) index(ctx context.Context, args *indexArgs) (state indexState, } args.DeltaShardNumberFallbackThreshold = s.deltaShardNumberFallbackThreshold + args.DeltaAdmissionMode = s.deltaAdmissionMode if _, ok := s.repositoriesSkipSymbolsCalculationAllowList[repositoryName]; ok { tr.LazyPrintf("skipping symbols calculation") @@ -1519,6 +1525,14 @@ func newServer(conf rootConfig) (*Server, error) { debugLog.Printf("disabling delta build fallback behavior - delta builds will be performed regardless of the number of preexisting shards") } + deltaAdmissionMode, err := deltaAdmissionModeFromEnv() + if err != nil { + return nil, err + } + if deltaAdmissionMode != "" { + debugLog.Printf("using experimental delta admission mode %q", deltaAdmissionMode) + } + reposShouldSkipSymbolsCalculation := getEnvWithDefaultEmptySet("SKIP_SYMBOLS_REPOS_ALLOWLIST") if len(reposShouldSkipSymbolsCalculation) > 0 { debugLog.Printf("skipping generating symbols metadata for: %s", joinStringSet(reposShouldSkipSymbolsCalculation, ", ")) @@ -1580,6 +1594,7 @@ func newServer(conf rootConfig) (*Server, error) { shardMerging: !conf.disableShardMerging, deltaBuildRepositoriesAllowList: deltaBuildRepositoriesAllowList, deltaShardNumberFallbackThreshold: deltaShardNumberFallbackThreshold, + deltaAdmissionMode: deltaAdmissionMode, repositoriesSkipSymbolsCalculationAllowList: reposShouldSkipSymbolsCalculation, hostname: conf.hostname, mergeOpts: mergeOpts{ @@ -1594,6 +1609,16 @@ func newServer(conf rootConfig) (*Server, error) { }, err } +func deltaAdmissionModeFromEnv() (string, error) { + mode := getEnvWithDefaultString("DELTA_ADMISSION_MODE", "") + switch mode { + case "", gitindex.DeltaAdmissionModeStatsV1: + return mode, nil + default: + 
return "", fmt.Errorf("invalid DELTA_ADMISSION_MODE %q", mode) + } +} + func newGRPCServer(logger sglog.Logger, s *Server, additionalOpts ...grpc.ServerOption) *grpc.Server { grpcServer := defaults.NewServer(logger, additionalOpts...) indexserverv1.RegisterSourcegraphIndexserverServiceServer(grpcServer, s) diff --git a/cmd/zoekt-sourcegraph-indexserver/main_test.go b/cmd/zoekt-sourcegraph-indexserver/main_test.go index 2fab54c68..8c4b5c381 100644 --- a/cmd/zoekt-sourcegraph-indexserver/main_test.go +++ b/cmd/zoekt-sourcegraph-indexserver/main_test.go @@ -22,6 +22,7 @@ import ( "github.com/sourcegraph/zoekt" configv1 "github.com/sourcegraph/zoekt/cmd/zoekt-sourcegraph-indexserver/grpc/protos/sourcegraph/zoekt/configuration/v1" + "github.com/sourcegraph/zoekt/gitindex" "github.com/sourcegraph/zoekt/internal/tenant" ) @@ -57,6 +58,37 @@ func TestIndexNoTenant(t *testing.T) { require.ErrorIs(t, err, tenant.ErrMissingTenant) } +func TestDeltaAdmissionModeFromEnv(t *testing.T) { + for _, tc := range []struct { + name string + value string + want string + wantErr bool + }{ + {name: "empty"}, + {name: "stats-v1", value: gitindex.DeltaAdmissionModeStatsV1, want: gitindex.DeltaAdmissionModeStatsV1}, + {name: "invalid", value: "current", wantErr: true}, + } { + t.Run(tc.name, func(t *testing.T) { + t.Setenv("DELTA_ADMISSION_MODE", tc.value) + + got, err := deltaAdmissionModeFromEnv() + if tc.wantErr { + if err == nil { + t.Fatal("expected error, got nil") + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if got != tc.want { + t.Fatalf("got %q, want %q", got, tc.want) + } + }) + } +} + func TestServer_parallelism(t *testing.T) { root, err := url.Parse("http://api.test") if err != nil { diff --git a/gitindex/index.go b/gitindex/index.go index 93c9148e2..7d689ed3b 100644 --- a/gitindex/index.go +++ b/gitindex/index.go @@ -19,6 +19,7 @@ import ( "bytes" "cmp" "context" + "encoding/json" "errors" "fmt" "io" @@ -31,6 +32,7 @@ import ( "sort" "strconv" 
"strings" + "time" "github.com/go-git/go-billy/v5/osfs" "github.com/go-git/go-git/v5/config" @@ -42,6 +44,7 @@ import ( "github.com/sourcegraph/zoekt" "github.com/sourcegraph/zoekt/ignore" "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" git "github.com/go-git/go-git/v5" ) @@ -391,6 +394,10 @@ type Options struct { // List of branch names to index, e.g. []string{"HEAD", "stable"} Branches []string + // ResolveHEADToBranch is reserved for a separate HEAD/worktree fix. It is + // intentionally not used by the delta admission logic. + ResolveHEADToBranch bool + // DeltaShardNumberFallbackThreshold defines an upper limit (inclusive) on the number of preexisting shards // that can exist before attempting another delta build. If the number of preexisting shards exceeds this threshold, // then a normal build will be performed instead. @@ -398,6 +405,101 @@ type Options struct { // If DeltaShardNumberFallbackThreshold is 0, then this fallback behavior is disabled: // a delta build will always be performed regardless of the number of preexisting shards. DeltaShardNumberFallbackThreshold uint64 + + // DeltaAdmissionMode controls experimental cost-based delta admission. The + // empty value preserves the historical delta behavior exactly. + DeltaAdmissionMode string + + // DeltaAdmissionThresholds configures the stats-v1 admission mode. Zero + // values are replaced with conservative defaults. + DeltaAdmissionThresholds DeltaAdmissionThresholds + + // DeltaAdmissionLogPath, if non-empty, receives one JSON object per + // stats-v1 delta admission decision. + DeltaAdmissionLogPath string +} + +const DeltaAdmissionModeStatsV1 = "stats-v1" + +// DeltaAdmissionThresholds are experimental cost gates for stats-v1 delta +// admission. These are performance/debt heuristics only; correctness gates such +// as branch-set compatibility and option hashes are checked separately before +// these thresholds are considered. 
+// +// A delta is accepted only when every configured gate is satisfied. The first +// failed gate becomes the logged rejection reason, but later gates may also +// have failed. +type DeltaAdmissionThresholds struct { + // MaxDeltaIndexedBytesRatio limits write mass: + // + // candidate_indexed_bytes / live_indexed_bytes + // + // This is the primary delta-vs-full signal. Delta indexing rewrites whole + // changed documents, so a tiny edit to a large file can still be expensive. + MaxDeltaIndexedBytesRatio float64 + + // MaxPhysicalLiveBytesRatio limits accumulated stacked-shard debt: + // + // next_physical_indexed_bytes / live_indexed_bytes + // + // Physical bytes include old documents hidden by file tombstones. Higher + // values mean more disk and more query-time index data than a clean full + // rebuild would need. + MaxPhysicalLiveBytesRatio float64 + + // MaxTombstonePathRatio limits accumulated stale-path metadata: + // + // next_tombstone_path_count / live_path_count + // + // This can force a rebuild even when the current candidate delta is small, + // because the stack has accumulated enough deleted/changed paths. + MaxTombstonePathRatio float64 + + // MaxShardFanoutRatio limits shard fanout relative to a clean rebuild + // estimate: + // + // next_shard_count / ceil(live_indexed_bytes / shard_max) + // + // It is intentionally approximate. It protects query fanout and file-system + // overhead when many small delta shards accumulate. Unlike the layer-count + // cap, this scales with repos whose clean full index naturally spans many + // shards. + MaxShardFanoutRatio float64 +} + +func (t DeltaAdmissionThresholds) withDefaults() DeltaAdmissionThresholds { + if t.MaxDeltaIndexedBytesRatio == 0 { + // A conservative write-mass default: once a candidate delta approaches + // one fifth of the live corpus, a clean rebuild is often competitive and + // avoids adding read debt. 
+ t.MaxDeltaIndexedBytesRatio = 0.20 + } + if t.MaxPhysicalLiveBytesRatio == 0 { + // Allow at most roughly 25% extra physical indexed bytes before forcing + // a full rebuild to compact stale documents. + t.MaxPhysicalLiveBytesRatio = 1.25 + } + if t.MaxTombstonePathRatio == 0 { + // Rebuild once about one fifth of live paths have stale entries in old + // shards. This keeps tombstone metadata and skip checks bounded. + t.MaxTombstonePathRatio = 0.20 + } + if t.MaxShardFanoutRatio == 0 { + // Allow up to 20x the shard count of a clean rebuild. This directly caps + // query fanout without forcing large repos to rebuild after an arbitrary + // number of tiny deltas. + t.MaxShardFanoutRatio = 20 + } + return t +} + +func validateDeltaAdmissionMode(mode string) error { + switch mode { + case "", DeltaAdmissionModeStatsV1: + return nil + default: + return fmt.Errorf("unknown delta admission mode %q", mode) + } } func expandBranches(repo *git.Repository, bs []string, prefix string) ([]string, error) { @@ -475,6 +577,9 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { if opts.RepoDir == "" { return false, fmt.Errorf("gitindex: must set RepoDir") } + if err := validateDeltaAdmissionMode(opts.DeltaAdmissionMode); err != nil { + return false, err + } opts.BuildOptions.RepositoryDescription.Source = opts.RepoDir @@ -543,6 +648,14 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { if err != nil { log.Printf("delta build: falling back to normal build since delta build failed, repository=%q, err=%s", opts.BuildOptions.RepositoryDescription.Name, err) opts.BuildOptions.IsDelta = false + } else if opts.DeltaAdmissionMode == DeltaAdmissionModeStatsV1 { + stats, err := prepareDeltaStatsV1(opts, repo, repos, changedOrRemovedFiles) + if err != nil { + log.Printf("delta build: falling back to normal build since stats-v1 admission rejected delta, repository=%q, err=%s", opts.BuildOptions.RepositoryDescription.Name, err) + 
opts.BuildOptions.IsDelta = false + } else { + opts.BuildOptions.RepositoryDescription.DeltaStats = stats + } } } @@ -551,6 +664,18 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { if err != nil { return false, fmt.Errorf("preparing normal build: %w", err) } + if opts.DeltaAdmissionMode == DeltaAdmissionModeStatsV1 { + stats, err := repositoryStatsForFiles(repos, opts.BuildOptions) + if err != nil { + return false, fmt.Errorf("computing stats-v1 full build stats: %w", err) + } + stats.PhysicalIndexedBytes = stats.LiveIndexedBytes + stats.PhysicalDocumentCount = stats.LiveDocumentCount + stats.TombstonePathCount = 0 + stats.DeltaLayerCount = 0 + stats.LastFullIndexTimeUnix = time.Now().Unix() + opts.BuildOptions.RepositoryDescription.DeltaStats = stats + } } reposByPath := map[string]BlobLocation{} @@ -846,6 +971,478 @@ type gitIndexConfig struct { prepareNormalBuild prepareNormalBuildFunc } +type deltaAdmissionFallbackError struct { + reason string +} + +func (e deltaAdmissionFallbackError) Error() string { + return "stats-v1 delta admission rejected delta: " + e.reason +} + +type deltaAdmissionDecisionLogEntry struct { + Time time.Time `json:"time"` + Repo string `json:"repo"` + Mode string `json:"mode"` + Accepted bool `json:"accepted"` + Reason string `json:"reason"` + BranchName string `json:"branch_name,omitempty"` + + ExistingStatsPresent bool `json:"existing_stats_present"` + + CandidateIndexedBytes uint64 `json:"candidate_indexed_bytes"` + CandidateDocumentCount uint64 `json:"candidate_document_count"` + CandidatePathCount uint64 `json:"candidate_path_count"` + ChangedOrDeletedPaths uint64 `json:"changed_or_deleted_paths"` + + LiveIndexedBytes uint64 `json:"live_indexed_bytes"` + LiveDocumentCount uint64 `json:"live_document_count"` + LivePathCount uint64 `json:"live_path_count"` + + NextLiveIndexedBytes uint64 `json:"next_live_indexed_bytes,omitempty"` + NextLiveDocumentCount uint64 `json:"next_live_document_count,omitempty"` + 
NextLivePathCount uint64 `json:"next_live_path_count,omitempty"` + + NextPhysicalIndexedBytes uint64 `json:"next_physical_indexed_bytes"` + NextPhysicalDocumentCount uint64 `json:"next_physical_document_count"` + NextTombstonePathCount uint64 `json:"next_tombstone_path_count"` + NextDeltaLayerCount uint64 `json:"next_delta_layer_count"` + ShardFanoutRatio float64 `json:"shard_fanout_ratio"` + + WriteBytesRatio *float64 `json:"write_bytes_ratio,omitempty"` + PhysicalLiveRatio *float64 `json:"physical_live_ratio,omitempty"` + TombstonePathRatio *float64 `json:"tombstone_path_ratio,omitempty"` + + Thresholds DeltaAdmissionThresholds `json:"thresholds"` +} + +func prepareDeltaStatsV1(options Options, repository *git.Repository, candidateRepos map[fileKey]BlobLocation, changedOrDeletedPaths []string) (*zoekt.RepositoryDeltaStats, error) { + existingRepository, _, ok, err := options.BuildOptions.FindRepositoryMetadata() + if err != nil { + return nil, fmt.Errorf("loading existing repository metadata: %w", err) + } + if !ok { + return nil, fmt.Errorf("loading existing repository metadata: not found") + } + + candidateStats, err := repositoryStatsForFiles(candidateRepos, options.BuildOptions) + if err != nil { + return nil, fmt.Errorf("computing delta candidate stats: %w", err) + } + + var currentStats *zoekt.RepositoryDeltaStats + liveStatsForDecision := existingRepository.DeltaStats + if liveStatsForDecision == nil { + currentStats, err = computeCurrentLiveStats(options, repository) + if err != nil { + return nil, err + } + liveStatsForDecision = currentStats + } + + existingPhysicalIndexedBytes := liveStatsForDecision.LiveIndexedBytes + existingPhysicalDocumentCount := liveStatsForDecision.LiveDocumentCount + existingDeltaLayerCount := uint64(0) + lastFullIndexTimeUnix := int64(0) + if stats := existingRepository.DeltaStats; stats != nil { + existingPhysicalIndexedBytes = stats.PhysicalIndexedBytes + existingPhysicalDocumentCount = stats.PhysicalDocumentCount + 
existingDeltaLayerCount = stats.DeltaLayerCount + lastFullIndexTimeUnix = stats.LastFullIndexTimeUnix + } else if stats, err := existingPhysicalStats(options.BuildOptions); err == nil { + existingPhysicalIndexedBytes = stats.PhysicalIndexedBytes + existingPhysicalDocumentCount = stats.PhysicalDocumentCount + } else { + log.Printf("stats-v1: failed to compute physical stats from old index for repository=%q: %s", options.BuildOptions.RepositoryDescription.Name, err) + } + + tombstones, err := existingFileTombstones(options.BuildOptions) + if err != nil { + return nil, fmt.Errorf("computing existing tombstones: %w", err) + } + for _, path := range changedOrDeletedPaths { + tombstones[path] = struct{}{} + } + + nextDeltaLayerCount := existingDeltaLayerCount + if candidateStats.LiveDocumentCount > 0 || len(changedOrDeletedPaths) > 0 { + nextDeltaLayerCount++ + } + + nextPhysicalIndexedBytes := existingPhysicalIndexedBytes + candidateStats.LiveIndexedBytes + nextPhysicalDocumentCount := existingPhysicalDocumentCount + candidateStats.LiveDocumentCount + if nextPhysicalIndexedBytes < liveStatsForDecision.LiveIndexedBytes { + nextPhysicalIndexedBytes = liveStatsForDecision.LiveIndexedBytes + } + if nextPhysicalDocumentCount < liveStatsForDecision.LiveDocumentCount { + nextPhysicalDocumentCount = liveStatsForDecision.LiveDocumentCount + } + + thresholds := options.DeltaAdmissionThresholds.withDefaults() + shardFanoutRatio := shardFanoutRatio(options.BuildOptions, liveStatsForDecision.LiveIndexedBytes, candidateStats.LiveIndexedBytes) + decision := deltaAdmissionDecisionLogEntry{ + Time: time.Now().UTC(), + Repo: options.BuildOptions.RepositoryDescription.Name, + Mode: DeltaAdmissionModeStatsV1, + Accepted: false, + BranchName: singleBranchName(options.BuildOptions.RepositoryDescription.Branches), + + ExistingStatsPresent: existingRepository.DeltaStats != nil, + + CandidateIndexedBytes: candidateStats.LiveIndexedBytes, + CandidateDocumentCount: 
candidateStats.LiveDocumentCount, + CandidatePathCount: candidateStats.LivePathCount, + ChangedOrDeletedPaths: uint64(len(changedOrDeletedPaths)), + + LiveIndexedBytes: liveStatsForDecision.LiveIndexedBytes, + LiveDocumentCount: liveStatsForDecision.LiveDocumentCount, + LivePathCount: liveStatsForDecision.LivePathCount, + + NextPhysicalIndexedBytes: nextPhysicalIndexedBytes, + NextPhysicalDocumentCount: nextPhysicalDocumentCount, + NextTombstonePathCount: uint64(len(tombstones)), + NextDeltaLayerCount: nextDeltaLayerCount, + ShardFanoutRatio: shardFanoutRatio, + + WriteBytesRatio: finiteRatio(candidateStats.LiveIndexedBytes, liveStatsForDecision.LiveIndexedBytes), + PhysicalLiveRatio: finiteRatio(nextPhysicalIndexedBytes, liveStatsForDecision.LiveIndexedBytes), + TombstonePathRatio: finiteRatio(uint64(len(tombstones)), liveStatsForDecision.LivePathCount), + + Thresholds: thresholds, + } + reject := func(reason string) (*zoekt.RepositoryDeltaStats, error) { + decision.Reason = reason + writeDeltaAdmissionDecisionLog(options, decision) + return nil, deltaAdmissionFallbackError{reason: reason} + } + + // Evaluate write-mass gates before read-debt gates. The write-mass gates + // answer "is this individual delta worth doing?", while the read-debt gates + // answer "has the existing stack become expensive enough to compact?" The + // logged reason is the first failed gate in this order. 
+ if r := ratio(candidateStats.LiveIndexedBytes, liveStatsForDecision.LiveIndexedBytes); r > thresholds.MaxDeltaIndexedBytesRatio { + return reject(fmt.Sprintf("write indexed bytes ratio %.4f exceeds %.4f", r, thresholds.MaxDeltaIndexedBytesRatio)) + } + if r := ratio(nextPhysicalIndexedBytes, liveStatsForDecision.LiveIndexedBytes); r > thresholds.MaxPhysicalLiveBytesRatio { + return reject(fmt.Sprintf("physical/live bytes ratio %.4f exceeds %.4f", r, thresholds.MaxPhysicalLiveBytesRatio)) + } + if r := ratio(uint64(len(tombstones)), liveStatsForDecision.LivePathCount); r > thresholds.MaxTombstonePathRatio { + return reject(fmt.Sprintf("tombstone path ratio %.4f exceeds %.4f", r, thresholds.MaxTombstonePathRatio)) + } + if shardFanoutRatio > thresholds.MaxShardFanoutRatio { + return reject(fmt.Sprintf("shard fanout ratio %.4f exceeds %.4f", shardFanoutRatio, thresholds.MaxShardFanoutRatio)) + } + + nextLiveStats := currentStats + if nextLiveStats == nil { + nextLiveStats, err = nextLiveStatsFromDelta(options, repository, existingRepository, candidateStats, candidateRepos, changedOrDeletedPaths) + if err != nil { + return nil, err + } + } + + if nextPhysicalIndexedBytes < nextLiveStats.LiveIndexedBytes { + nextPhysicalIndexedBytes = nextLiveStats.LiveIndexedBytes + } + if nextPhysicalDocumentCount < nextLiveStats.LiveDocumentCount { + nextPhysicalDocumentCount = nextLiveStats.LiveDocumentCount + } + + stats := &zoekt.RepositoryDeltaStats{ + LiveIndexedBytes: nextLiveStats.LiveIndexedBytes, + LiveDocumentCount: nextLiveStats.LiveDocumentCount, + LivePathCount: nextLiveStats.LivePathCount, + PhysicalIndexedBytes: nextPhysicalIndexedBytes, + PhysicalDocumentCount: nextPhysicalDocumentCount, + TombstonePathCount: uint64(len(tombstones)), + DeltaLayerCount: nextDeltaLayerCount, + LastFullIndexTimeUnix: lastFullIndexTimeUnix, + } + + decision.Accepted = true + decision.Reason = "accepted" + decision.NextLiveIndexedBytes = nextLiveStats.LiveIndexedBytes + 
decision.NextLiveDocumentCount = nextLiveStats.LiveDocumentCount + decision.NextLivePathCount = nextLiveStats.LivePathCount + decision.NextPhysicalIndexedBytes = nextPhysicalIndexedBytes + decision.NextPhysicalDocumentCount = nextPhysicalDocumentCount + writeDeltaAdmissionDecisionLog(options, decision) + + return stats, nil +} + +func writeDeltaAdmissionDecisionLog(options Options, decision deltaAdmissionDecisionLogEntry) { + if options.DeltaAdmissionLogPath == "" { + return + } + + f, err := os.OpenFile(options.DeltaAdmissionLogPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666) + if err != nil { + log.Printf("stats-v1: failed to open delta admission log %q: %s", options.DeltaAdmissionLogPath, err) + return + } + defer f.Close() + + if err := json.NewEncoder(f).Encode(decision); err != nil { + log.Printf("stats-v1: failed to write delta admission log %q: %s", options.DeltaAdmissionLogPath, err) + } +} + +func singleBranchName(branches []zoekt.RepositoryBranch) string { + if len(branches) != 1 { + return "" + } + return branches[0].Name +} + +func finiteRatio(n, d uint64) *float64 { + r := ratio(n, d) + if math.IsInf(r, 0) || math.IsNaN(r) { + return nil + } + return &r +} + +func computeCurrentLiveStats(options Options, repository *git.Repository) (*zoekt.RepositoryDeltaStats, error) { + currentRepos, _, err := prepareNormalBuild(options, repository) + if err != nil { + return nil, fmt.Errorf("computing current live stats: %w", err) + } + currentStats, err := repositoryStatsForFiles(currentRepos, options.BuildOptions) + if err != nil { + return nil, fmt.Errorf("computing current live stats: %w", err) + } + return currentStats, nil +} + +func nextLiveStatsFromDelta(options Options, repository *git.Repository, existingRepository *zoekt.Repository, candidateStats *zoekt.RepositoryDeltaStats, candidateRepos map[fileKey]BlobLocation, changedOrDeletedPaths []string) (*zoekt.RepositoryDeltaStats, error) { + if existingRepository.DeltaStats == nil { + return nil, 
fmt.Errorf("existing DeltaStats missing") + } + + affectedPaths := make(map[string]struct{}, len(changedOrDeletedPaths)+len(candidateRepos)) + for _, path := range changedOrDeletedPaths { + affectedPaths[path] = struct{}{} + } + for key := range candidateRepos { + affectedPaths[key.FullPath()] = struct{}{} + } + + oldAffectedRepos, err := oldLiveFilesForPaths(repository, existingRepository.Branches, affectedPaths) + if err != nil { + return nil, err + } + oldAffectedStats, err := repositoryStatsForFiles(oldAffectedRepos, options.BuildOptions) + if err != nil { + return nil, fmt.Errorf("computing old live stats for changed paths: %w", err) + } + + oldStats := existingRepository.DeltaStats + return &zoekt.RepositoryDeltaStats{ + LiveIndexedBytes: addAfterSubtract(oldStats.LiveIndexedBytes, oldAffectedStats.LiveIndexedBytes, candidateStats.LiveIndexedBytes), + LiveDocumentCount: addAfterSubtract(oldStats.LiveDocumentCount, oldAffectedStats.LiveDocumentCount, candidateStats.LiveDocumentCount), + LivePathCount: addAfterSubtract(oldStats.LivePathCount, oldAffectedStats.LivePathCount, candidateStats.LivePathCount), + PhysicalIndexedBytes: oldStats.PhysicalIndexedBytes, + PhysicalDocumentCount: oldStats.PhysicalDocumentCount, + TombstonePathCount: oldStats.TombstonePathCount, + DeltaLayerCount: oldStats.DeltaLayerCount, + LastFullIndexTimeUnix: oldStats.LastFullIndexTimeUnix, + }, nil +} + +func oldLiveFilesForPaths(repository *git.Repository, branches []zoekt.RepositoryBranch, paths map[string]struct{}) (map[fileKey]BlobLocation, error) { + repos := make(map[fileKey]BlobLocation) + if len(paths) == 0 { + return repos, nil + } + + for _, branch := range branches { + commit, err := getCommit(repository, "", branch.Version) + if err != nil { + return nil, fmt.Errorf("getting last indexed commit for branch %q: %w", branch.Name, err) + } + + tree, err := commit.Tree() + if err != nil { + return nil, fmt.Errorf("getting last indexed git tree for branch %q: %w", branch.Name, 
err) + } + + ig, err := newIgnoreMatcher(tree) + if err != nil { + return nil, fmt.Errorf("newIgnoreMatcher for branch %q: %w", branch.Name, err) + } + + for path := range paths { + if ig.Match(path) { + continue + } + + f, err := tree.File(path) + if err != nil { + if errors.Is(err, object.ErrFileNotFound) { + continue + } + return nil, fmt.Errorf("getting old file %q in branch %q: %w", path, branch.Name, err) + } + + key := fileKey{Path: path, ID: f.ID()} + if existing, ok := repos[key]; ok { + existing.Branches = append(existing.Branches, branch.Name) + repos[key] = existing + } else { + repos[key] = BlobLocation{ + GitRepo: repository, + Branches: []string{branch.Name}, + } + } + } + } + + return repos, nil +} + +func addAfterSubtract(base, subtract, add uint64) uint64 { + if subtract > base { + return add + } + return base - subtract + add +} + +func repositoryStatsForFiles(files map[fileKey]BlobLocation, opts index.Options) (*zoekt.RepositoryDeltaStats, error) { + stats := &zoekt.RepositoryDeltaStats{ + LiveDocumentCount: uint64(len(files)), + } + paths := make(map[string]struct{}, len(files)) + for key, location := range files { + name := key.FullPath() + paths[name] = struct{}{} + + indexedBytes := uint64(len(name)) + blob, err := location.GitRepo.BlobObject(key.ID) + if err == nil { + size := uint64(blob.Size) + if blob.Size <= int64(opts.SizeMax) || opts.IgnoreSizeMax(name) { + indexedBytes += size + } + } else if !errors.Is(err, plumbing.ErrObjectNotFound) { + return nil, fmt.Errorf("loading blob %s for %q: %w", key.ID, name, err) + } + stats.LiveIndexedBytes += indexedBytes + } + stats.LivePathCount = uint64(len(paths)) + return stats, nil +} + +func existingPhysicalStats(opts index.Options) (*zoekt.RepositoryDeltaStats, error) { + shards := opts.FindAllShards() + if len(shards) == 0 { + return nil, fmt.Errorf("no existing shards") + } + + stats := &zoekt.RepositoryDeltaStats{} + found := false + for _, shard := range shards { + f, err := os.Open(shard) 
+ if err != nil { + return nil, err + } + indexFile, err := index.NewIndexFile(f) + if err != nil { + return nil, err + } + searcher, err := index.NewSearcher(indexFile) + if err != nil { + indexFile.Close() + return nil, err + } + list, err := searcher.List(context.Background(), &query.Const{Value: true}, &zoekt.ListOptions{}) + searcher.Close() + if err != nil { + return nil, err + } + for _, entry := range list.Repos { + if sameRepository(&entry.Repository, &opts.RepositoryDescription) { + found = true + stats.PhysicalIndexedBytes += uint64(max(entry.Stats.ContentBytes, 0)) + stats.PhysicalDocumentCount += uint64(max(entry.Stats.Documents, 0)) + } + } + } + if !found { + return nil, fmt.Errorf("repository %q not found in existing shards", opts.RepositoryDescription.Name) + } + return stats, nil +} + +func existingFileTombstones(opts index.Options) (map[string]struct{}, error) { + tombstones := map[string]struct{}{} + for _, shard := range opts.FindAllShards() { + repositories, _, err := index.ReadMetadataPathAlive(shard) + if err != nil { + return nil, err + } + for _, repository := range repositories { + if !sameRepository(repository, &opts.RepositoryDescription) { + continue + } + for path := range repository.FileTombstones { + tombstones[path] = struct{}{} + } + } + } + return tombstones, nil +} + +func ensureStatsV1DeltaShardsSupported(opts index.Options) error { + for _, shard := range opts.FindAllShards() { + if strings.HasPrefix(filepath.Base(shard), "compound-") { + return fmt.Errorf("stats-v1 delta builds don't support repositories contained in compound shards (shard %q)", shard) + } + repositories, _, err := index.ReadMetadataPathAlive(shard) + if err != nil { + return err + } + if len(repositories) > 1 { + return fmt.Errorf("stats-v1 delta builds don't support compound shards (shard %q contains %d repositories)", shard, len(repositories)) + } + } + return nil +} + +func sameRepository(a, b *zoekt.Repository) bool { + if a.ID != 0 || b.ID != 0 { + 
return a.ID == b.ID + } + return a.Name == b.Name +} + +func ratio(n, d uint64) float64 { + if d == 0 { + if n == 0 { + return 0 + } + return math.Inf(1) + } + return float64(n) / float64(d) +} + +func shardFanoutRatio(opts index.Options, liveIndexedBytes, candidateIndexedBytes uint64) float64 { + existingShardCount := uint64(len(opts.FindAllShards())) + nextShardCount := existingShardCount + if candidateIndexedBytes > 0 { + nextShardCount += max(ceilDiv(candidateIndexedBytes, uint64(opts.ShardMax)), 1) + } + + cleanShardCount := max(ceilDiv(liveIndexedBytes, uint64(opts.ShardMax)), 1) + return ratio(nextShardCount, cleanShardCount) +} + +func ceilDiv(n, d uint64) uint64 { + if d == 0 { + return 0 + } + return (n + d - 1) / d +} + func prepareDeltaBuild(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { if options.Submodules { return nil, nil, nil, fmt.Errorf("delta builds currently don't support submodule indexing") @@ -860,6 +1457,11 @@ func prepareDeltaBuild(options Options, repository *git.Repository) (repos map[f if !ok { return nil, nil, nil, fmt.Errorf("no existing shards found for repository") } + if options.DeltaAdmissionMode == DeltaAdmissionModeStatsV1 { + if err := ensureStatsV1DeltaShardsSupported(options.BuildOptions); err != nil { + return nil, nil, nil, err + } + } if options.DeltaShardNumberFallbackThreshold > 0 { // HACK: For our interim compaction strategy, we force a full normal index once diff --git a/gitindex/index_test.go b/gitindex/index_test.go index d8d303751..66dc71340 100644 --- a/gitindex/index_test.go +++ b/gitindex/index_test.go @@ -17,6 +17,7 @@ package gitindex import ( "bytes" "context" + "encoding/json" "errors" "net/url" "os" @@ -172,6 +173,272 @@ func TestIndexGitRepo_Worktree(t *testing.T) { } } +func TestIndexGitRepo_DeltaAdmissionStatsV1WritesForwardAndUsesDeltaDebt(t *testing.T) { + t.Parallel() + + 
repositoryDir := t.TempDir() + indexDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "main") + writeAndCommitFile(t, repositoryDir, "stable.txt", "stable\n", "stable") + writeAndCommitFile(t, repositoryDir, "changing.txt", "alpha\n", "alpha") + + opts := Options{ + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: []string{"main"}, + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repository"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } + + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + repo := indexedRepositoryForTest(t, indexDir, "repository") + assertDeltaStats(t, repo, func(stats *zoekt.RepositoryDeltaStats) { + if stats.DeltaLayerCount != 0 { + t.Fatalf("initial full build DeltaLayerCount = %d, want 0", stats.DeltaLayerCount) + } + if stats.TombstonePathCount != 0 { + t.Fatalf("initial full build TombstonePathCount = %d, want 0", stats.TombstonePathCount) + } + if stats.LiveDocumentCount != 2 || stats.PhysicalDocumentCount != 2 { + t.Fatalf("initial doc counts = live %d physical %d, want 2/2", stats.LiveDocumentCount, stats.PhysicalDocumentCount) + } + if stats.LiveIndexedBytes == 0 || stats.PhysicalIndexedBytes != stats.LiveIndexedBytes { + t.Fatalf("initial bytes = live %d physical %d, want nonzero/equal", stats.LiveIndexedBytes, stats.PhysicalIndexedBytes) + } + }) + + writeAndCommitFile(t, repositoryDir, "changing.txt", "beta\n", "beta") + logPath := filepath.Join(indexDir, "delta-admission.jsonl") + deltaOpts := opts + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + deltaOpts.DeltaAdmissionThresholds = DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 10, + MaxPhysicalLiveBytesRatio: 10, + MaxTombstonePathRatio: 10, + } + deltaBuildCalled, normalBuildCalled := indexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build 
to be attempted") + } + if normalBuildCalled { + t.Fatal("expected stats-v1 to accept a small first delta") + } + + decisions := readDeltaAdmissionLogEntries(t, logPath) + if len(decisions) != 1 { + t.Fatalf("got %d admission log entries, want 1", len(decisions)) + } + if !decisions[0].Accepted { + t.Fatalf("first decision accepted = false, reason %q", decisions[0].Reason) + } + if decisions[0].CandidateDocumentCount != 1 { + t.Fatalf("candidate document count = %d, want 1", decisions[0].CandidateDocumentCount) + } + if decisions[0].WriteBytesRatio == nil || *decisions[0].WriteBytesRatio <= 0 { + t.Fatalf("write bytes ratio = %v, want positive", decisions[0].WriteBytesRatio) + } + + repo = indexedRepositoryForTest(t, indexDir, "repository") + assertDeltaStats(t, repo, func(stats *zoekt.RepositoryDeltaStats) { + if stats.DeltaLayerCount != 1 { + t.Fatalf("first delta DeltaLayerCount = %d, want 1", stats.DeltaLayerCount) + } + if stats.TombstonePathCount != 1 { + t.Fatalf("first delta TombstonePathCount = %d, want 1", stats.TombstonePathCount) + } + if stats.LiveDocumentCount != 2 { + t.Fatalf("first delta LiveDocumentCount = %d, want 2", stats.LiveDocumentCount) + } + if stats.PhysicalDocumentCount <= stats.LiveDocumentCount { + t.Fatalf("first delta should record physical document debt, got physical %d live %d", stats.PhysicalDocumentCount, stats.LiveDocumentCount) + } + if stats.PhysicalIndexedBytes <= stats.LiveIndexedBytes { + t.Fatalf("first delta should record physical byte debt, got physical %d live %d", stats.PhysicalIndexedBytes, stats.LiveIndexedBytes) + } + }) + assertLiveDeltaStatsMatchFullBuild(t, opts, repo.DeltaStats) + + writeAndCommitFile(t, repositoryDir, "changing.txt", "gamma\n", "gamma") + deltaBuildCalled, normalBuildCalled = indexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + t.Fatal("expected stats-v1 to accept another small delta even though 
layer count increases") + } + + decisions = readDeltaAdmissionLogEntries(t, logPath) + if len(decisions) != 2 { + t.Fatalf("got %d admission log entries, want 2", len(decisions)) + } + if !decisions[1].Accepted { + t.Fatalf("second decision accepted = false, reason %q", decisions[1].Reason) + } + + repo = indexedRepositoryForTest(t, indexDir, "repository") + assertDeltaStats(t, repo, func(stats *zoekt.RepositoryDeltaStats) { + if stats.DeltaLayerCount != 2 { + t.Fatalf("second delta DeltaLayerCount = %d, want 2", stats.DeltaLayerCount) + } + if stats.PhysicalIndexedBytes <= stats.LiveIndexedBytes { + t.Fatalf("second delta should preserve physical byte debt, got physical %d live %d", stats.PhysicalIndexedBytes, stats.LiveIndexedBytes) + } + }) +} + +func TestIndexGitRepo_DeltaAdmissionStatsV1BackfillsOldIndexStats(t *testing.T) { + t.Parallel() + + repositoryDir := t.TempDir() + indexDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "main") + writeAndCommitFile(t, repositoryDir, "one.txt", "one\n", "one") + writeAndCommitFile(t, repositoryDir, "two.txt", "two\n", "two") + + opts := Options{ + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: []string{"main"}, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repository"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("initial legacy IndexGitRepo: %v", err) + } + if repo := indexedRepositoryForTest(t, indexDir, "repository"); repo.DeltaStats != nil { + t.Fatalf("empty delta admission mode should not write DeltaStats, got %+v", repo.DeltaStats) + } + + writeAndCommitFile(t, repositoryDir, "one.txt", "one updated\n", "one updated") + deltaOpts := opts + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionMode = DeltaAdmissionModeStatsV1 + deltaOpts.DeltaAdmissionThresholds = DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 10, + MaxPhysicalLiveBytesRatio: 10, + MaxTombstonePathRatio: 10, 
+ } + + deltaBuildCalled, normalBuildCalled := indexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + t.Fatal("expected stats-v1 to use manual old-index stats and accept the small delta") + } + + repo := indexedRepositoryForTest(t, indexDir, "repository") + assertDeltaStats(t, repo, func(stats *zoekt.RepositoryDeltaStats) { + if stats.LiveDocumentCount != 2 { + t.Fatalf("LiveDocumentCount = %d, want 2", stats.LiveDocumentCount) + } + if stats.PhysicalDocumentCount <= stats.LiveDocumentCount { + t.Fatalf("expected physical document debt after delta, got physical %d live %d", stats.PhysicalDocumentCount, stats.LiveDocumentCount) + } + if stats.DeltaLayerCount != 1 { + t.Fatalf("DeltaLayerCount = %d, want 1", stats.DeltaLayerCount) + } + }) +} + +func TestIndexGitRepo_DeltaAdmissionStatsV1FallsBackOnWriteMass(t *testing.T) { + t.Parallel() + + repositoryDir := t.TempDir() + indexDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "main") + writeAndCommitFile(t, repositoryDir, "large.txt", strings.Repeat("a", 1024), "large") + writeAndCommitFile(t, repositoryDir, "small.txt", "small\n", "small") + + opts := Options{ + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: []string{"main"}, + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repository"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + writeAndCommitFile(t, repositoryDir, "large.txt", strings.Repeat("b", 1024), "large updated") + deltaOpts := opts + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionThresholds = DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 0.01, + MaxPhysicalLiveBytesRatio: 10, + MaxTombstonePathRatio: 10, + } + + deltaBuildCalled, normalBuildCalled := 
indexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if !normalBuildCalled { + t.Fatal("expected stats-v1 to fall back when candidate write mass exceeds threshold") + } + + repo := indexedRepositoryForTest(t, indexDir, "repository") + assertDeltaStats(t, repo, func(stats *zoekt.RepositoryDeltaStats) { + if stats.DeltaLayerCount != 0 { + t.Fatalf("fallback normal rebuild DeltaLayerCount = %d, want 0", stats.DeltaLayerCount) + } + if stats.PhysicalIndexedBytes != stats.LiveIndexedBytes { + t.Fatalf("fallback normal rebuild bytes = physical %d live %d, want equal", stats.PhysicalIndexedBytes, stats.LiveIndexedBytes) + } + }) +} + +func TestIndexGitRepo_DeltaAdmissionEmptyModePreservesCurrentRules(t *testing.T) { + t.Parallel() + + repositoryDir := t.TempDir() + indexDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "main") + writeAndCommitFile(t, repositoryDir, "large.txt", strings.Repeat("a", 1024), "large") + + opts := Options{ + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: []string{"main"}, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repository"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + writeAndCommitFile(t, repositoryDir, "large.txt", strings.Repeat("b", 1024), "large updated") + deltaOpts := opts + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionThresholds = DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 0.01, + } + deltaBuildCalled, normalBuildCalled := indexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + t.Fatal("empty delta admission mode should preserve current delta behavior and ignore stats thresholds") + } + if repo := indexedRepositoryForTest(t, indexDir, "repository"); repo.DeltaStats != nil { + 
t.Fatalf("empty delta admission mode should not write DeltaStats, got %+v", repo.DeltaStats) + } +} + func TestOpenRepoVariants(t *testing.T) { t.Parallel() @@ -342,6 +609,149 @@ func cloneBareRepo(t *testing.T, repoDir string) string { return bareDir } +func indexedRepositoryForTest(t *testing.T, indexDir, repoName string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: repoName, + }, + } + + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository %q not found", repoName) + } + + return repo +} + +func repositoryBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} + +func searchFileNamesForTest(t *testing.T, indexDir, pattern string) []string { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%s): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), &query.Substring{Pattern: pattern}, &zoekt.SearchOptions{}) + if err != nil { + t.Fatalf("Search(%q): %v", pattern, err) + } + + var names []string + for _, file := range result.Files { + names = append(names, file.FileName) + } + sort.Strings(names) + return names +} + +func writeAndCommitFile(t *testing.T, repoDir, name, content, message string) { + t.Helper() + + file := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(file), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(file), err) + } + if err := os.WriteFile(file, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", file, err) + } + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", message) +} + +func 
indexGitRepoWithPrepareSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func assertDeltaStats(t *testing.T, repo *zoekt.Repository, check func(*zoekt.RepositoryDeltaStats)) { + t.Helper() + if repo.DeltaStats == nil { + t.Fatal("DeltaStats is nil") + } + check(repo.DeltaStats) +} + +func assertLiveDeltaStatsMatchFullBuild(t *testing.T, opts Options, deltaStats *zoekt.RepositoryDeltaStats) { + t.Helper() + + cleanIndexDir := t.TempDir() + cleanOpts := opts + cleanOpts.BuildOptions.IndexDir = cleanIndexDir + cleanOpts.BuildOptions.IsDelta = false + cleanOpts.BuildOptions.RepositoryDescription.DeltaStats = nil + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + cleanRepo := indexedRepositoryForTest(t, cleanIndexDir, cleanOpts.BuildOptions.RepositoryDescription.Name) + assertDeltaStats(t, cleanRepo, func(cleanStats *zoekt.RepositoryDeltaStats) { + if deltaStats.LiveIndexedBytes != cleanStats.LiveIndexedBytes { + t.Fatalf("LiveIndexedBytes = %d, want clean full build %d", deltaStats.LiveIndexedBytes, cleanStats.LiveIndexedBytes) + } + if deltaStats.LiveDocumentCount != cleanStats.LiveDocumentCount { + 
t.Fatalf("LiveDocumentCount = %d, want clean full build %d", deltaStats.LiveDocumentCount, cleanStats.LiveDocumentCount) + } + if deltaStats.LivePathCount != cleanStats.LivePathCount { + t.Fatalf("LivePathCount = %d, want clean full build %d", deltaStats.LivePathCount, cleanStats.LivePathCount) + } + }) +} + +func readDeltaAdmissionLogEntries(t *testing.T, path string) []deltaAdmissionDecisionLogEntry { + t.Helper() + + blob, err := os.ReadFile(path) + if err != nil { + t.Fatalf("ReadFile(%q): %v", path, err) + } + + var entries []deltaAdmissionDecisionLogEntry + for _, line := range strings.Split(strings.TrimSpace(string(blob)), "\n") { + if line == "" { + continue + } + var entry deltaAdmissionDecisionLogEntry + if err := json.Unmarshal([]byte(line), &entry); err != nil { + t.Fatalf("Unmarshal(%q): %v", line, err) + } + entries = append(entries, entry) + } + return entries +} + func TestIndexDeltaBasic(t *testing.T) { t.Parallel() diff --git a/index/builder.go b/index/builder.go index 41e89682a..47e7a7095 100644 --- a/index/builder.go +++ b/index/builder.go @@ -735,6 +735,7 @@ func (b *Builder) Finish() error { repository.LatestCommitDate = b.opts.RepositoryDescription.LatestCommitDate repository.Metadata = b.opts.RepositoryDescription.Metadata + repository.DeltaStats = b.opts.RepositoryDescription.DeltaStats tempPath, finalPath, err := JsonMarshalRepoMetaTemp(shard, repository) if err != nil { diff --git a/index/builder_test.go b/index/builder_test.go index f4eb5d0c8..987cd3e99 100644 --- a/index/builder_test.go +++ b/index/builder_test.go @@ -532,6 +532,11 @@ func TestBuilder_BranchNamesEqual(t *testing.T) { newBranches: []zoekt.RepositoryBranch{{Name: "release", Version: "v1"}}, expected: false, }, + { + oldBranches: []zoekt.RepositoryBranch{{Name: "HEAD", Version: "v1"}}, + newBranches: []zoekt.RepositoryBranch{{Name: "main", Version: "v1"}}, + expected: false, + }, { oldBranches: []zoekt.RepositoryBranch{{Name: "main", Version: "v1"}}, newBranches: 
[]zoekt.RepositoryBranch{}, @@ -713,6 +718,40 @@ func TestBuilder_DeltaShardsMetadataInOlderShards(t *testing.T) { LatestCommitDate: olderTime, }, }, + { + name: "update advisory delta stats", + originalRepository: zoekt.Repository{ + Name: "repo", + ID: 1, + Branches: []zoekt.RepositoryBranch{ + {Name: "main", Version: "v1"}, + }, + DeltaStats: &zoekt.RepositoryDeltaStats{ + LiveIndexedBytes: 10, + LiveDocumentCount: 1, + LivePathCount: 1, + PhysicalIndexedBytes: 10, + PhysicalDocumentCount: 1, + }, + }, + updatedRepository: zoekt.Repository{ + Name: "repo", + ID: 1, + Branches: []zoekt.RepositoryBranch{ + {Name: "main", Version: "v2"}, + }, + DeltaStats: &zoekt.RepositoryDeltaStats{ + LiveIndexedBytes: 11, + LiveDocumentCount: 1, + LivePathCount: 1, + PhysicalIndexedBytes: 22, + PhysicalDocumentCount: 2, + TombstonePathCount: 1, + DeltaLayerCount: 1, + LastFullIndexTimeUnix: 123, + }, + }, + }, + } { test := test diff --git a/scripts/analyze-query-stats-stacked-vs-clean.sh b/scripts/analyze-query-stats-stacked-vs-clean.sh new file mode 100755 index 000000000..5e98f1c39 --- /dev/null +++ b/scripts/analyze-query-stats-stacked-vs-clean.sh @@ -0,0 +1,208 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat >&2 <<'EOF' +usage: scripts/analyze-query-stats-stacked-vs-clean.sh TSV + +Analyzes output from scripts/query-stats-stacked-vs-clean.sh. It derives +stacked/clean ratios and highlights signals that may be useful for deciding +delta vs. full rebuild thresholds. +EOF +} + +if [[ $# -ne 1 || "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then + usage + exit $([[ $# -eq 1 ]] && echo 0 || echo 2) +fi + +input="$1" +if [[ ! 
-f "$input" ]]; then + echo "input does not exist: $input" >&2 + exit 2 +fi + +tmp="$(mktemp /tmp/zoekt-query-analysis.XXXXXX)" +trap 'rm -f "$tmp"' EXIT + +awk -F '\t' ' +NR == 1 { next } +{ + k=$1 + target=$2 + stacked_shards=$3+0 + clean_shards=$4+0 + stacked_bytes=$5+0 + clean_bytes=$6+0 + stacked_real=$7+0 + clean_real=$23+0 + stacked_content=$10+0 + clean_content=$26+0 + stacked_index_loaded=$11+0 + clean_index_loaded=$27+0 + stacked_files_considered=$13+0 + clean_files_considered=$29+0 + stacked_files_loaded=$14+0 + clean_files_loaded=$30+0 + stacked_shards_scanned=$16+0 + clean_shards_scanned=$32+0 + stacked_shards_skipped_filter=$17+0 + clean_shards_skipped_filter=$33+0 + stacked_matches=$18+0 + clean_matches=$34+0 + stacked_ngram_matches=$19+0 + clean_ngram_matches=$35+0 + stacked_ngram_lookups=$20+0 + clean_ngram_lookups=$36+0 + stacked_construction=$21+0 + clean_construction=$37+0 + stacked_search=$22+0 + clean_search=$38+0 + + size_ratio = ratio(stacked_bytes, clean_bytes) + real_ratio = ratio(stacked_real, clean_real) + index_loaded_ratio = ratio(stacked_index_loaded, clean_index_loaded) + files_considered_ratio = ratio(stacked_files_considered, clean_files_considered) + files_loaded_ratio = ratio(stacked_files_loaded, clean_files_loaded) + shards_scanned_ratio = ratio(stacked_shards_scanned, clean_shards_scanned) + ngram_matches_ratio = ratio(stacked_ngram_matches, clean_ngram_matches) + ngram_lookups_ratio = ratio(stacked_ngram_lookups, clean_ngram_lookups) + construction_ratio = ratio(stacked_construction, clean_construction) + search_ratio = ratio(stacked_search, clean_search) + stale_shard_fraction = stacked_shards > 0 ? stacked_shards_skipped_filter / stacked_shards : 0 + scanned_shard_fraction = stacked_shards > 0 ? stacked_shards_scanned / stacked_shards : 0 + skipped_per_scanned = stacked_shards_scanned > 0 ? 
stacked_shards_skipped_filter / stacked_shards_scanned : 0 + + print k, target, stacked_shards, clean_shards, stacked_bytes, clean_bytes, \ + size_ratio, real_ratio, index_loaded_ratio, files_considered_ratio, \ + files_loaded_ratio, shards_scanned_ratio, ngram_matches_ratio, \ + ngram_lookups_ratio, construction_ratio, search_ratio, \ + stale_shard_fraction, scanned_shard_fraction, skipped_per_scanned, \ + stacked_ngram_lookups, clean_ngram_lookups, stacked_shards_scanned, \ + clean_shards_scanned, stacked_files_considered, clean_files_considered, \ + stacked_content, clean_content, stacked_matches, clean_matches +} +function ratio(a, b) { + if (b == 0) { + return a == 0 ? 0 : 999999999 + } + return a / b +} +' "$input" > "$tmp" + +echo "== Summary ==" +awk ' +{ + n++ + sum_size += $7 + sum_real += $8 + sum_index_loaded += $9 + sum_files_considered += $10 + sum_shards_scanned += $12 + sum_ngram_matches += $13 + sum_ngram_lookups += $14 + sum_construction += $15 + sum_search += $16 + sum_stale_fraction += $17 + if (n == 1 || $7 > max_size) { max_size=$7; max_size_k=$1 } + if (n == 1 || $8 > max_real) { max_real=$8; max_real_k=$1 } + if (n == 1 || $14 > max_ngram_lookups) { max_ngram_lookups=$14; max_ngram_lookups_k=$1 } + if (n == 1 || $15 > max_construction) { max_construction=$15; max_construction_k=$1 } +} +END { + printf "rows: %d\n", n + printf "avg_size_ratio: %.3f\n", sum_size/n + printf "avg_real_ratio: %.3f\n", sum_real/n + printf "avg_index_loaded_ratio: %.3f\n", sum_index_loaded/n + printf "avg_files_considered_ratio: %.3f\n", sum_files_considered/n + printf "avg_shards_scanned_ratio: %.3f\n", sum_shards_scanned/n + printf "avg_ngram_matches_ratio: %.3f\n", sum_ngram_matches/n + printf "avg_ngram_lookups_ratio: %.3f\n", sum_ngram_lookups/n + printf "avg_matchtree_construction_ratio: %.3f\n", sum_construction/n + printf "avg_matchtree_search_ratio: %.3f\n", sum_search/n + printf "avg_stale_shard_fraction: %.3f\n", sum_stale_fraction/n + printf 
"max_size_ratio: %.3f at k=%s\n", max_size, max_size_k + printf "max_real_ratio: %.3f at k=%s\n", max_real, max_real_k + printf "max_ngram_lookup_ratio: %.3f at k=%s\n", max_ngram_lookups, max_ngram_lookups_k + printf "max_matchtree_construction_ratio: %.3f at k=%s\n", max_construction, max_construction_k +} +' "$tmp" + +echo +echo "== Correlations With Query Work Ratios ==" +awk ' +{ + add("size_ratio", $7, $14) + add("stacked_shards", $3, $14) + add("stale_shard_fraction", $17, $14) + add("scanned_shard_fraction", $18, $14) + add("skipped_per_scanned", $19, $14) + add("size_ratio_vs_real", $7, $8) + add("stacked_shards_vs_real", $3, $8) +} +END { + print "x_metric\ty_metric\tpearson" + for (name in n) { + split(name, parts, SUBSEP) + x=parts[1]; y=parts[2] + num=sumxy[name] - sumx[name]*sumy[name]/n[name] + denx=sumx2[name] - sumx[name]*sumx[name]/n[name] + deny=sumy2[name] - sumy[name]*sumy[name]/n[name] + corr=(denx > 0 && deny > 0) ? num / sqrt(denx * deny) : 0 + printf "%s\t%s\t%.3f\n", x, y, corr + } +} +function add(xname, x, y) { + key=xname SUBSEP "ngram_lookup_ratio" + if (xname ~ /_vs_real$/) { + sub(/_vs_real$/, "", xname) + key=xname SUBSEP "real_ratio" + } + n[key]++ + sumx[key]+=x + sumy[key]+=y + sumx2[key]+=x*x + sumy2[key]+=y*y + sumxy[key]+=x*y +} +' "$tmp" | sort | column -t -s $'\t' + +echo +echo "== Rows With Largest Ngram Lookup Inflation ==" +sort -k14,14nr "$tmp" | head -10 | awk ' +BEGIN { + print "k\tsize_ratio\tstacked_shards\tstale_shard_fraction\tshards_scanned\tfiles_considered\tngram_lookup_ratio\treal_ratio" +} +{ + printf "%s\t%.3f\t%s\t%.3f\t%s\t%s\t%.3f\t%.3f\n", $1, $7, $3, $17, $22, $24, $14, $8 +} +' | column -t -s $'\t' + +echo +echo "== Rows Where Stacked Query Did Extra Candidate Work But Not Extra Content IO ==" +awk '$24 > $25 && $26 == $27 { + printf "%s\t%.3f\t%s\t%s\t%s\t%s\t%.3f\t%.3f\n", $1, $7, $3, $22, $24, $26, $14, $8 +}' "$tmp" | { + echo "k size_ratio stacked_shards shards_scanned files_considered 
content_bytes ngram_lookup_ratio real_ratio" + cat +} | head -15 | column -t -s $'\t' + +echo +echo "== Possible Threshold Hints ==" +awk ' +{ + # These are query-work observations, not proposed defaults. + if ($14 >= 10 && first_ngram10 == "") first_ngram10=$1 + if ($14 >= 20 && first_ngram20 == "") first_ngram20=$1 + if ($7 >= 1.5 && first_size15 == "") first_size15=$1 + if ($7 >= 2.0 && first_size20 == "") first_size20=$1 + if ($3 >= 20 && first_shards20 == "") first_shards20=$1 +} +END { + printf "first k with ngram_lookup_ratio >= 10x: %s\n", first_ngram10 + printf "first k with ngram_lookup_ratio >= 20x: %s\n", first_ngram20 + printf "first k with size_ratio >= 1.5x: %s\n", first_size15 + printf "first k with size_ratio >= 2.0x: %s\n", first_size20 + printf "first k with stacked_shards >= 20: %s\n", first_shards20 +} +' "$tmp" diff --git a/scripts/delta-admission-one-step.sh b/scripts/delta-admission-one-step.sh new file mode 100755 index 000000000..cb327fc6a --- /dev/null +++ b/scripts/delta-admission-one-step.sh @@ -0,0 +1,241 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat >&2 <<'EOF' +usage: scripts/delta-admission-one-step.sh -repo WORKTREE -index INDEX_DIR -binary ZOEKT_GIT_INDEX -base COMMIT -k N [-log JSONL] + +Runs one clean delta-admission calibration step: + 1. checkout WORKTREE to BASE + 2. delete and recreate INDEX_DIR + 3. full-index BASE with stats-v1 + 4. checkout WORKTREE to BASE~N + 5. delta-index BASE~N with stats-v1 and JSONL decision logging + 6. print a compact tab-separated summary, including wall/user/sys time and + memory/process counters from /usr/bin/time when available + +The worktree should be disposable. 
+EOF +} + +repo="" +index_dir="" +binary="" +base="" +k="" +log_path="" + +while [[ $# -gt 0 ]]; do + case "$1" in + -repo) + repo="$2" + shift 2 + ;; + -index) + index_dir="$2" + shift 2 + ;; + -binary) + binary="$2" + shift 2 + ;; + -base) + base="$2" + shift 2 + ;; + -k) + k="$2" + shift 2 + ;; + -log) + log_path="$2" + shift 2 + ;; + -h|--help) + usage + exit 0 + ;; + *) + usage + echo "unknown argument: $1" >&2 + exit 2 + ;; + esac +done + +if [[ -z "$repo" || -z "$index_dir" || -z "$binary" || -z "$base" || -z "$k" ]]; then + usage + exit 2 +fi + +if [[ -z "$log_path" ]]; then + log_path="$index_dir/delta-admission.jsonl" +fi + +if [[ ! -x "$binary" ]]; then + echo "binary is not executable: $binary" >&2 + exit 2 +fi + +target="${base}~${k}" +target_commit="$(git -C "$repo" rev-parse "$target")" + +git -C "$repo" checkout --detach "$base" >/dev/null 2>&1 +git -C "$repo" reset --hard "$base" >/dev/null +rm -rf "$index_dir" +mkdir -p "$index_dir" + +full_time_file="$(mktemp /tmp/zoekt-delta-full-time.XXXXXX)" +delta_time_file="$(mktemp /tmp/zoekt-delta-step-time.XXXXXX)" +trap 'rm -f "$full_time_file" "$delta_time_file"' EXIT + +timed_run() { + local time_file="$1" + local stdout_file="$2" + local stderr_file="$3" + shift 3 + + /usr/bin/time -l bash -c ' + stdout_file="$1" + stderr_file="$2" + shift 2 + "$@" >"$stdout_file" 2>"$stderr_file" + ' bash "$stdout_file" "$stderr_file" "$@" 2>"$time_file" +} + +timed_run "$full_time_file" /tmp/zoekt-delta-full.stdout /tmp/zoekt-delta-full.stderr \ + "$binary" \ + -index "$index_dir" \ + -branches HEAD \ + -submodules=false \ + -disable_ctags \ + -delta_admission_mode stats-v1 \ + "$repo" + +git -C "$repo" checkout --detach "$target_commit" >/dev/null 2>&1 +git -C "$repo" reset --hard "$target_commit" >/dev/null + +timed_run "$delta_time_file" /tmp/zoekt-delta-step.stdout /tmp/zoekt-delta-step.stderr \ + "$binary" \ + -index "$index_dir" \ + -branches HEAD \ + -submodules=false \ + -disable_ctags \ + -delta \ + 
-delta_admission_mode stats-v1 \ + -delta_admission_log_json "$log_path" \ + "$repo" + +time_first_line_stat() { + local file="$1" + local position="$2" + awk -v position="$position" 'NR == 1 {print $position}' "$file" +} + +time_l_stat() { + local file="$1" + local label="$2" + awk -v label="$label" ' + $0 ~ "^[[:space:]]*[0-9]+[[:space:]]+" label "$" {print $1; found=1} + END {if (!found) print ""} + ' "$file" +} + +full_real="$(time_first_line_stat "$full_time_file" 1)" +full_user="$(time_first_line_stat "$full_time_file" 3)" +full_sys="$(time_first_line_stat "$full_time_file" 5)" +full_max_rss_bytes="$(time_l_stat "$full_time_file" "maximum resident set size")" + +delta_real="$(time_first_line_stat "$delta_time_file" 1)" +delta_user="$(time_first_line_stat "$delta_time_file" 3)" +delta_sys="$(time_first_line_stat "$delta_time_file" 5)" +delta_max_rss_bytes="$(time_l_stat "$delta_time_file" "maximum resident set size")" +delta_peak_footprint_bytes="$(time_l_stat "$delta_time_file" "peak memory footprint")" +delta_page_reclaims="$(time_l_stat "$delta_time_file" "page reclaims")" +delta_page_faults="$(time_l_stat "$delta_time_file" "page faults")" +delta_swaps="$(time_l_stat "$delta_time_file" "swaps")" +delta_block_inputs="$(time_l_stat "$delta_time_file" "block input operations")" +delta_block_outputs="$(time_l_stat "$delta_time_file" "block output operations")" +delta_voluntary_context_switches="$(time_l_stat "$delta_time_file" "voluntary context switches")" +delta_involuntary_context_switches="$(time_l_stat "$delta_time_file" "involuntary context switches")" +delta_instructions="$(time_l_stat "$delta_time_file" "instructions retired")" +delta_cycles="$(time_l_stat "$delta_time_file" "cycles elapsed")" + +delta_cpu_pct="$(awk -v user="$delta_user" -v sys="$delta_sys" -v real="$delta_real" 'BEGIN { + if (real > 0) { + printf "%.2f", 100 * (user + sys) / real + } +}')" + +shards="$(find "$index_dir" -name '*.zoekt' -type f | wc -l | tr -d ' ')" +index_bytes="$(du 
-sk "$index_dir" | awk '{print $1 * 1024}')" + +if [[ -s "$log_path" ]]; then + decision="$(tail -n 1 "$log_path")" +else + decision='{}' +fi + +jq -r \ + --arg k "$k" \ + --arg base "$base" \ + --arg target "$target_commit" \ + --arg full_real "$full_real" \ + --arg full_user "$full_user" \ + --arg full_sys "$full_sys" \ + --arg full_max_rss_bytes "$full_max_rss_bytes" \ + --arg delta_real "$delta_real" \ + --arg delta_user "$delta_user" \ + --arg delta_sys "$delta_sys" \ + --arg delta_cpu_pct "$delta_cpu_pct" \ + --arg delta_max_rss_bytes "$delta_max_rss_bytes" \ + --arg delta_peak_footprint_bytes "$delta_peak_footprint_bytes" \ + --arg delta_page_reclaims "$delta_page_reclaims" \ + --arg delta_page_faults "$delta_page_faults" \ + --arg delta_swaps "$delta_swaps" \ + --arg delta_block_inputs "$delta_block_inputs" \ + --arg delta_block_outputs "$delta_block_outputs" \ + --arg delta_voluntary_context_switches "$delta_voluntary_context_switches" \ + --arg delta_involuntary_context_switches "$delta_involuntary_context_switches" \ + --arg delta_instructions "$delta_instructions" \ + --arg delta_cycles "$delta_cycles" \ + --arg shards "$shards" \ + --arg index_bytes "$index_bytes" \ + ' + [ + $k, + $base[0:12], + $target[0:12], + (.accepted // null), + (.reason // ""), + $full_real, + $full_user, + $full_sys, + $full_max_rss_bytes, + $delta_real, + $delta_user, + $delta_sys, + $delta_cpu_pct, + $delta_max_rss_bytes, + $delta_peak_footprint_bytes, + $delta_page_reclaims, + $delta_page_faults, + $delta_swaps, + $delta_block_inputs, + $delta_block_outputs, + $delta_voluntary_context_switches, + $delta_involuntary_context_switches, + $delta_instructions, + $delta_cycles, + (.write_bytes_ratio // null), + (.physical_live_ratio // null), + (.tombstone_path_ratio // null), + (.next_delta_layer_count // null), + (.shard_fanout_ratio // null), + (.candidate_indexed_bytes // null), + (.candidate_document_count // null), + (.changed_or_deleted_paths // null), + $shards, + 
$index_bytes + ] | @tsv + ' <<<"$decision" diff --git a/scripts/delta-admission-sequential.sh b/scripts/delta-admission-sequential.sh new file mode 100755 index 000000000..331b79b1d --- /dev/null +++ b/scripts/delta-admission-sequential.sh @@ -0,0 +1,254 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat >&2 <<'EOF' +usage: scripts/delta-admission-sequential.sh -repo WORKTREE -index INDEX_DIR -binary ZOEKT_GIT_INDEX -base COMMIT -max-k N [-log JSONL] [-out TSV] + +Runs a sequential delta-admission calibration: + 1. checkout WORKTREE to BASE + 2. delete and recreate INDEX_DIR + 3. full-index BASE with stats-v1 + 4. for k=1..N: + checkout WORKTREE to BASE~k + run delta stats-v1 against the existing index stack + append one JSONL admission decision + append one TSV summary row + +The worktree should be disposable. +EOF +} + +repo="" +index_dir="" +binary="" +base="" +max_k="" +log_path="" +out_path="" + +while [[ $# -gt 0 ]]; do + case "$1" in + -repo) + repo="$2" + shift 2 + ;; + -index) + index_dir="$2" + shift 2 + ;; + -binary) + binary="$2" + shift 2 + ;; + -base) + base="$2" + shift 2 + ;; + -max-k) + max_k="$2" + shift 2 + ;; + -log) + log_path="$2" + shift 2 + ;; + -out) + out_path="$2" + shift 2 + ;; + -h|--help) + usage + exit 0 + ;; + *) + usage + echo "unknown argument: $1" >&2 + exit 2 + ;; + esac +done + +if [[ -z "$repo" || -z "$index_dir" || -z "$binary" || -z "$base" || -z "$max_k" ]]; then + usage + exit 2 +fi + +if [[ -z "$log_path" ]]; then + log_path="$index_dir/delta-admission-sequential.jsonl" +fi +if [[ -z "$out_path" ]]; then + out_path="$index_dir/delta-admission-sequential.tsv" +fi + +if [[ ! 
-x "$binary" ]]; then + echo "binary is not executable: $binary" >&2 + exit 2 +fi + +timed_run() { + local time_file="$1" + local stdout_file="$2" + local stderr_file="$3" + shift 3 + + /usr/bin/time -l bash -c ' + stdout_file="$1" + stderr_file="$2" + shift 2 + "$@" >"$stdout_file" 2>"$stderr_file" + ' bash "$stdout_file" "$stderr_file" "$@" 2>"$time_file" +} + +time_first_line_stat() { + local file="$1" + local position="$2" + awk -v position="$position" 'NR == 1 {print $position}' "$file" +} + +time_l_stat() { + local file="$1" + local label="$2" + awk -v label="$label" ' + $0 ~ "^[[:space:]]*[0-9]+[[:space:]]+" label "$" {print $1; found=1} + END {if (!found) print ""} + ' "$file" +} + +git -C "$repo" checkout --detach "$base" >/dev/null 2>&1 +git -C "$repo" reset --hard "$base" >/dev/null +rm -rf "$index_dir" +mkdir -p "$index_dir" +mkdir -p "$(dirname "$log_path")" "$(dirname "$out_path")" +rm -f "$log_path" "$out_path" + +full_time_file="$(mktemp /tmp/zoekt-seq-full-time.XXXXXX)" +delta_time_file="$(mktemp /tmp/zoekt-seq-delta-time.XXXXXX)" +trap 'rm -f "$full_time_file" "$delta_time_file"' EXIT + +timed_run "$full_time_file" /tmp/zoekt-seq-full.stdout /tmp/zoekt-seq-full.stderr \ + "$binary" \ + -index "$index_dir" \ + -branches HEAD \ + -submodules=false \ + -disable_ctags \ + -delta_admission_mode stats-v1 \ + "$repo" + +full_real="$(time_first_line_stat "$full_time_file" 1)" +full_user="$(time_first_line_stat "$full_time_file" 3)" +full_sys="$(time_first_line_stat "$full_time_file" 5)" +full_max_rss_bytes="$(time_l_stat "$full_time_file" "maximum resident set size")" + +printf 
'k\ttarget_short\taccepted\treason\tfull_real_s\tfull_user_s\tfull_sys_s\tfull_max_rss_bytes\tdelta_real_s\tdelta_user_s\tdelta_sys_s\tdelta_cpu_pct\tdelta_max_rss_bytes\tdelta_peak_footprint_bytes\tdelta_page_reclaims\tdelta_page_faults\tdelta_swaps\tdelta_block_inputs\tdelta_block_outputs\tdelta_voluntary_context_switches\tdelta_involuntary_context_switches\tdelta_instructions\tdelta_cycles\twrite_bytes_ratio\tphysical_live_ratio\ttombstone_path_ratio\tnext_delta_layer_count\tshard_fanout_ratio\tcandidate_indexed_bytes\tcandidate_document_count\tchanged_or_deleted_paths\tshard_count\tindex_bytes\n' > "$out_path" + +for k in $(seq 1 "$max_k"); do + target="${base}~${k}" + if ! target_commit="$(git -C "$repo" rev-parse "$target" 2>/dev/null)"; then + echo "stopping: cannot resolve $target" >&2 + break + fi + + git -C "$repo" checkout --detach "$target_commit" >/dev/null 2>&1 + git -C "$repo" reset --hard "$target_commit" >/dev/null + + : > "$delta_time_file" + timed_run "$delta_time_file" /tmp/zoekt-seq-delta.stdout /tmp/zoekt-seq-delta.stderr \ + "$binary" \ + -index "$index_dir" \ + -branches HEAD \ + -submodules=false \ + -disable_ctags \ + -delta \ + -delta_admission_mode stats-v1 \ + -delta_admission_log_json "$log_path" \ + "$repo" + + delta_real="$(time_first_line_stat "$delta_time_file" 1)" + delta_user="$(time_first_line_stat "$delta_time_file" 3)" + delta_sys="$(time_first_line_stat "$delta_time_file" 5)" + delta_max_rss_bytes="$(time_l_stat "$delta_time_file" "maximum resident set size")" + delta_peak_footprint_bytes="$(time_l_stat "$delta_time_file" "peak memory footprint")" + delta_page_reclaims="$(time_l_stat "$delta_time_file" "page reclaims")" + delta_page_faults="$(time_l_stat "$delta_time_file" "page faults")" + delta_swaps="$(time_l_stat "$delta_time_file" "swaps")" + delta_block_inputs="$(time_l_stat "$delta_time_file" "block input operations")" + delta_block_outputs="$(time_l_stat "$delta_time_file" "block output operations")" + 
delta_voluntary_context_switches="$(time_l_stat "$delta_time_file" "voluntary context switches")" + delta_involuntary_context_switches="$(time_l_stat "$delta_time_file" "involuntary context switches")" + delta_instructions="$(time_l_stat "$delta_time_file" "instructions retired")" + delta_cycles="$(time_l_stat "$delta_time_file" "cycles elapsed")" + delta_cpu_pct="$(awk -v user="$delta_user" -v sys="$delta_sys" -v real="$delta_real" 'BEGIN { + if (real > 0) { + printf "%.2f", 100 * (user + sys) / real + } + }')" + + shards="$(find "$index_dir" -name '*.zoekt' -type f | wc -l | tr -d ' ')" + index_bytes="$(du -sk "$index_dir" | awk '{print $1 * 1024}')" + decision="$(tail -n 1 "$log_path")" + + row="$(jq -r \ + --arg target "$target_commit" \ + --arg full_real "$full_real" \ + --arg full_user "$full_user" \ + --arg full_sys "$full_sys" \ + --arg full_max_rss_bytes "$full_max_rss_bytes" \ + --arg delta_real "$delta_real" \ + --arg delta_user "$delta_user" \ + --arg delta_sys "$delta_sys" \ + --arg delta_cpu_pct "$delta_cpu_pct" \ + --arg delta_max_rss_bytes "$delta_max_rss_bytes" \ + --arg delta_peak_footprint_bytes "$delta_peak_footprint_bytes" \ + --arg delta_page_reclaims "$delta_page_reclaims" \ + --arg delta_page_faults "$delta_page_faults" \ + --arg delta_swaps "$delta_swaps" \ + --arg delta_block_inputs "$delta_block_inputs" \ + --arg delta_block_outputs "$delta_block_outputs" \ + --arg delta_voluntary_context_switches "$delta_voluntary_context_switches" \ + --arg delta_involuntary_context_switches "$delta_involuntary_context_switches" \ + --arg delta_instructions "$delta_instructions" \ + --arg delta_cycles "$delta_cycles" \ + --arg shards "$shards" \ + --arg index_bytes "$index_bytes" \ + ' + [ + $target[0:12], + (.accepted // null), + (.reason // ""), + $full_real, + $full_user, + $full_sys, + $full_max_rss_bytes, + $delta_real, + $delta_user, + $delta_sys, + $delta_cpu_pct, + $delta_max_rss_bytes, + $delta_peak_footprint_bytes, + $delta_page_reclaims, + 
$delta_page_faults, + $delta_swaps, + $delta_block_inputs, + $delta_block_outputs, + $delta_voluntary_context_switches, + $delta_involuntary_context_switches, + $delta_instructions, + $delta_cycles, + (.write_bytes_ratio // null), + (.physical_live_ratio // null), + (.tombstone_path_ratio // null), + (.next_delta_layer_count // null), + (.shard_fanout_ratio // null), + (.candidate_indexed_bytes // null), + (.candidate_document_count // null), + (.changed_or_deleted_paths // null), + $shards, + $index_bytes + ] | @tsv + ' <<<"$decision")" + + printf '%s\t%s\n' "$k" "$row" | tee -a "$out_path" +done + +printf 'wrote %s and %s\n' "$out_path" "$log_path" >&2 diff --git a/scripts/query-stats-stacked-vs-clean.sh b/scripts/query-stats-stacked-vs-clean.sh new file mode 100755 index 000000000..ba7076850 --- /dev/null +++ b/scripts/query-stats-stacked-vs-clean.sh @@ -0,0 +1,167 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat >&2 <<'EOF' +usage: scripts/query-stats-stacked-vs-clean.sh -repo WORKTREE -stacked-index INDEX_DIR -clean-index INDEX_DIR -git-index ZOEKT_GIT_INDEX -zoekt ZOEKT -base COMMIT -max-k N -query QUERY [-out TSV] + +Builds a forced stacked-delta index from BASE through BASE~N. At each k it also +builds a clean full index at BASE~k, runs `zoekt -v` on both indexes with QUERY, +and writes side-by-side query stats to TSV. + +The worktree should be disposable. 
+EOF +} + +repo="" +stacked_index="" +clean_index="" +git_index="" +zoekt_bin="" +base="" +max_k="" +query="" +out_path="" + +while [[ $# -gt 0 ]]; do + case "$1" in + -repo) repo="$2"; shift 2 ;; + -stacked-index) stacked_index="$2"; shift 2 ;; + -clean-index) clean_index="$2"; shift 2 ;; + -git-index) git_index="$2"; shift 2 ;; + -zoekt) zoekt_bin="$2"; shift 2 ;; + -base) base="$2"; shift 2 ;; + -max-k) max_k="$2"; shift 2 ;; + -query) query="$2"; shift 2 ;; + -out) out_path="$2"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) usage; echo "unknown argument: $1" >&2; exit 2 ;; + esac +done + +if [[ -z "$repo" || -z "$stacked_index" || -z "$clean_index" || -z "$git_index" || -z "$zoekt_bin" || -z "$base" || -z "$max_k" || -z "$query" ]]; then + usage + exit 2 +fi + +if [[ -z "$out_path" ]]; then + out_path="/tmp/zoekt-query-stacked-vs-clean.tsv" +fi + +if [[ ! -x "$git_index" ]]; then + echo "git index binary is not executable: $git_index" >&2 + exit 2 +fi +if [[ ! -x "$zoekt_bin" ]]; then + echo "zoekt binary is not executable: $zoekt_bin" >&2 + exit 2 +fi + +stat_field() { + local file="$1" + local field="$2" + sed -nE "s/.*${field}:([0-9]+).*/\\1/p" "$file" | tail -1 +} + +run_query_stats() { + local index_dir="$1" + local prefix="$2" + local stderr_file="/tmp/zoekt-query-${prefix}.stderr" + local stdout_file="/tmp/zoekt-query-${prefix}.stdout" + local time_file="/tmp/zoekt-query-${prefix}.time" + + /usr/bin/time -p "$zoekt_bin" -v -index_dir "$index_dir" "$query" >"$stdout_file" 2>"$stderr_file.time" || true + # /usr/bin/time -p and zoekt -v share stderr. Split the final time lines from + # zoekt logs by pattern. 
+ awk '/^(real|user|sys) / {print > "'"$time_file"'"; next} {print > "'"$stderr_file"'"}' "$stderr_file.time" + + local real user sys + real="$(awk '/^real / {print $2}' "$time_file")" + user="$(awk '/^user / {print $2}' "$time_file")" + sys="$(awk '/^sys / {print $2}' "$time_file")" + + printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' \ + "$real" \ + "$user" \ + "$sys" \ + "$(stat_field "$stderr_file" ContentBytesLoaded)" \ + "$(stat_field "$stderr_file" IndexBytesLoaded)" \ + "$(stat_field "$stderr_file" FileCount)" \ + "$(stat_field "$stderr_file" FilesConsidered)" \ + "$(stat_field "$stderr_file" FilesLoaded)" \ + "$(stat_field "$stderr_file" FilesSkipped)" \ + "$(stat_field "$stderr_file" ShardsScanned)" \ + "$(stat_field "$stderr_file" ShardsSkippedFilter)" \ + "$(stat_field "$stderr_file" MatchCount)" \ + "$(stat_field "$stderr_file" NgramMatches)" \ + "$(stat_field "$stderr_file" NgramLookups)" \ + "$(stat_field "$stderr_file" MatchTreeConstruction)" \ + "$(stat_field "$stderr_file" MatchTreeSearch)" +} + +index_full() { + local index_dir="$1" + "$git_index" \ + -index "$index_dir" \ + -branches HEAD \ + -submodules=false \ + -disable_ctags \ + "$repo" >/tmp/zoekt-query-index.stdout 2>/tmp/zoekt-query-index.stderr +} + +index_delta() { + "$git_index" \ + -index "$stacked_index" \ + -branches HEAD \ + -submodules=false \ + -disable_ctags \ + -delta \ + "$repo" >/tmp/zoekt-query-delta.stdout 2>/tmp/zoekt-query-delta.stderr +} + +mkdir -p "$(dirname "$out_path")" +rm -rf "$stacked_index" "$clean_index" +mkdir -p "$stacked_index" "$clean_index" + +git -C "$repo" checkout --detach "$base" >/dev/null 2>&1 +git -C "$repo" reset --hard "$base" >/dev/null +index_full "$stacked_index" + +printf 
'k\ttarget_short\tstacked_shards\tclean_shards\tstacked_index_bytes\tclean_index_bytes\tstacked_query_real_s\tstacked_query_user_s\tstacked_query_sys_s\tstacked_ContentBytesLoaded\tstacked_IndexBytesLoaded\tstacked_FileCount\tstacked_FilesConsidered\tstacked_FilesLoaded\tstacked_FilesSkipped\tstacked_ShardsScanned\tstacked_ShardsSkippedFilter\tstacked_MatchCount\tstacked_NgramMatches\tstacked_NgramLookups\tstacked_MatchTreeConstruction\tstacked_MatchTreeSearch\tclean_query_real_s\tclean_query_user_s\tclean_query_sys_s\tclean_ContentBytesLoaded\tclean_IndexBytesLoaded\tclean_FileCount\tclean_FilesConsidered\tclean_FilesLoaded\tclean_FilesSkipped\tclean_ShardsScanned\tclean_ShardsSkippedFilter\tclean_MatchCount\tclean_NgramMatches\tclean_NgramLookups\tclean_MatchTreeConstruction\tclean_MatchTreeSearch\n' >"$out_path" + +for k in $(seq 1 "$max_k"); do + target="${base}~${k}" + if ! target_commit="$(git -C "$repo" rev-parse "$target" 2>/dev/null)"; then + echo "stopping: cannot resolve $target" >&2 + break + fi + + git -C "$repo" checkout --detach "$target_commit" >/dev/null 2>&1 + git -C "$repo" reset --hard "$target_commit" >/dev/null + + index_delta + + rm -rf "$clean_index" + mkdir -p "$clean_index" + index_full "$clean_index" + + stacked_shards="$(find "$stacked_index" -name '*.zoekt' -type f | wc -l | tr -d ' ')" + clean_shards="$(find "$clean_index" -name '*.zoekt' -type f | wc -l | tr -d ' ')" + stacked_bytes="$(du -sk "$stacked_index" | awk '{print $1 * 1024}')" + clean_bytes="$(du -sk "$clean_index" | awk '{print $1 * 1024}')" + + stacked_stats="$(run_query_stats "$stacked_index" stacked)" + clean_stats="$(run_query_stats "$clean_index" clean)" + + printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \ + "$k" \ + "${target_commit:0:12}" \ + "$stacked_shards" \ + "$clean_shards" \ + "$stacked_bytes" \ + "$clean_bytes" \ + "$stacked_stats" \ + "$clean_stats" | tee -a "$out_path" +done + +printf 'wrote %s\n' "$out_path" >&2 From 895c6bec8d93170a3f89486df616ea2fccc0ba5c Mon 
Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 10:08:27 -0700 Subject: [PATCH 02/10] Add multi-branch delta test matrix Add desired-future tests for branch-set delta updates across exact multi-branch updates, renames, additions, removals, combined changes, HEAD/worktree resolution, ambiguity handling, stats, and query surfaces. Most branch-set change tests are expected to fail until explicit branch mapping is implemented. Co-authored-by: Codex --- TEST_LIST.md | 276 ++ gitindex/delta_multibranch_branchset_test.go | 2501 +++++++++++++++++ gitindex/delta_multibranch_existing_test.go | 434 +++ gitindex/delta_multibranch_head_query_test.go | 939 +++++++ .../delta_multibranch_safety_stats_test.go | 1241 ++++++++ gitindex/index_test.go | 332 +++ 6 files changed, 5723 insertions(+) create mode 100644 TEST_LIST.md create mode 100644 gitindex/delta_multibranch_branchset_test.go create mode 100644 gitindex/delta_multibranch_existing_test.go create mode 100644 gitindex/delta_multibranch_head_query_test.go create mode 100644 gitindex/delta_multibranch_safety_stats_test.go diff --git a/TEST_LIST.md b/TEST_LIST.md new file mode 100644 index 000000000..4cdf9f4ba --- /dev/null +++ b/TEST_LIST.md @@ -0,0 +1,276 @@ +# Multi-Branch Delta Test List + +Goal: define failing tests that should pass once delta indexing can update multi-branch indexes across branch-set changes. These tests should verify that the delta path is actually used and that branch-filtered lookups return the same results as a clean full rebuild. + +## Test Harness Expectations + +- Each test should build an initial full index, mutate the Git repo, request a delta build, and assert the delta build path was used. +- Each test should compare search results against a clean full rebuild of the final branch set. +- Search assertions should include unfiltered search and branch-filtered search for every old, new, unchanged, added, and removed branch involved. 
+- Search assertions should verify both positive hits and absence of stale hits. +- Tests should inspect repository metadata branch names and versions after the delta. +- Tests should inspect that old shards carry the expected file tombstones when paths changed or disappeared. +- Tests should cover both default branch filters and `query.BranchesRepos` because Sourcegraph branch filtering uses `BranchesRepos`. +- Where `ResolveHEADToBranch` is involved, tests should verify stored short branch names, not `HEAD`, unless the worktree is detached. + +## Baseline Existing-Behavior Controls + +1. Exact same multi-branch set, one branch content changes. + - Initial: `[main, release]` + - Final: `[main, release]` + - Change: `main: shared.txt` changes, `release: shared.txt` unchanged. + - Expect: delta used; `branch:main` sees new main content; `branch:release` sees old release content; old main content is absent. + +2. Exact same multi-branch set, one branch deletes a path that still exists on another branch. + - Initial: `[main, release]` + - Final: `[main, release]` + - Change: `main` deletes `shared.txt`, `release` keeps it. + - Expect: delta used; `branch:main` misses `shared.txt`; `branch:release` still finds `shared.txt`. + +3. Exact same multi-branch set, a file becomes identical across two branches. + - Initial: `main: a.txt = A`, `release: a.txt = B` + - Final: `main: a.txt = B`, `release: a.txt = B` + - Expect: delta used; one logical final document may serve both branches, but branch-filtered searches for both branches must find it. + +4. Exact same multi-branch set, a file diverges from shared content. + - Initial: `main` and `release` both have `a.txt = same`. + - Final: `main: a.txt = new-main`, `release: a.txt = same`. + - Expect: delta used; branch masks are split correctly. + +## Single Branch Rename + +5. Rename one branch in a two-branch index, no file changes on renamed branch. 
+ - Initial: `[feature-a, release]` + - Final: `[feature-b, release]` + - `feature-b` points to the same commit as old `feature-a`. + - Expect: delta used; metadata says `[feature-b, release]`; `branch:feature-b` finds old feature content; `branch:feature-a` finds nothing. + +6. Rename one branch and modify a path on renamed branch. + - Initial: `[feature-a, release]` + - Final: `[feature-b, release]` + - Change: `feature-b: branch.txt` differs from old `feature-a`. + - Expect: delta used; new feature content found only on `feature-b`; old feature content absent; release content preserved. + +7. Rename one branch and delete a path on renamed branch. + - Initial: `[feature-a, release]` + - Final: `[feature-b, release]` + - Change: `feature-b` deletes `branch.txt`; `release` keeps unrelated content. + - Expect: delta used; `branch:feature-b` misses deleted path; stale `feature-a` path absent; release queries unchanged. + +8. Rename one branch and add a path on renamed branch. + - Initial: `[feature-a, release]` + - Final: `[feature-b, release]` + - Change: `feature-b` adds `new.txt`. + - Expect: delta used; `branch:feature-b` finds `new.txt`; `branch:feature-a` finds nothing. + +## Multiple Branch Renames + +9. Rename two branches at the same time with no file changes. + - Initial: `[feature-a, qa-a, release]` + - Final: `[feature-b, qa-b, release]` + - Expect: delta used; both renamed branch filters work; old branch filters return no results; release unchanged. + +10. Rename two branches with independent file changes. + - Initial: `[feature-a, qa-a, release]` + - Final: `[feature-b, qa-b, release]` + - Change: `feature-b` modifies `feature.txt`; `qa-b` modifies `qa.txt`. + - Expect: delta used; each branch sees only its new content; old content absent. + +11. Rename two branches where both end up sharing the same blob for a path. 
+ - Initial: `feature-a: shared.txt = A`, `qa-a: shared.txt = B` + - Final: `feature-b: shared.txt = C`, `qa-b: shared.txt = C` + - Expect: delta used; final branch masks include both renamed branches. + +12. Rename branch order changes at the same time. + - Initial: `[feature-a, release, qa-a]` + - Final: `[qa-b, feature-b, release]` + - Expect: delta used only if mapping is explicit and unambiguous; branch-filtered search must match clean rebuild exactly. + +## Add Branches + +13. Add one new branch that is identical to an existing branch. + - Initial: `[main]` + - Final: `[main, release]` + - `release` points to same commit as `main`. + - Expect: delta used; existing docs gain `release` membership; `branch:release` finds the same files as `branch:main`. + +14. Add one new branch with one changed path. + - Initial: `[main]` + - Final: `[main, release]` + - `release` branches from `main` and modifies `release.txt`. + - Expect: delta used; `branch:release` sees release-only file/content; `branch:main` does not. + +15. Add one new branch with deletions relative to existing branch. + - Initial: `[main]` + - Final: `[main, slim]` + - `slim` deletes `large.txt`. + - Expect: delta used; `branch:slim` misses `large.txt`; `branch:main` still finds it. + +16. Add multiple new branches at once. + - Initial: `[main]` + - Final: `[main, release, dev]` + - `release` and `dev` each have distinct path changes. + - Expect: delta used; all three branch filters match clean rebuild. + +17. Add a new branch that shares some docs and diverges on others. + - Initial: `[main]` + - Final: `[main, feature]` + - `feature` changes `a.txt`, keeps `b.txt`, adds `c.txt`, deletes `d.txt`. + - Expect: delta used; branch masks correct for all four categories. + +## Remove Branches + +18. Remove one branch, no file content changes. 
+ - Initial: `[main, release]` + - Final: `[main]` + - Expect: delta used; `branch:release` returns no results; unfiltered search does not duplicate stale release-only docs. + +19. Remove one branch that had branch-only files. + - Initial: `[main, release]` + - Final: `[main]` + - `release` had `release-only.txt`. + - Expect: delta used; release-only file absent from unfiltered and branch-filtered searches. + +20. Remove one branch while another remaining branch still has the same path with different content. + - Initial: `main: shared.txt = main`, `release: shared.txt = release` + - Final: `[main]` + - Expect: delta used; final unfiltered and `branch:main` searches show only main content. + +21. Remove multiple branches at once. + - Initial: `[main, release, dev, qa]` + - Final: `[main]` + - Expect: delta used; removed branch filters return no results; main results match clean rebuild. + +22. Remove a branch and modify a retained branch in the same update. + - Initial: `[main, release]` + - Final: `[main]` + - Change: `main` modifies `a.txt`; `release` removed. + - Expect: delta used; new main content present; old release-only content absent. + +## Combined Rename/Add/Remove + +23. Rename one branch and add one branch. + - Initial: `[feature-a, release]` + - Final: `[feature-b, release, dev]` + - Expect: delta used; renamed branch, unchanged branch, and added branch all match clean rebuild. + +24. Rename one branch and remove one branch. + - Initial: `[feature-a, release, dev]` + - Final: `[feature-b, release]` + - Expect: delta used; `feature-b` correct, `dev` absent, release unchanged. + +25. Add one branch and remove one branch. + - Initial: `[main, old-release]` + - Final: `[main, new-release]` + - Expect: delta used; old-release absent; new-release present. + +26. Rename multiple branches, add one branch, remove one branch. 
+ - Initial: `[feature-a, qa-a, old-release, main]` + - Final: `[feature-b, qa-b, new-release, main]` + - Expect: delta used; branch-filtered searches for every final branch match clean rebuild; every removed branch returns no results. + +27. Branch count changes and branch order changes at the same time. + - Initial: `[main, release, dev]` + - Final: `[qa, main, release-renamed]` + - Expect: delta used only with explicit old-to-new mapping; results must be order-independent. + +## HEAD Resolution And Worktrees + +28. Single requested `HEAD` resolves from `feature-a` to `feature-b`. + - Initial indexed branch: `[feature-a]` + - Final indexed branch: `[feature-b]` + - Expect: delta used; metadata stores `feature-b`; old branch filter returns no results. + +29. Multi-branch `HEAD, release`, where `HEAD` resolves from `feature-a` to `feature-b`. + - Initial: `[feature-a, release]` + - Final: `[feature-b, release]` + - Expect: delta used; same assertions as single branch rename plus release preservation. + +30. Multi-branch `HEAD, release`, where `HEAD` resolves to `release`. + - Initial or final branch list would contain duplicate logical branches. + - Expect: either deterministic dedupe with correct branch masks or conservative full rebuild; define chosen behavior before implementation. + +31. Detached `HEAD` in a multi-branch index. + - Initial: `[feature-a, release]` + - Final: `[HEAD, release]` + - Expect: define whether detached HEAD can delta; if allowed, branch-filtered lookup for `HEAD` must match clean rebuild. + +32. Two linked worktrees of the same repo in the same index dir. + - Worktree A: `HEAD -> feature-a` + - Worktree B: `HEAD -> feature-b` + - Expect: indexes either have distinct repo identities or explicit behavior; branch metadata must not conflate both as `HEAD`. + +## Ambiguity And Safety Tests + +33. Ambiguous rename without provenance. 
+ - Initial: `[foo, bar]` + - Final: `[baz, bar]` + - If no metadata can prove `foo -> baz`, expect full rebuild or explicit error, not silent arbitrary mapping. + +34. Many-to-one branch rename. + - Initial: `[a, b]` + - Final: `[c]` + - Expect: full rebuild unless a precise merge mapping is supported. + +35. One-to-many branch split. + - Initial: `[a]` + - Final: `[b, c]` + - Expect: delta used only if both new branches can be compared to old `a`; otherwise full rebuild. + +36. Duplicate final branch names after wildcard expansion. + - Initial: `[main]` + - Final request expands to `[main, main]` + - Expect: deterministic dedupe or full rebuild; never duplicate branch masks. + +37. Branch removed and re-added with the same name but unrelated history. + - Initial: `[release]` at old lineage + - Final: `[release]` at unrelated commit + - Expect: delta can still be correct if old and new commits are diffable or fallback if commit ancestry/object availability is insufficient. + +38. Old commit missing locally for a removed/renamed branch. + - Expect: full rebuild; no partial stale results. + +39. New branch commit missing locally after fetch/filter. + - Expect: full rebuild or hard error according to existing fetch semantics; no partial index. + +40. Compound shards present before multi-branch branch-set change. + - Expect: current unsupported compound-shard delta behavior remains conservative until separately implemented. + +## Stats And Observability + +41. DeltaStats after branch rename. + - Expect: live document/path counts match clean rebuild; physical counts include stacked debt; tombstones reflect affected paths. + +42. DeltaStats after adding branch identical to existing branch. + - Expect: live document count may not grow for shared docs, but branch membership changes must be reflected; counters match chosen semantics. + +43. DeltaStats after removing branch with branch-only files. 
+ - Expect: live counts decrease; physical counts retain old docs until rebuild; tombstone/path debt increases appropriately. + +44. Admission log for successful multi-branch branch-set delta. + - Expect: JSONL contains mapping summary, accepted=true, old/new branch counts, and cost metrics. + +45. Admission log for fallback due to ambiguous mapping. + - Expect: accepted=false with a specific reason such as `ambiguous branch mapping`, not a generic branch-list mismatch. + +## Query Surfaces To Exercise + +46. Plain substring query across all branches. +47. `branch:` parsed query. +48. `query.BranchesRepos` query for one branch and repo ID. +49. `BranchesRepos` query with multiple branch/repo pairs. +50. File-name query after branch rename/add/remove. +51. Regex content query after branch rename/add/remove. +52. Case-sensitive content query after branch rename/add/remove. + +## Comparison Strategy + +For each successful delta test: + +1. Build initial index. +2. Mutate branch set and branch contents. +3. Run delta indexing with spies and require the delta path, not fallback. +4. Build a clean full index in a separate temp dir for the final branch set. +5. Compare search results for all relevant queries between delta and clean indexes. +6. Compare repository branch metadata. +7. Compare advisory live `DeltaStats` fields against the clean full index when `stats-v1` is enabled. 
diff --git a/gitindex/delta_multibranch_branchset_test.go b/gitindex/delta_multibranch_branchset_test.go new file mode 100644 index 000000000..8ec10a857 --- /dev/null +++ b/gitindex/delta_multibranch_branchset_test.go @@ -0,0 +1,2501 @@ +package gitindex + +import ( + "context" + "github.com/RoaringBitmap/roaring" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/google/go-cmp/cmp" + "github.com/sourcegraph/zoekt" + "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" + "github.com/sourcegraph/zoekt/search" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" + "testing" +) + +// ---- delta_multibranch_single_rename_test.go ---- + +const ( + singleRenameRepoName = "single-rename-repository" + singleRenameRepoID = 4242 + + singleRenameOldBranch = "feature-a" + singleRenameNewBranch = "feature-b" + singleRenameRelease = "release" +) + +func TestIndexGitRepo_DeltaMultiBranchSingleRename(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + mutateAfterRename func(t *testing.T, repoDir string) + expectations []singleRenameExpectation + compareQueries []singleRenameNamedQuery + }{ + { + name: "same commit no content changes", + expectations: []singleRenameExpectation{ + { + name: "renamed branch keeps existing file", + branch: singleRenameNewBranch, + query: singleRenameContentQuery("feature-a-initial-needle"), + want: []string{"branch.txt"}, + }, + { + name: "unchanged release branch keeps release file", + branch: singleRenameRelease, + query: singleRenameContentQuery("release-initial-needle"), + want: []string{"release.txt"}, + }, + }, + compareQueries: []singleRenameNamedQuery{ + {name: "feature content", q: singleRenameContentQuery("feature-a-initial-needle")}, + {name: "release content", q: singleRenameContentQuery("release-initial-needle")}, + {name: "branch path", q: singleRenameFileNameQuery("branch.txt")}, + {name: "release path", q: 
singleRenameFileNameQuery("release.txt")}, + }, + }, + { + name: "modified path on renamed branch", + mutateAfterRename: func(t *testing.T, repoDir string) { + singleRenameWriteAndCommitFile(t, repoDir, "branch.txt", "feature-b-modified-needle\n", "modify renamed branch") + }, + expectations: []singleRenameExpectation{ + { + name: "renamed branch sees modified content", + branch: singleRenameNewBranch, + query: singleRenameContentQuery("feature-b-modified-needle"), + want: []string{"branch.txt"}, + }, + { + name: "renamed branch no longer sees old content", + branch: singleRenameNewBranch, + query: singleRenameContentQuery("feature-a-initial-needle"), + want: nil, + }, + { + name: "unchanged release branch keeps release file", + branch: singleRenameRelease, + query: singleRenameContentQuery("release-initial-needle"), + want: []string{"release.txt"}, + }, + }, + compareQueries: []singleRenameNamedQuery{ + {name: "old feature content", q: singleRenameContentQuery("feature-a-initial-needle")}, + {name: "new feature content", q: singleRenameContentQuery("feature-b-modified-needle")}, + {name: "release content", q: singleRenameContentQuery("release-initial-needle")}, + {name: "branch path", q: singleRenameFileNameQuery("branch.txt")}, + }, + }, + { + name: "deleted path on renamed branch", + mutateAfterRename: func(t *testing.T, repoDir string) { + if err := os.Remove(filepath.Join(repoDir, "branch.txt")); err != nil { + t.Fatalf("Remove branch.txt: %v", err) + } + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "delete renamed branch path") + }, + expectations: []singleRenameExpectation{ + { + name: "renamed branch no longer has deleted path", + branch: singleRenameNewBranch, + query: singleRenameFileNameQuery("branch.txt"), + want: nil, + }, + { + name: "old feature content is absent", + branch: singleRenameNewBranch, + query: singleRenameContentQuery("feature-a-initial-needle"), + want: nil, + }, + { + name: "unchanged release branch keeps 
release file", + branch: singleRenameRelease, + query: singleRenameContentQuery("release-initial-needle"), + want: []string{"release.txt"}, + }, + }, + compareQueries: []singleRenameNamedQuery{ + {name: "old feature content", q: singleRenameContentQuery("feature-a-initial-needle")}, + {name: "release content", q: singleRenameContentQuery("release-initial-needle")}, + {name: "branch path", q: singleRenameFileNameQuery("branch.txt")}, + {name: "release path", q: singleRenameFileNameQuery("release.txt")}, + }, + }, + { + name: "added path on renamed branch", + mutateAfterRename: func(t *testing.T, repoDir string) { + singleRenameWriteAndCommitFile(t, repoDir, "new.txt", "feature-b-added-needle\n", "add renamed branch path") + }, + expectations: []singleRenameExpectation{ + { + name: "renamed branch keeps existing file", + branch: singleRenameNewBranch, + query: singleRenameContentQuery("feature-a-initial-needle"), + want: []string{"branch.txt"}, + }, + { + name: "renamed branch sees added file", + branch: singleRenameNewBranch, + query: singleRenameContentQuery("feature-b-added-needle"), + want: []string{"new.txt"}, + }, + { + name: "unchanged release branch keeps release file", + branch: singleRenameRelease, + query: singleRenameContentQuery("release-initial-needle"), + want: []string{"release.txt"}, + }, + }, + compareQueries: []singleRenameNamedQuery{ + {name: "feature content", q: singleRenameContentQuery("feature-a-initial-needle")}, + {name: "added feature content", q: singleRenameContentQuery("feature-b-added-needle")}, + {name: "release content", q: singleRenameContentQuery("release-initial-needle")}, + {name: "branch path", q: singleRenameFileNameQuery("branch.txt")}, + {name: "added path", q: singleRenameFileNameQuery("new.txt")}, + }, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + repoDir := singleRenameInitRepo(t) + indexDir := t.TempDir() + + initialOpts := singleRenameOptions(repoDir, indexDir, 
[]string{singleRenameOldBranch, singleRenameRelease}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + singleRenameRenameBranch(t, repoDir) + if tc.mutateAfterRename != nil { + tc.mutateAfterRename(t, repoDir) + } + + deltaOpts := singleRenameOptions(repoDir, indexDir, []string{singleRenameNewBranch, singleRenameRelease}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := singleRenameIndexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Error("expected delta build to be attempted") + } + if normalBuildCalled { + t.Error("expected single branch rename to stay on the delta path, got normal rebuild fallback") + } + + repo := singleRenameIndexedRepository(t, indexDir) + singleRenameAssertBranchMetadata(t, repoDir, repo) + singleRenameAssertOldBranchHasNoResults(t, indexDir) + + cleanIndexDir := t.TempDir() + cleanOpts := singleRenameOptions(repoDir, cleanIndexDir, []string{singleRenameNewBranch, singleRenameRelease}) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + for _, expectation := range tc.expectations { + singleRenameAssertExpectation(t, indexDir, expectation) + } + + singleRenameCompareWithCleanRebuild(t, indexDir, cleanIndexDir, tc.compareQueries) + }) + } +} + +type singleRenameExpectation struct { + name string + branch string + query query.Q + want []string +} + +type singleRenameNamedQuery struct { + name string + q query.Q +} + +type singleRenameSearchHit struct { + FileName string + Content string + Branches []string + Version string +} + +func singleRenameOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + BuildOptions: index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: singleRenameRepoName, + ID: singleRenameRepoID, + }, + DisableCTags: 
true, + }, + } +} + +func singleRenameInitRepo(t *testing.T) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "base") + runGit(t, repoDir, "commit", "--allow-empty", "-m", "base") + + runGit(t, repoDir, "checkout", "-b", singleRenameRelease) + singleRenameWriteAndCommitFile(t, repoDir, "release.txt", "release-initial-needle\n", "release content") + + runGit(t, repoDir, "checkout", "base") + runGit(t, repoDir, "checkout", "-b", singleRenameOldBranch) + singleRenameWriteAndCommitFile(t, repoDir, "branch.txt", "feature-a-initial-needle\n", "feature content") + + return repoDir +} + +func singleRenameRenameBranch(t *testing.T, repoDir string) { + t.Helper() + + runGit(t, repoDir, "checkout", singleRenameOldBranch) + runGit(t, repoDir, "branch", "-m", singleRenameNewBranch) +} + +func singleRenameWriteAndCommitFile(t *testing.T, repoDir, name, content, message string) { + t.Helper() + + file := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(file), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(file), err) + } + if err := os.WriteFile(file, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", file, err) + } + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", message) +} + +func singleRenameIndexGitRepoWithPrepareSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, 
gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func singleRenameIndexedRepository(t *testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: singleRenameRepoName, + ID: singleRenameRepoID, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository %q not found", singleRenameRepoName) + } + return repo +} + +func singleRenameAssertBranchMetadata(t *testing.T, repoDir string, repo *zoekt.Repository) { + t.Helper() + + wantBranches := []zoekt.RepositoryBranch{ + {Name: singleRenameNewBranch, Version: singleRenameRevParse(t, repoDir, singleRenameNewBranch)}, + {Name: singleRenameRelease, Version: singleRenameRevParse(t, repoDir, singleRenameRelease)}, + } + if diff := cmp.Diff(wantBranches, repo.Branches); diff != "" { + t.Fatalf("repository branches mismatch (-want +got):\n%s", diff) + } +} + +func singleRenameRevParse(t *testing.T, repoDir, rev string) string { + t.Helper() + + hash, err := git.PlainOpen(repoDir) + if err != nil { + t.Fatalf("PlainOpen(%q): %v", repoDir, err) + } + ref, err := hash.ResolveRevision(plumbing.Revision(rev)) + if err != nil { + t.Fatalf("ResolveRevision(%q): %v", rev, err) + } + return ref.String() +} + +func singleRenameAssertOldBranchHasNoResults(t *testing.T, indexDir string) { + t.Helper() + + allFiles := &query.Const{Value: true} + singleRenameAssertSearchFileNames(t, indexDir, singleRenameBranchQuery(singleRenameOldBranch, allFiles), nil) + singleRenameAssertSearchFileNames(t, indexDir, singleRenameBranchesReposQuery(singleRenameOldBranch, allFiles), nil) +} + +func singleRenameAssertExpectation(t *testing.T, indexDir string, expectation 
singleRenameExpectation) { + t.Helper() + + t.Run(expectation.name+"/branch", func(t *testing.T) { + singleRenameAssertSearchFileNames(t, indexDir, singleRenameBranchQuery(expectation.branch, expectation.query), expectation.want) + }) + t.Run(expectation.name+"/branches-repos", func(t *testing.T) { + singleRenameAssertSearchFileNames(t, indexDir, singleRenameBranchesReposQuery(expectation.branch, expectation.query), expectation.want) + }) +} + +func singleRenameCompareWithCleanRebuild(t *testing.T, deltaIndexDir, cleanIndexDir string, queries []singleRenameNamedQuery) { + t.Helper() + + branches := []string{singleRenameOldBranch, singleRenameNewBranch, singleRenameRelease} + for _, named := range queries { + t.Run("compare/"+named.name+"/unfiltered", func(t *testing.T) { + singleRenameAssertSameSearchHits(t, deltaIndexDir, cleanIndexDir, named.q) + }) + for _, branch := range branches { + branch := branch + t.Run("compare/"+named.name+"/branch/"+branch, func(t *testing.T) { + singleRenameAssertSameSearchHits(t, deltaIndexDir, cleanIndexDir, singleRenameBranchQuery(branch, named.q)) + }) + t.Run("compare/"+named.name+"/branches-repos/"+branch, func(t *testing.T) { + singleRenameAssertSameSearchHits(t, deltaIndexDir, cleanIndexDir, singleRenameBranchesReposQuery(branch, named.q)) + }) + } + } +} + +func singleRenameAssertSameSearchHits(t *testing.T, deltaIndexDir, cleanIndexDir string, q query.Q) { + t.Helper() + + deltaHits := singleRenameSearchHits(t, deltaIndexDir, q) + cleanHits := singleRenameSearchHits(t, cleanIndexDir, q) + if diff := cmp.Diff(cleanHits, deltaHits); diff != "" { + t.Fatalf("delta search results differ from clean rebuild (-clean +delta):\n%s", diff) + } +} + +func singleRenameAssertSearchFileNames(t *testing.T, indexDir string, q query.Q, want []string) { + t.Helper() + + hits := singleRenameSearchHits(t, indexDir, q) + var got []string + for _, hit := range hits { + got = append(got, hit.FileName) + } + if diff := cmp.Diff(want, got); diff != 
"" { + t.Fatalf("search file names mismatch (-want +got):\n%s", diff) + } +} + +func singleRenameSearchHits(t *testing.T, indexDir string, q query.Q) []singleRenameSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true}) + if err != nil { + t.Fatalf("Search(%s): %v", q, err) + } + + hits := make([]singleRenameSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) + sort.Strings(branches) + hits = append(hits, singleRenameSearchHit{ + FileName: file.FileName, + Content: string(file.Content), + Branches: branches, + Version: file.Version, + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + if hits[i].Version != hits[j].Version { + return hits[i].Version < hits[j].Version + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func singleRenameBranchQuery(branch string, q query.Q) query.Q { + return query.NewAnd(&query.Branch{Pattern: branch}, q) +} + +func singleRenameBranchesReposQuery(branch string, q query.Q) query.Q { + return query.NewAnd(&query.BranchesRepos{List: []query.BranchRepos{{ + Branch: branch, + Repos: roaring.BitmapOf(singleRenameRepoID), + }}}, q) +} + +func singleRenameContentQuery(pattern string) query.Q { + return &query.Substring{Pattern: pattern, Content: true} +} + +func singleRenameFileNameQuery(pattern string) query.Q { + return &query.Substring{Pattern: pattern, FileName: true} +} + +// ---- delta_multibranch_multi_rename_test.go ---- + +func TestIndexGitRepo_DeltaMultipleBranchRenamesNoFileChanges(t 
*testing.T) { + t.Parallel() + + repositoryDir := multiRenameInitRepository(t, map[string]map[string]string{ + "feature-a": { + "feature.txt": "feature-a-carryover-needle\n", + }, + "qa-a": { + "qa.txt": "qa-a-carryover-needle\n", + }, + "release": { + "release.txt": "release-stable-needle\n", + }, + }) + + multiRenameRunCase(t, multiRenameCase{ + repositoryDir: repositoryDir, + initialBranches: []string{"feature-a", "qa-a", "release"}, + finalBranches: []string{"feature-b", "qa-b", "release"}, + mutate: func(t *testing.T, repositoryDir string) { + multiRenameBranch(t, repositoryDir, "feature-a", "feature-b") + multiRenameBranch(t, repositoryDir, "qa-a", "qa-b") + }, + checks: []multiRenameCheck{ + { + pattern: "feature-a-carryover-needle", + unfiltered: []string{"feature.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": {"feature.txt"}, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "qa-a-carryover-needle", + unfiltered: []string{"qa.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": {"qa.txt"}, + "release": nil, + }, + }, + { + pattern: "release-stable-needle", + unfiltered: []string{"release.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": {"release.txt"}, + }, + }, + }, + }) +} + +func TestIndexGitRepo_DeltaMultipleBranchRenamesIndependentContentChanges(t *testing.T) { + t.Parallel() + + repositoryDir := multiRenameInitRepository(t, map[string]map[string]string{ + "feature-a": { + "feature.txt": "feature-a-old-needle\n", + }, + "qa-a": { + "qa.txt": "qa-a-old-needle\n", + }, + "release": { + "release.txt": "release-stable-needle\n", + }, + }) + + multiRenameRunCase(t, multiRenameCase{ + repositoryDir: repositoryDir, + initialBranches: []string{"feature-a", "qa-a", "release"}, + finalBranches: []string{"feature-b", "qa-b", "release"}, + expectedTombstones: []string{"feature.txt", 
"qa.txt"}, + mutate: func(t *testing.T, repositoryDir string) { + multiRenameBranch(t, repositoryDir, "feature-a", "feature-b") + multiRenameBranch(t, repositoryDir, "qa-a", "qa-b") + multiRenameCheckout(t, repositoryDir, "feature-b") + multiRenameWriteAndCommit(t, repositoryDir, map[string]string{ + "feature.txt": "feature-b-new-needle\n", + }, "update feature-b") + multiRenameCheckout(t, repositoryDir, "qa-b") + multiRenameWriteAndCommit(t, repositoryDir, map[string]string{ + "qa.txt": "qa-b-new-needle\n", + }, "update qa-b") + }, + checks: []multiRenameCheck{ + { + pattern: "feature-b-new-needle", + unfiltered: []string{"feature.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": {"feature.txt"}, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "qa-b-new-needle", + unfiltered: []string{"qa.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": {"qa.txt"}, + "release": nil, + }, + }, + { + pattern: "feature-a-old-needle", + unfiltered: nil, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "qa-a-old-needle", + unfiltered: nil, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "release-stable-needle", + unfiltered: []string{"release.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": {"release.txt"}, + }, + }, + }, + }) +} + +func TestIndexGitRepo_DeltaMultipleBranchRenamesConvergeToSharedBlob(t *testing.T) { + t.Parallel() + + repositoryDir := multiRenameInitRepository(t, map[string]map[string]string{ + "feature-a": { + "shared.txt": "feature-a-shared-old-needle\n", + }, + "qa-a": { + "shared.txt": "qa-a-shared-old-needle\n", + }, + "release": { + "release.txt": "release-stable-needle\n", + }, + }) + + 
multiRenameRunCase(t, multiRenameCase{ + repositoryDir: repositoryDir, + initialBranches: []string{"feature-a", "qa-a", "release"}, + finalBranches: []string{"feature-b", "qa-b", "release"}, + expectedTombstones: []string{"shared.txt"}, + mutate: func(t *testing.T, repositoryDir string) { + multiRenameBranch(t, repositoryDir, "feature-a", "feature-b") + multiRenameBranch(t, repositoryDir, "qa-a", "qa-b") + multiRenameCheckout(t, repositoryDir, "feature-b") + multiRenameWriteAndCommit(t, repositoryDir, map[string]string{ + "shared.txt": "renamed-branches-shared-blob-needle\n", + }, "converge feature-b") + multiRenameCheckout(t, repositoryDir, "qa-b") + multiRenameWriteAndCommit(t, repositoryDir, map[string]string{ + "shared.txt": "renamed-branches-shared-blob-needle\n", + }, "converge qa-b") + }, + checks: []multiRenameCheck{ + { + pattern: "renamed-branches-shared-blob-needle", + unfiltered: []string{"shared.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": {"shared.txt"}, + "qa-a": nil, + "qa-b": {"shared.txt"}, + "release": nil, + }, + }, + { + pattern: "feature-a-shared-old-needle", + unfiltered: nil, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "qa-a-shared-old-needle", + unfiltered: nil, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "release-stable-needle", + unfiltered: []string{"release.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": {"release.txt"}, + }, + }, + }, + }) +} + +func TestIndexGitRepo_DeltaMultipleBranchRenamesAndBranchOrderChanges(t *testing.T) { + t.Parallel() + + repositoryDir := multiRenameInitRepository(t, map[string]map[string]string{ + "feature-a": { + "feature.txt": "feature-a-order-needle\n", + }, + "qa-a": { + "qa.txt": 
"qa-a-order-needle\n", + }, + "release": { + "release.txt": "release-order-needle\n", + }, + }) + + multiRenameRunCase(t, multiRenameCase{ + repositoryDir: repositoryDir, + initialBranches: []string{"feature-a", "release", "qa-a"}, + finalBranches: []string{"qa-b", "feature-b", "release"}, + mutate: func(t *testing.T, repositoryDir string) { + multiRenameBranch(t, repositoryDir, "feature-a", "feature-b") + multiRenameBranch(t, repositoryDir, "qa-a", "qa-b") + }, + checks: []multiRenameCheck{ + { + pattern: "feature-a-order-needle", + unfiltered: []string{"feature.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": {"feature.txt"}, + "qa-a": nil, + "qa-b": nil, + "release": nil, + }, + }, + { + pattern: "qa-a-order-needle", + unfiltered: []string{"qa.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": {"qa.txt"}, + "release": nil, + }, + }, + { + pattern: "release-order-needle", + unfiltered: []string{"release.txt"}, + branches: map[string][]string{ + "feature-a": nil, + "feature-b": nil, + "qa-a": nil, + "qa-b": nil, + "release": {"release.txt"}, + }, + }, + }, + }) +} + +type multiRenameCase struct { + repositoryDir string + initialBranches []string + finalBranches []string + expectedTombstones []string + mutate func(t *testing.T, repositoryDir string) + checks []multiRenameCheck +} + +type multiRenameCheck struct { + pattern string + unfiltered []string + branches map[string][]string +} + +type multiRenameSearchResult struct { + FileName string + Content string + Version string + Branches []string +} + +func multiRenameRunCase(t *testing.T, tc multiRenameCase) { + t.Helper() + + indexDir := t.TempDir() + initialOpts := multiRenameIndexOptions(tc.repositoryDir, indexDir, tc.initialBranches) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + tc.mutate(t, tc.repositoryDir) + + deltaOpts := multiRenameIndexOptions(tc.repositoryDir, 
indexDir, tc.finalBranches) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := multiRenameIndexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Error("expected delta build to be attempted") + } + if normalBuildCalled { + t.Error("expected multiple branch renames to use delta, got normal build fallback") + } + + cleanIndexDir := t.TempDir() + cleanOpts := multiRenameIndexOptions(tc.repositoryDir, cleanIndexDir, tc.finalBranches) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + multiRenameAssertRepositoryMetadataMatchesClean(t, indexDir, cleanIndexDir) + multiRenameAssertSearchesMatchClean(t, indexDir, cleanIndexDir, tc.checks) + if len(tc.expectedTombstones) > 0 { + multiRenameAssertTombstones(t, indexDir, tc.expectedTombstones) + } +} + +func multiRenameInitRepository(t *testing.T, branchFiles map[string]map[string]string) string { + t.Helper() + + repositoryDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "main") + multiRenameWriteAndCommit(t, repositoryDir, map[string]string{ + "base.txt": "base-needle\n", + }, "base") + + branchNames := make([]string, 0, len(branchFiles)) + for branch := range branchFiles { + branchNames = append(branchNames, branch) + } + sort.Strings(branchNames) + for _, branch := range branchNames { + runGit(t, repositoryDir, "checkout", "main") + runGit(t, repositoryDir, "checkout", "-b", branch) + multiRenameWriteAndCommit(t, repositoryDir, branchFiles[branch], "add "+branch) + } + + runGit(t, repositoryDir, "checkout", "main") + return repositoryDir +} + +func multiRenameIndexOptions(repositoryDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: append([]string(nil), branches...), + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repository"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func 
multiRenameIndexGitRepoWithPrepareSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func multiRenameAssertRepositoryMetadataMatchesClean(t *testing.T, deltaIndexDir, cleanIndexDir string) { + t.Helper() + + deltaRepo := indexedRepositoryForTest(t, deltaIndexDir, "repository") + cleanRepo := indexedRepositoryForTest(t, cleanIndexDir, "repository") + if diff := cmp.Diff(cleanRepo.Branches, deltaRepo.Branches); diff != "" { + t.Errorf("repository branches mismatch against clean rebuild (-want +got):\n%s", diff) + } +} + +func multiRenameAssertSearchesMatchClean(t *testing.T, deltaIndexDir, cleanIndexDir string, checks []multiRenameCheck) { + t.Helper() + + deltaRepo := indexedRepositoryForTest(t, deltaIndexDir, "repository") + cleanRepo := indexedRepositoryForTest(t, cleanIndexDir, "repository") + + for _, check := range checks { + t.Run("unfiltered/"+check.pattern, func(t *testing.T) { + delta := multiRenameSearch(t, deltaIndexDir, multiRenameSubstringQuery(check.pattern)) + clean := multiRenameSearch(t, cleanIndexDir, multiRenameSubstringQuery(check.pattern)) + multiRenameAssertResultNames(t, "delta unfiltered", check.unfiltered, delta) + 
multiRenameAssertResultNames(t, "clean unfiltered", check.unfiltered, clean) + if diff := cmp.Diff(clean, delta); diff != "" { + t.Errorf("unfiltered search mismatch against clean rebuild (-want +got):\n%s", diff) + } + }) + + branchNames := make([]string, 0, len(check.branches)) + for branch := range check.branches { + branchNames = append(branchNames, branch) + } + sort.Strings(branchNames) + for _, branch := range branchNames { + want := check.branches[branch] + t.Run("branch/"+check.pattern+"/"+branch, func(t *testing.T) { + q := query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, multiRenameSubstringQuery(check.pattern)) + delta := multiRenameSearch(t, deltaIndexDir, q) + clean := multiRenameSearch(t, cleanIndexDir, q) + multiRenameAssertResultNames(t, "delta branch:"+branch, want, delta) + multiRenameAssertResultNames(t, "clean branch:"+branch, want, clean) + if diff := cmp.Diff(clean, delta); diff != "" { + t.Errorf("branch:%s search mismatch against clean rebuild (-want +got):\n%s", branch, diff) + } + }) + + t.Run("branchesrepos/"+check.pattern+"/"+branch, func(t *testing.T) { + deltaQ := query.NewAnd(query.NewSingleBranchesRepos(branch, deltaRepo.ID), multiRenameSubstringQuery(check.pattern)) + cleanQ := query.NewAnd(query.NewSingleBranchesRepos(branch, cleanRepo.ID), multiRenameSubstringQuery(check.pattern)) + delta := multiRenameSearch(t, deltaIndexDir, deltaQ) + clean := multiRenameSearch(t, cleanIndexDir, cleanQ) + multiRenameAssertResultNames(t, "delta BranchesRepos:"+branch, want, delta) + multiRenameAssertResultNames(t, "clean BranchesRepos:"+branch, want, clean) + if diff := cmp.Diff(clean, delta); diff != "" { + t.Errorf("BranchesRepos %s search mismatch against clean rebuild (-want +got):\n%s", branch, diff) + } + }) + } + } +} + +func multiRenameSearch(t *testing.T, indexDir string, q query.Q) []multiRenameSearchResult { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + 
t.Fatalf("NewDirectorySearcher(%s): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true}) + if err != nil { + t.Fatalf("Search(%s): %v", q.String(), err) + } + + results := make([]multiRenameSearchResult, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) + sort.Strings(branches) + results = append(results, multiRenameSearchResult{ + FileName: file.FileName, + Content: string(file.Content), + Version: file.Version, + Branches: branches, + }) + } + sort.Slice(results, func(i, j int) bool { + a, b := results[i], results[j] + if a.FileName != b.FileName { + return a.FileName < b.FileName + } + if a.Content != b.Content { + return a.Content < b.Content + } + return strings.Join(a.Branches, "\x00") < strings.Join(b.Branches, "\x00") + }) + return results +} + +func multiRenameSubstringQuery(pattern string) query.Q { + return &query.Substring{Pattern: pattern, Content: true} +} + +func multiRenameAssertResultNames(t *testing.T, label string, want []string, results []multiRenameSearchResult) { + t.Helper() + + if want == nil { + want = []string{} + } + got := make([]string, 0, len(results)) + for _, result := range results { + got = append(got, result.FileName) + } + sort.Strings(got) + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("%s file names mismatch (-want +got):\n%s", label, diff) + } +} + +func multiRenameAssertTombstones(t *testing.T, indexDir string, wantPaths []string) { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: "repository", + }, + } + + got := map[string]struct{}{} + for _, shard := range opts.FindAllShards() { + repositories, _, err := index.ReadMetadataPathAlive(shard) + if err != nil { + t.Fatalf("ReadMetadataPathAlive(%q): %v", shard, err) + } + for _, repository := range repositories { + if repository.Name != "repository" { 
+ continue + } + for path := range repository.FileTombstones { + got[path] = struct{}{} + } + } + } + + for _, path := range wantPaths { + if _, ok := got[path]; !ok { + t.Errorf("expected old shards to tombstone %q, got tombstones %v", path, multiRenameSortedKeys(got)) + } + } +} + +func multiRenameSortedKeys(m map[string]struct{}) []string { + keys := make([]string, 0, len(m)) + for key := range m { + keys = append(keys, key) + } + sort.Strings(keys) + return keys +} + +func multiRenameBranch(t *testing.T, repositoryDir, oldName, newName string) { + t.Helper() + runGit(t, repositoryDir, "branch", "-m", oldName, newName) +} + +func multiRenameCheckout(t *testing.T, repositoryDir, branch string) { + t.Helper() + runGit(t, repositoryDir, "checkout", branch) +} + +func multiRenameWriteAndCommit(t *testing.T, repositoryDir string, files map[string]string, message string) { + t.Helper() + + for name, content := range files { + path := filepath.Join(repositoryDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } + runGit(t, repositoryDir, "add", "-A") + runGit(t, repositoryDir, "commit", "-m", message) +} + +// ---- delta_multibranch_add_test.go ---- + +const addBranchRepoID uint32 = 4242 + +func TestIndexGitRepo_DeltaMultibranchAddBranches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + initialFiles map[string]string + mutate func(t *testing.T, repoDir string) + finalBranches []string + lookups []addBranchLookup + }{ + { + name: "add one new branch identical to existing branch", + initialFiles: map[string]string{ + "common.txt": "common-token\n", + "shared.txt": "shared-token\n", + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "branch", "release", "main") + }, + finalBranches: []string{"main", "release"}, + lookups: 
[]addBranchLookup{ + {name: "unfiltered shared content", pattern: "shared-token", want: []string{"shared.txt"}}, + {name: "main all files", branch: "main", want: []string{"common.txt", "shared.txt"}}, + {name: "release all files", branch: "release", want: []string{"common.txt", "shared.txt"}}, + {name: "release common content", pattern: "common-token", branch: "release", want: []string{"common.txt"}}, + }, + }, + { + name: "add one new branch with one changed path", + initialFiles: map[string]string{ + "release.txt": "base-release-token\n", + "stable.txt": "stable-token\n", + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "checkout", "-b", "release", "main") + addBranchWriteCommit(t, repoDir, map[string]string{ + "release.txt": "release-only-token\n", + }, nil, "release changes one path") + }, + finalBranches: []string{"main", "release"}, + lookups: []addBranchLookup{ + {name: "main keeps old content", pattern: "base-release-token", branch: "main", want: []string{"release.txt"}}, + {name: "release misses old content", pattern: "base-release-token", branch: "release", want: nil}, + {name: "release sees changed content", pattern: "release-only-token", branch: "release", want: []string{"release.txt"}}, + {name: "main misses changed content", pattern: "release-only-token", branch: "main", want: nil}, + {name: "stable shared on release", pattern: "stable-token", branch: "release", want: []string{"stable.txt"}}, + }, + }, + { + name: "add one new branch with deletions relative to existing branch", + initialFiles: map[string]string{ + "keep.txt": "keep-token\n", + "large.txt": "large-token\n", + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "checkout", "-b", "slim", "main") + addBranchWriteCommit(t, repoDir, nil, []string{"large.txt"}, "slim deletes large path") + }, + finalBranches: []string{"main", "slim"}, + lookups: []addBranchLookup{ + {name: "main keeps deleted-on-new-branch file", pattern: "large-token", branch: 
"main", want: []string{"large.txt"}}, + {name: "slim misses deleted file", pattern: "large-token", branch: "slim", want: nil}, + {name: "slim keeps shared file", pattern: "keep-token", branch: "slim", want: []string{"keep.txt"}}, + {name: "slim all files", branch: "slim", want: []string{"keep.txt"}}, + }, + }, + { + name: "add multiple new branches at once", + initialFiles: map[string]string{ + "dev.txt": "base-dev-token\n", + "main.txt": "main-token\n", + "release.txt": "base-release-token\n", + "shared.txt": "shared-token\n", + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "checkout", "-b", "release", "main") + addBranchWriteCommit(t, repoDir, map[string]string{ + "release.txt": "release-multi-token\n", + }, nil, "release changes one path") + runGit(t, repoDir, "checkout", "main") + runGit(t, repoDir, "checkout", "-b", "dev", "main") + addBranchWriteCommit(t, repoDir, map[string]string{ + "dev.txt": "dev-multi-token\n", + }, nil, "dev changes one path") + }, + finalBranches: []string{"main", "release", "dev"}, + lookups: []addBranchLookup{ + {name: "main all files", branch: "main", want: []string{"dev.txt", "main.txt", "release.txt", "shared.txt"}}, + {name: "release all files", branch: "release", want: []string{"dev.txt", "main.txt", "release.txt", "shared.txt"}}, + {name: "dev all files", branch: "dev", want: []string{"dev.txt", "main.txt", "release.txt", "shared.txt"}}, + {name: "release sees release content", pattern: "release-multi-token", branch: "release", want: []string{"release.txt"}}, + {name: "main misses release content", pattern: "release-multi-token", branch: "main", want: nil}, + {name: "dev sees dev content", pattern: "dev-multi-token", branch: "dev", want: []string{"dev.txt"}}, + {name: "main misses dev content", pattern: "dev-multi-token", branch: "main", want: nil}, + }, + }, + { + name: "add one branch that shares some docs and diverges on others", + initialFiles: map[string]string{ + "a.txt": "a-main-token\n", + 
"b.txt": "b-shared-token\n", + "d.txt": "d-main-token\n", + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "checkout", "-b", "feature", "main") + addBranchWriteCommit(t, repoDir, map[string]string{ + "a.txt": "a-feature-token\n", + "c.txt": "c-feature-token\n", + }, []string{"d.txt"}, "feature shares diverges adds deletes") + }, + finalBranches: []string{"main", "feature"}, + lookups: []addBranchLookup{ + {name: "main all files", branch: "main", want: []string{"a.txt", "b.txt", "d.txt"}}, + {name: "feature all files", branch: "feature", want: []string{"a.txt", "b.txt", "c.txt"}}, + {name: "main keeps old a", pattern: "a-main-token", branch: "main", want: []string{"a.txt"}}, + {name: "feature misses old a", pattern: "a-main-token", branch: "feature", want: nil}, + {name: "feature sees changed a", pattern: "a-feature-token", branch: "feature", want: []string{"a.txt"}}, + {name: "shared b on feature", pattern: "b-shared-token", branch: "feature", want: []string{"b.txt"}}, + {name: "feature sees added c", pattern: "c-feature-token", branch: "feature", want: []string{"c.txt"}}, + {name: "main misses added c", pattern: "c-feature-token", branch: "main", want: nil}, + {name: "main keeps feature-deleted d", pattern: "d-main-token", branch: "main", want: []string{"d.txt"}}, + {name: "feature misses deleted d", pattern: "d-main-token", branch: "feature", want: nil}, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + repoDir := addBranchInitRepo(t, tc.initialFiles) + indexDir := t.TempDir() + initialOpts := addBranchOptions(repoDir, indexDir, []string{"main"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + tc.mutate(t, repoDir) + + deltaOpts := addBranchOptions(repoDir, indexDir, tc.finalBranches) + deltaOpts.BuildOptions.IsDelta = true + deltaCalled, normalCalled := indexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaCalled { + 
t.Errorf("expected delta build to be attempted") + } + if normalCalled { + t.Errorf("expected branch-add update to use delta without falling back to a normal build") + } + + cleanIndexDir := t.TempDir() + cleanOpts := addBranchOptions(repoDir, cleanIndexDir, tc.finalBranches) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean full IndexGitRepo: %v", err) + } + + addBranchAssertMetadata(t, indexDir, cleanIndexDir, tc.finalBranches) + addBranchAssertLiveStats(t, indexDir, cleanIndexDir) + for _, lookup := range tc.lookups { + addBranchAssertLookup(t, indexDir, cleanIndexDir, lookup) + } + }) + } +} + +type addBranchLookup struct { + name string + pattern string + branch string + want []string +} + +func addBranchInitRepo(t *testing.T, files map[string]string) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "main") + addBranchWriteCommit(t, repoDir, files, nil, "initial main") + return repoDir +} + +func addBranchWriteCommit(t *testing.T, repoDir string, files map[string]string, deletes []string, message string) { + t.Helper() + + for name, content := range files { + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } + for _, name := range deletes { + path := filepath.Join(repoDir, name) + if err := os.Remove(path); err != nil { + t.Fatalf("Remove(%q): %v", path, err) + } + } + + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", message) +} + +func addBranchOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: branches, + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + DeltaAdmissionThresholds: DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 100, + MaxPhysicalLiveBytesRatio: 100, 
+ MaxTombstonePathRatio: 100, + MaxShardFanoutRatio: 100, + }, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: addBranchRepoID, + Name: "repository", + }, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func addBranchAssertMetadata(t *testing.T, deltaIndexDir, cleanIndexDir string, wantBranches []string) { + t.Helper() + + deltaRepo := addBranchIndexedRepository(t, deltaIndexDir) + cleanRepo := addBranchIndexedRepository(t, cleanIndexDir) + + if got := repositoryBranchNames(deltaRepo.Branches); !cmp.Equal(got, wantBranches) { + t.Fatalf("delta branch names mismatch (-want +got):\n%s", cmp.Diff(wantBranches, got)) + } + if diff := cmp.Diff(cleanRepo.Branches, deltaRepo.Branches); diff != "" { + t.Fatalf("delta branch metadata differs from clean full rebuild (-want +got):\n%s", diff) + } +} + +func addBranchAssertLiveStats(t *testing.T, deltaIndexDir, cleanIndexDir string) { + t.Helper() + + deltaRepo := addBranchIndexedRepository(t, deltaIndexDir) + cleanRepo := addBranchIndexedRepository(t, cleanIndexDir) + if deltaRepo.DeltaStats == nil { + t.Fatal("delta index DeltaStats is nil") + } + if cleanRepo.DeltaStats == nil { + t.Fatal("clean index DeltaStats is nil") + } + + got := addBranchLiveStats{ + LiveIndexedBytes: deltaRepo.DeltaStats.LiveIndexedBytes, + LiveDocumentCount: deltaRepo.DeltaStats.LiveDocumentCount, + LivePathCount: deltaRepo.DeltaStats.LivePathCount, + } + want := addBranchLiveStats{ + LiveIndexedBytes: cleanRepo.DeltaStats.LiveIndexedBytes, + LiveDocumentCount: cleanRepo.DeltaStats.LiveDocumentCount, + LivePathCount: cleanRepo.DeltaStats.LivePathCount, + } + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("delta live stats differ from clean full rebuild (-want +got):\n%s", diff) + } +} + +func addBranchIndexedRepository(t *testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: addBranchRepoID, + 
Name: "repository",
+		},
+	}
+
+	repo, _, ok, err := opts.FindRepositoryMetadata()
+	if err != nil {
+		t.Fatalf("FindRepositoryMetadata: %v", err)
+	}
+	if !ok {
+		t.Fatal("FindRepositoryMetadata: repository not found")
+	}
+	return repo
+}
+
+// addBranchLiveStats is the live-index subset of RepositoryDeltaStats, used to
+// compare a delta-updated index against a clean full rebuild.
+type addBranchLiveStats struct {
+	LiveIndexedBytes  uint64
+	LiveDocumentCount uint64
+	LivePathCount     uint64
+}
+
+// addBranchAssertLookup runs lookup against both index directories. An empty
+// branch runs the query unfiltered once; otherwise the same query is exercised
+// through both a query.Branch filter and an equivalent BranchesRepos filter.
+func addBranchAssertLookup(t *testing.T, deltaIndexDir, cleanIndexDir string, lookup addBranchLookup) {
+	t.Helper()
+
+	base := addBranchContentQuery(lookup.pattern)
+	if lookup.branch == "" {
+		addBranchAssertQuery(t, deltaIndexDir, cleanIndexDir, lookup.name+"/unfiltered", base, lookup.want)
+		return
+	}
+
+	branchQuery := query.NewAnd(&query.Branch{Pattern: lookup.branch, Exact: true}, base)
+	addBranchAssertQuery(t, deltaIndexDir, cleanIndexDir, lookup.name+"/branch-query", branchQuery, lookup.want)
+
+	branchesReposQuery := query.NewAnd(query.NewSingleBranchesRepos(lookup.branch, addBranchRepoID), base)
+	addBranchAssertQuery(t, deltaIndexDir, cleanIndexDir, lookup.name+"/branches-repos", branchesReposQuery, lookup.want)
+}
+
+// addBranchContentQuery maps an empty pattern to a match-all query and any
+// other pattern to a substring query.
+func addBranchContentQuery(pattern string) query.Q {
+	if pattern == "" {
+		return &query.Const{Value: true}
+	}
+	return &query.Substring{Pattern: pattern}
+}
+
+// addBranchAssertQuery asserts that the delta index returns exactly the same
+// hits as the clean full rebuild, and that the hit file names equal want.
+func addBranchAssertQuery(t *testing.T, deltaIndexDir, cleanIndexDir, name string, q query.Q, want []string) {
+	t.Helper()
+
+	delta := addBranchSearchHits(t, deltaIndexDir, q)
+	clean := addBranchSearchHits(t, cleanIndexDir, q)
+	if diff := cmp.Diff(clean, delta); diff != "" {
+		t.Fatalf("%s: delta results differ from clean full rebuild (-want +got):\n%s", name, diff)
+	}
+
+	gotNames := addBranchHitFileNames(delta)
+	// Copy want before it is sorted below so callers' slices are not mutated.
+	want = append([]string(nil), want...)
+ if want == nil { + want = []string{} + } + sort.Strings(want) + if diff := cmp.Diff(want, gotNames); diff != "" { + t.Fatalf("%s: result file names mismatch (-want +got):\n%s", name, diff) + } +} + +type addBranchSearchHit struct { + FileName string + Content string +} + +func addBranchSearchHits(t *testing.T, indexDir string, q query.Q) []addBranchSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true}) + if err != nil { + t.Fatalf("Search(%s): %v", q, err) + } + + hits := make([]addBranchSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + hits = append(hits, addBranchSearchHit{ + FileName: file.FileName, + Content: string(file.Content), + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + return hits[i].Content < hits[j].Content + }) + return hits +} + +func addBranchHitFileNames(hits []addBranchSearchHit) []string { + names := make([]string, 0, len(hits)) + for _, hit := range hits { + names = append(names, hit.FileName) + } + sort.Strings(names) + return names +} + +// ---- delta_multibranch_remove_test.go ---- + +const removeBranchRepoID uint32 = 42 + +type removeBranchTree map[string]string + +type removeBranchDocument struct { + Name string + Content string + Branches []string +} + +func TestIndexGitRepo_DeltaRemoveBranchNoContentChanges(t *testing.T) { + t.Parallel() + + removeBranchRunScenario(t, removeBranchScenario{ + initialBranches: []string{"main", "release"}, + initialTrees: map[string]removeBranchTree{ + "main": { + "shared.txt": "shared-retained-needle\n", + }, + "release": { + "shared.txt": "shared-retained-needle\n", + }, + }, + finalBranches: []string{"main"}, + removedBranches: []string{"release"}, + 
retainedBranches: []string{"main"}, + mutate: func(t *testing.T, repoDir string) { + removeBranchDeleteBranches(t, repoDir, "release") + }, + patterns: []string{ + "shared-retained-needle", + "release-only-needle", + }, + }) +} + +func TestIndexGitRepo_DeltaRemoveBranchOnlyFiles(t *testing.T) { + t.Parallel() + + removeBranchRunScenario(t, removeBranchScenario{ + initialBranches: []string{"main", "release"}, + initialTrees: map[string]removeBranchTree{ + "main": { + "main.txt": "main-retained-needle\n", + }, + "release": { + "release-only.txt": "release-only-needle\n", + }, + }, + finalBranches: []string{"main"}, + removedBranches: []string{"release"}, + retainedBranches: []string{"main"}, + mutate: func(t *testing.T, repoDir string) { + removeBranchDeleteBranches(t, repoDir, "release") + }, + patterns: []string{ + "main-retained-needle", + "release-only-needle", + }, + }) +} + +func TestIndexGitRepo_DeltaRemoveBranchSamePathDifferentContent(t *testing.T) { + t.Parallel() + + removeBranchRunScenario(t, removeBranchScenario{ + initialBranches: []string{"main", "release"}, + initialTrees: map[string]removeBranchTree{ + "main": { + "shared.txt": "main-shared-retained-needle\n", + }, + "release": { + "shared.txt": "release-shared-stale-needle\n", + }, + }, + finalBranches: []string{"main"}, + removedBranches: []string{"release"}, + retainedBranches: []string{"main"}, + mutate: func(t *testing.T, repoDir string) { + removeBranchDeleteBranches(t, repoDir, "release") + }, + patterns: []string{ + "main-shared-retained-needle", + "release-shared-stale-needle", + }, + }) +} + +func TestIndexGitRepo_DeltaRemoveMultipleBranches(t *testing.T) { + t.Parallel() + + removeBranchRunScenario(t, removeBranchScenario{ + initialBranches: []string{"main", "release", "dev", "qa"}, + initialTrees: map[string]removeBranchTree{ + "main": { + "main.txt": "main-retained-needle\n", + }, + "release": { + "release.txt": "release-stale-needle\n", + }, + "dev": { + "dev.txt": "dev-stale-needle\n", 
+ }, + "qa": { + "qa.txt": "qa-stale-needle\n", + }, + }, + finalBranches: []string{"main"}, + removedBranches: []string{"release", "dev", "qa"}, + retainedBranches: []string{"main"}, + mutate: func(t *testing.T, repoDir string) { + removeBranchDeleteBranches(t, repoDir, "release", "dev", "qa") + }, + patterns: []string{ + "main-retained-needle", + "release-stale-needle", + "dev-stale-needle", + "qa-stale-needle", + }, + }) +} + +func TestIndexGitRepo_DeltaRemoveBranchAndModifyRetainedBranch(t *testing.T) { + t.Parallel() + + removeBranchRunScenario(t, removeBranchScenario{ + initialBranches: []string{"main", "release"}, + initialTrees: map[string]removeBranchTree{ + "main": { + "a.txt": "main-old-stale-needle\n", + }, + "release": { + "release-only.txt": "release-only-needle\n", + }, + }, + finalBranches: []string{"main"}, + removedBranches: []string{"release"}, + retainedBranches: []string{"main"}, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "checkout", "main") + removeBranchWriteFile(t, repoDir, "a.txt", "main-new-retained-needle\n") + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "modify retained main") + removeBranchDeleteBranches(t, repoDir, "release") + }, + patterns: []string{ + "main-new-retained-needle", + "main-old-stale-needle", + "release-only-needle", + }, + }) +} + +type removeBranchScenario struct { + initialBranches []string + initialTrees map[string]removeBranchTree + finalBranches []string + + removedBranches []string + retainedBranches []string + + mutate func(t *testing.T, repoDir string) + patterns []string +} + +func removeBranchRunScenario(t *testing.T, scenario removeBranchScenario) { + t.Helper() + + repoDir := removeBranchInitRepository(t, scenario.initialBranches, scenario.initialTrees) + indexDir := t.TempDir() + + initialOpts := removeBranchOptions(repoDir, indexDir, scenario.initialBranches) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) 
+ } + + scenario.mutate(t, repoDir) + + deltaOpts := removeBranchOptions(repoDir, indexDir, scenario.finalBranches) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := removeBranchIndexWithSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Errorf("expected delta build to be attempted") + } + if normalBuildCalled { + t.Errorf("expected branch removal to use delta build without falling back to normal build") + } + + cleanIndexDir := t.TempDir() + cleanOpts := removeBranchOptions(repoDir, cleanIndexDir, scenario.finalBranches) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + removeBranchAssertRepositoryMatchesClean(t, indexDir, cleanIndexDir) + removeBranchAssertIndexMatchesClean(t, indexDir, cleanIndexDir, scenario.retainedBranches, scenario.removedBranches, scenario.patterns) +} + +func removeBranchOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: removeBranchRepoID, + Name: "repository", + }, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func removeBranchIndexWithSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + 
prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func removeBranchAssertRepositoryMatchesClean(t *testing.T, indexDir, cleanIndexDir string) { + t.Helper() + + got := removeBranchIndexedRepositoryForTest(t, indexDir) + want := removeBranchIndexedRepositoryForTest(t, cleanIndexDir) + if diff := cmp.Diff(want.Branches, got.Branches); diff != "" { + t.Errorf("indexed branches mismatch against clean full rebuild (-want +got):\n%s", diff) + } +} + +func removeBranchIndexedRepositoryForTest(t *testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: removeBranchRepoID, + Name: "repository", + }, + } + + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository not found") + } + + return repo +} + +func removeBranchAssertIndexMatchesClean(t *testing.T, indexDir, cleanIndexDir string, retainedBranches, removedBranches, patterns []string) { + t.Helper() + + removeBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "unfiltered/all", &query.Const{Value: true}) + for _, pattern := range patterns { + removeBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "unfiltered/"+pattern, &query.Substring{Pattern: pattern}) + } + + for _, branch := range retainedBranches { + removeBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branch/"+branch+"/all", &query.Branch{Pattern: branch, Exact: true}) + removeBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branchesrepos/"+branch+"/all", query.NewSingleBranchesRepos(branch, removeBranchRepoID)) + for _, pattern := range patterns { + substr := &query.Substring{Pattern: pattern} + removeBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branch/"+branch+"/"+pattern, 
query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, substr))
+			removeBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branchesrepos/"+branch+"/"+pattern, query.NewAnd(query.NewSingleBranchesRepos(branch, removeBranchRepoID), substr))
+		}
+	}
+
+	// Removed branches must yield no hits under either branch-filter form.
+	for _, branch := range removedBranches {
+		removeBranchAssertNoResults(t, indexDir, "removed branch/"+branch, &query.Branch{Pattern: branch, Exact: true})
+		removeBranchAssertNoResults(t, indexDir, "removed branchesrepos/"+branch, query.NewSingleBranchesRepos(branch, removeBranchRepoID))
+	}
+}
+
+// removeBranchAssertQueryMatchesClean asserts that q returns identical
+// documents from the delta-updated index and the clean full rebuild.
+func removeBranchAssertQueryMatchesClean(t *testing.T, indexDir, cleanIndexDir, name string, q query.Q) {
+	t.Helper()
+
+	got := removeBranchSearchDocuments(t, indexDir, q)
+	want := removeBranchSearchDocuments(t, cleanIndexDir, q)
+	if diff := cmp.Diff(want, got); diff != "" {
+		t.Errorf("%s mismatch against clean full rebuild (-want +got):\n%s", name, diff)
+	}
+}
+
+// removeBranchAssertNoResults asserts that q matches nothing in indexDir.
+func removeBranchAssertNoResults(t *testing.T, indexDir, name string, q query.Q) {
+	t.Helper()
+
+	if got := removeBranchSearchDocuments(t, indexDir, q); len(got) != 0 {
+		t.Errorf("%s returned removed-branch results: %+v", name, got)
+	}
+}
+
+// removeBranchSearchDocuments runs q over indexDir and returns the matching
+// documents (name, full content, sorted branches) in a deterministic order.
+func removeBranchSearchDocuments(t *testing.T, indexDir string, q query.Q) []removeBranchDocument {
+	t.Helper()
+
+	searcher, err := search.NewDirectorySearcher(indexDir)
+	if err != nil {
+		t.Fatalf("NewDirectorySearcher(%s): %v", indexDir, err)
+	}
+	defer searcher.Close()
+
+	result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true})
+	if err != nil {
+		t.Fatalf("Search(%s): %v", q.String(), err)
+	}
+
+	docs := make([]removeBranchDocument, 0, len(result.Files))
+	for _, file := range result.Files {
+		branches := append([]string(nil), file.Branches...)
+ sort.Strings(branches) + docs = append(docs, removeBranchDocument{ + Name: file.FileName, + Content: string(file.Content), + Branches: branches, + }) + } + sort.Slice(docs, func(i, j int) bool { + if docs[i].Name != docs[j].Name { + return docs[i].Name < docs[j].Name + } + if docs[i].Content != docs[j].Content { + return docs[i].Content < docs[j].Content + } + return strings.Join(docs[i].Branches, "\x00") < strings.Join(docs[j].Branches, "\x00") + }) + return docs +} + +func removeBranchInitRepository(t *testing.T, branches []string, trees map[string]removeBranchTree) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "main") + runGit(t, repoDir, "commit", "--allow-empty", "-m", "base") + base := removeBranchGitOutput(t, repoDir, "rev-parse", "HEAD") + + for _, branch := range branches { + runGit(t, repoDir, "checkout", "-B", branch, base) + removeBranchReplaceTree(t, repoDir, trees[branch]) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "--allow-empty", "-m", branch+" initial") + } + runGit(t, repoDir, "checkout", branches[0]) + return repoDir +} + +func removeBranchReplaceTree(t *testing.T, repoDir string, tree removeBranchTree) { + t.Helper() + + runGit(t, repoDir, "rm", "-r", "--ignore-unmatch", ".") + + names := make([]string, 0, len(tree)) + for name := range tree { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + removeBranchWriteFile(t, repoDir, name, tree[name]) + } +} + +func removeBranchWriteFile(t *testing.T, repoDir, name, content string) { + t.Helper() + + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } +} + +func removeBranchDeleteBranches(t *testing.T, repoDir string, branches ...string) { + t.Helper() + + runGit(t, repoDir, 
"checkout", "main") + for _, branch := range branches { + runGit(t, repoDir, "branch", "-D", branch) + } +} + +func removeBranchGitOutput(t *testing.T, cwd string, args ...string) string { + t.Helper() + + if err := os.MkdirAll(cwd, 0o755); err != nil { + t.Fatalf("ensuring path %q exists: %s", cwd, err) + } + + cmd := exec.Command("git", args...) + cmd.Dir = cwd + cmd.Env = append(os.Environ(), + "GIT_CONFIG_GLOBAL=", + "GIT_CONFIG_SYSTEM=", + "GIT_COMMITTER_NAME=Kierkegaard", + "GIT_COMMITTER_EMAIL=soren@apache.com", + "GIT_AUTHOR_NAME=Kierkegaard", + "GIT_AUTHOR_EMAIL=soren@apache.com", + ) + + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("execution error: %v, output %s", err, out) + } + return strings.TrimSpace(string(out)) +} + +// ---- delta_multibranch_combined_test.go ---- + +const ( + combinedBranchRepoID = 4242 + combinedBranchRepoName = "repository" +) + +type combinedBranchFiles map[string]string + +type combinedBranchExpectedLookup struct { + branch string + pattern string + wantFiles []string +} + +func TestIndexGitRepo_DeltaMultiBranchCombinedRenameAddRemove(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + initialBranches []string + finalBranches []string + initialFiles map[string]combinedBranchFiles + mutate func(t *testing.T, repoDir string) + patterns []string + expectedLookups []combinedBranchExpectedLookup + expectedTombstones []string + }{ + { + name: "rename one branch and add one branch", + initialBranches: []string{"feature-a", "release"}, + finalBranches: []string{"feature-b", "release", "dev"}, + initialFiles: map[string]combinedBranchFiles{ + "feature-a": {"branch.txt": "feature-a-stale-needle\n"}, + "release": {"release.txt": "release-unchanged-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + combinedBranchRenameBranch(t, repoDir, "feature-a", "feature-b") + combinedBranchCheckoutWriteCommit(t, repoDir, "feature-b", combinedBranchFiles{ + "branch.txt": "feature-b-renamed-needle\n", 
+ }, "update feature-b") + combinedBranchCheckoutNewBranchWriteCommit(t, repoDir, "dev", "release", combinedBranchFiles{ + "dev.txt": "dev-added-needle\n", + }, "add dev") + }, + patterns: []string{ + "feature-a-stale-needle", + "feature-b-renamed-needle", + "release-unchanged-needle", + "dev-added-needle", + }, + expectedLookups: []combinedBranchExpectedLookup{ + {branch: "feature-b", pattern: "feature-b-renamed-needle", wantFiles: []string{"branch.txt"}}, + {branch: "release", pattern: "release-unchanged-needle", wantFiles: []string{"release.txt"}}, + {branch: "dev", pattern: "release-unchanged-needle", wantFiles: []string{"release.txt"}}, + {branch: "dev", pattern: "dev-added-needle", wantFiles: []string{"dev.txt"}}, + }, + expectedTombstones: []string{"branch.txt"}, + }, + { + name: "rename one branch and remove one branch", + initialBranches: []string{"feature-a", "release", "dev"}, + finalBranches: []string{"feature-b", "release"}, + initialFiles: map[string]combinedBranchFiles{ + "feature-a": {"feature.txt": "feature-a-stale-needle\n"}, + "release": {"release.txt": "release-kept-needle\n"}, + "dev": {"dev.txt": "dev-removed-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + combinedBranchRenameBranch(t, repoDir, "feature-a", "feature-b") + combinedBranchCheckoutWriteCommit(t, repoDir, "feature-b", combinedBranchFiles{ + "feature.txt": "feature-b-after-remove-needle\n", + }, "update feature-b") + combinedBranchRunGit(t, repoDir, "checkout", "release") + combinedBranchDeleteBranch(t, repoDir, "dev") + }, + patterns: []string{ + "feature-a-stale-needle", + "feature-b-after-remove-needle", + "release-kept-needle", + "dev-removed-needle", + }, + expectedLookups: []combinedBranchExpectedLookup{ + {branch: "feature-b", pattern: "feature-b-after-remove-needle", wantFiles: []string{"feature.txt"}}, + {branch: "release", pattern: "release-kept-needle", wantFiles: []string{"release.txt"}}, + }, + expectedTombstones: []string{"feature.txt", "dev.txt"}, + 
}, + { + name: "add one branch and remove one branch", + initialBranches: []string{"main", "old-release"}, + finalBranches: []string{"main", "new-release"}, + initialFiles: map[string]combinedBranchFiles{ + "main": {"main.txt": "main-kept-needle\n"}, + "old-release": {"old.txt": "old-release-stale-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + combinedBranchRunGit(t, repoDir, "checkout", "main") + combinedBranchDeleteBranch(t, repoDir, "old-release") + combinedBranchCheckoutNewBranchWriteCommit(t, repoDir, "new-release", "main", combinedBranchFiles{ + "new.txt": "new-release-added-needle\n", + }, "add new-release") + }, + patterns: []string{ + "main-kept-needle", + "old-release-stale-needle", + "new-release-added-needle", + }, + expectedLookups: []combinedBranchExpectedLookup{ + {branch: "main", pattern: "main-kept-needle", wantFiles: []string{"main.txt"}}, + {branch: "new-release", pattern: "main-kept-needle", wantFiles: []string{"main.txt"}}, + {branch: "new-release", pattern: "new-release-added-needle", wantFiles: []string{"new.txt"}}, + }, + expectedTombstones: []string{"old.txt"}, + }, + { + name: "rename multiple branches, add one branch, and remove one branch", + initialBranches: []string{"feature-a", "qa-a", "old-release", "main"}, + finalBranches: []string{"feature-b", "qa-b", "new-release", "main"}, + initialFiles: map[string]combinedBranchFiles{ + "feature-a": {"feature.txt": "feature-a-stale-needle\n"}, + "qa-a": {"qa.txt": "qa-a-stale-needle\n"}, + "old-release": {"old-release.txt": "old-release-stale-needle\n"}, + "main": {"main.txt": "main-stable-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + combinedBranchRenameBranch(t, repoDir, "feature-a", "feature-b") + combinedBranchRenameBranch(t, repoDir, "qa-a", "qa-b") + combinedBranchCheckoutWriteCommit(t, repoDir, "feature-b", combinedBranchFiles{ + "feature.txt": "feature-b-final-needle\n", + }, "update feature-b") + combinedBranchCheckoutWriteCommit(t, repoDir, 
"qa-b", combinedBranchFiles{ + "qa.txt": "qa-b-final-needle\n", + }, "update qa-b") + combinedBranchRunGit(t, repoDir, "checkout", "main") + combinedBranchDeleteBranch(t, repoDir, "old-release") + combinedBranchCheckoutNewBranchWriteCommit(t, repoDir, "new-release", "main", combinedBranchFiles{ + "new-release.txt": "new-release-final-needle\n", + }, "add new-release") + }, + patterns: []string{ + "feature-a-stale-needle", + "feature-b-final-needle", + "qa-a-stale-needle", + "qa-b-final-needle", + "old-release-stale-needle", + "main-stable-needle", + "new-release-final-needle", + }, + expectedLookups: []combinedBranchExpectedLookup{ + {branch: "feature-b", pattern: "feature-b-final-needle", wantFiles: []string{"feature.txt"}}, + {branch: "qa-b", pattern: "qa-b-final-needle", wantFiles: []string{"qa.txt"}}, + {branch: "new-release", pattern: "main-stable-needle", wantFiles: []string{"main.txt"}}, + {branch: "new-release", pattern: "new-release-final-needle", wantFiles: []string{"new-release.txt"}}, + {branch: "main", pattern: "main-stable-needle", wantFiles: []string{"main.txt"}}, + }, + expectedTombstones: []string{"feature.txt", "qa.txt", "old-release.txt"}, + }, + { + name: "branch count and order change together", + initialBranches: []string{"main", "release", "dev"}, + finalBranches: []string{"qa", "main", "release-renamed"}, + initialFiles: map[string]combinedBranchFiles{ + "main": {"main.txt": "main-order-kept-needle\n"}, + "release": {"release.txt": "release-before-order-needle\n"}, + "dev": {"dev.txt": "dev-order-removed-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + combinedBranchRenameBranch(t, repoDir, "release", "release-renamed") + combinedBranchCheckoutWriteCommit(t, repoDir, "release-renamed", combinedBranchFiles{ + "release.txt": "release-renamed-order-needle\n", + }, "update release-renamed") + combinedBranchCheckoutNewBranchWriteCommit(t, repoDir, "qa", "main", combinedBranchFiles{ + "qa.txt": "qa-order-added-needle\n", + }, "add 
qa") + combinedBranchRunGit(t, repoDir, "checkout", "main") + combinedBranchDeleteBranch(t, repoDir, "dev") + }, + patterns: []string{ + "main-order-kept-needle", + "release-before-order-needle", + "release-renamed-order-needle", + "dev-order-removed-needle", + "qa-order-added-needle", + }, + expectedLookups: []combinedBranchExpectedLookup{ + {branch: "qa", pattern: "main-order-kept-needle", wantFiles: []string{"main.txt"}}, + {branch: "qa", pattern: "qa-order-added-needle", wantFiles: []string{"qa.txt"}}, + {branch: "main", pattern: "main-order-kept-needle", wantFiles: []string{"main.txt"}}, + {branch: "release-renamed", pattern: "release-renamed-order-needle", wantFiles: []string{"release.txt"}}, + }, + expectedTombstones: []string{"release.txt", "dev.txt"}, + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + repoDir := combinedBranchCreateRepository(t, test.initialBranches, test.initialFiles) + indexDir := t.TempDir() + + combinedBranchRunIndex(t, repoDir, indexDir, test.initialBranches, false) + test.mutate(t, repoDir) + + deltaBuildCalled, normalBuildCalled := combinedBranchRunIndex(t, repoDir, indexDir, test.finalBranches, true) + if !deltaBuildCalled { + t.Error("expected delta build to be attempted") + } + if normalBuildCalled { + t.Error("expected combined branch update to stay on the delta path") + } + + cleanIndexDir := t.TempDir() + combinedBranchRunIndex(t, repoDir, cleanIndexDir, test.finalBranches, false) + + combinedBranchAssertMetadataMatchesClean(t, indexDir, cleanIndexDir, test.finalBranches, test.initialBranches) + + branchesToCheck := combinedBranchUnion(test.initialBranches, test.finalBranches) + combinedBranchAssertSearchEquivalent(t, indexDir, cleanIndexDir, branchesToCheck, test.patterns) + combinedBranchAssertExpectedLookups(t, indexDir, cleanIndexDir, test.expectedLookups) + combinedBranchAssertStaleBranchesAbsent(t, indexDir, cleanIndexDir, test.initialBranches, 
test.finalBranches, test.patterns) + + if !normalBuildCalled { + combinedBranchAssertTombstones(t, indexDir, test.expectedTombstones) + } + }) + } +} + +func combinedBranchCreateRepository(t *testing.T, initialBranches []string, initialFiles map[string]combinedBranchFiles) string { + t.Helper() + + repoDir := t.TempDir() + combinedBranchRunGit(t, repoDir, "init", "-b", "seed") + combinedBranchRunGit(t, repoDir, "commit", "--allow-empty", "-m", "seed") + + for _, branch := range initialBranches { + combinedBranchRunGit(t, repoDir, "checkout", "-B", branch, "seed") + combinedBranchWriteFiles(t, repoDir, initialFiles[branch]) + combinedBranchRunGit(t, repoDir, "add", "-A") + combinedBranchRunGit(t, repoDir, "commit", "-m", "initial "+branch) + } + + combinedBranchRunGit(t, repoDir, "checkout", initialBranches[0]) + return repoDir +} + +func combinedBranchRunIndex(t *testing.T, repoDir, indexDir string, branches []string, isDelta bool) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + opts := Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: combinedBranchRepoID, + Name: combinedBranchRepoName, + }, + IndexDir: indexDir, + DisableCTags: true, + IsDelta: isDelta, + }, + } + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err 
!= nil { + t.Fatalf("IndexGitRepo(delta=%t, branches=%v): %v", isDelta, branches, err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func combinedBranchAssertMetadataMatchesClean(t *testing.T, indexDir, cleanIndexDir string, finalBranches, initialBranches []string) { + t.Helper() + + gotRepo := combinedBranchIndexedRepository(t, indexDir) + cleanRepo := combinedBranchIndexedRepository(t, cleanIndexDir) + + if diff := cmp.Diff(finalBranches, combinedBranchNames(gotRepo.Branches)); diff != "" { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", diff) + } + if diff := cmp.Diff(cleanRepo.Branches, gotRepo.Branches); diff != "" { + t.Fatalf("indexed branch metadata differs from clean full rebuild (-want +got):\n%s", diff) + } + + finalSet := combinedBranchSet(finalBranches) + for _, branch := range initialBranches { + if !finalSet[branch] && combinedBranchContains(combinedBranchNames(gotRepo.Branches), branch) { + t.Fatalf("stale branch %q remained in repository metadata: %+v", branch, gotRepo.Branches) + } + } +} + +func combinedBranchAssertSearchEquivalent(t *testing.T, indexDir, cleanIndexDir string, branchesToCheck, patterns []string) { + t.Helper() + + for _, pattern := range patterns { + substr := &query.Substring{Pattern: pattern} + combinedBranchAssertQueryEquivalent(t, indexDir, cleanIndexDir, "unfiltered "+pattern, substr) + + for _, branch := range branchesToCheck { + branchQuery := query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, substr) + combinedBranchAssertQueryEquivalent(t, indexDir, cleanIndexDir, "branch:"+branch+" "+pattern, branchQuery) + + branchesReposQuery := query.NewAnd(query.NewSingleBranchesRepos(branch, combinedBranchRepoID), substr) + combinedBranchAssertQueryEquivalent(t, indexDir, cleanIndexDir, "BranchesRepos:"+branch+" "+pattern, branchesReposQuery) + } + } +} + +func combinedBranchAssertExpectedLookups(t *testing.T, indexDir, cleanIndexDir string, lookups []combinedBranchExpectedLookup) { + t.Helper() + + 
for _, lookup := range lookups { + for _, target := range []struct { + name string + indexDir string + }{ + {name: "delta", indexDir: indexDir}, + {name: "clean", indexDir: cleanIndexDir}, + } { + branchQuery := query.NewAnd(&query.Branch{Pattern: lookup.branch, Exact: true}, &query.Substring{Pattern: lookup.pattern}) + combinedBranchAssertFileNames(t, target.indexDir, target.name+" branch:"+lookup.branch+" "+lookup.pattern, branchQuery, lookup.wantFiles) + + branchesReposQuery := query.NewAnd(query.NewSingleBranchesRepos(lookup.branch, combinedBranchRepoID), &query.Substring{Pattern: lookup.pattern}) + combinedBranchAssertFileNames(t, target.indexDir, target.name+" BranchesRepos:"+lookup.branch+" "+lookup.pattern, branchesReposQuery, lookup.wantFiles) + } + } +} + +func combinedBranchAssertStaleBranchesAbsent(t *testing.T, indexDir, cleanIndexDir string, initialBranches, finalBranches, patterns []string) { + t.Helper() + + finalSet := combinedBranchSet(finalBranches) + for _, branch := range initialBranches { + if finalSet[branch] { + continue + } + for _, pattern := range patterns { + for _, target := range []struct { + name string + indexDir string + }{ + {name: "delta", indexDir: indexDir}, + {name: "clean", indexDir: cleanIndexDir}, + } { + branchQuery := query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, &query.Substring{Pattern: pattern}) + combinedBranchAssertFileNames(t, target.indexDir, target.name+" stale branch:"+branch+" "+pattern, branchQuery, nil) + + branchesReposQuery := query.NewAnd(query.NewSingleBranchesRepos(branch, combinedBranchRepoID), &query.Substring{Pattern: pattern}) + combinedBranchAssertFileNames(t, target.indexDir, target.name+" stale BranchesRepos:"+branch+" "+pattern, branchesReposQuery, nil) + } + } + } +} + +func combinedBranchAssertQueryEquivalent(t *testing.T, indexDir, cleanIndexDir, label string, q query.Q) { + t.Helper() + + got := combinedBranchSearch(t, indexDir, q) + want := combinedBranchSearch(t, cleanIndexDir, q) 
+ if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("%s search differs from clean full rebuild (-want +got):\n%s", label, diff) + } +} + +func combinedBranchAssertFileNames(t *testing.T, indexDir, label string, q query.Q, want []string) { + t.Helper() + + got := combinedBranchFileNames(combinedBranchSearch(t, indexDir, q)) + if want == nil { + want = []string{} + } + sort.Strings(want) + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("%s file names mismatch (-want +got):\n%s", label, diff) + } +} + +func combinedBranchAssertTombstones(t *testing.T, indexDir string, wantPaths []string) { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: combinedBranchRepoID, + Name: combinedBranchRepoName, + }, + } + + got := map[string]struct{}{} + for _, shard := range opts.FindAllShards() { + repositories, _, err := index.ReadMetadataPathAlive(shard) + if err != nil { + t.Fatalf("ReadMetadataPathAlive(%q): %v", shard, err) + } + for _, repository := range repositories { + if repository.ID != combinedBranchRepoID { + continue + } + for path := range repository.FileTombstones { + got[path] = struct{}{} + } + } + } + + for _, path := range wantPaths { + if _, ok := got[path]; !ok { + t.Fatalf("missing expected tombstone for %q; all tombstones: %v", path, combinedBranchMapKeys(got)) + } + } +} + +type combinedBranchSearchHit struct { + FileName string + Branches []string + Version string + Content string +} + +func combinedBranchSearch(t *testing.T, indexDir string, q query.Q) []combinedBranchSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%s): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{ + Whole: true, + ShardMaxMatchCount: 1000, + TotalMaxMatchCount: 1000, + MaxDocDisplayCount: 1000, + MaxMatchDisplayCount: 1000, + }) + if err != nil { + 
t.Fatalf("Search(%s): %v", q, err) + } + + hits := make([]combinedBranchSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) + sort.Strings(branches) + hits = append(hits, combinedBranchSearchHit{ + FileName: file.FileName, + Branches: branches, + Version: file.Version, + Content: string(file.Content), + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + if hits[i].Version != hits[j].Version { + return hits[i].Version < hits[j].Version + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func combinedBranchFileNames(hits []combinedBranchSearchHit) []string { + names := make([]string, 0, len(hits)) + for _, hit := range hits { + names = append(names, hit.FileName) + } + sort.Strings(names) + return names +} + +func combinedBranchIndexedRepository(t *testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: combinedBranchRepoID, + Name: combinedBranchRepoName, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata(%s): %v", indexDir, err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata(%s): repository not found", indexDir) + } + return repo +} + +func combinedBranchCheckoutWriteCommit(t *testing.T, repoDir, branch string, files combinedBranchFiles, message string) { + t.Helper() + + combinedBranchRunGit(t, repoDir, "checkout", branch) + combinedBranchWriteFiles(t, repoDir, files) + combinedBranchRunGit(t, repoDir, "add", "-A") + combinedBranchRunGit(t, repoDir, "commit", "-m", message) +} + +func combinedBranchCheckoutNewBranchWriteCommit(t *testing.T, repoDir, branch, startPoint string, files 
combinedBranchFiles, message string) { + t.Helper() + + combinedBranchRunGit(t, repoDir, "checkout", "-B", branch, startPoint) + combinedBranchWriteFiles(t, repoDir, files) + combinedBranchRunGit(t, repoDir, "add", "-A") + combinedBranchRunGit(t, repoDir, "commit", "-m", message) +} + +func combinedBranchWriteFiles(t *testing.T, repoDir string, files combinedBranchFiles) { + t.Helper() + + for name, content := range files { + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } +} + +func combinedBranchRenameBranch(t *testing.T, repoDir, oldBranch, newBranch string) { + t.Helper() + combinedBranchRunGit(t, repoDir, "branch", "-m", oldBranch, newBranch) +} + +func combinedBranchDeleteBranch(t *testing.T, repoDir, branch string) { + t.Helper() + combinedBranchRunGit(t, repoDir, "branch", "-D", branch) +} + +func combinedBranchRunGit(t *testing.T, cwd string, args ...string) { + t.Helper() + + if err := os.MkdirAll(cwd, 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", cwd, err) + } + + cmd := exec.Command("git", args...) 
+ cmd.Dir = cwd + cmd.Env = append(os.Environ(), + "GIT_CONFIG_GLOBAL=", + "GIT_CONFIG_SYSTEM=", + "GIT_COMMITTER_NAME=Kierkegaard", + "GIT_COMMITTER_EMAIL=soren@apache.com", + "GIT_AUTHOR_NAME=Kierkegaard", + "GIT_AUTHOR_EMAIL=soren@apache.com", + ) + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("git %s failed: %v\n%s", strings.Join(args, " "), err, out) + } +} + +func combinedBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} + +func combinedBranchUnion(lists ...[]string) []string { + seen := map[string]struct{}{} + var union []string + for _, list := range lists { + for _, item := range list { + if _, ok := seen[item]; ok { + continue + } + seen[item] = struct{}{} + union = append(union, item) + } + } + return union +} + +func combinedBranchSet(items []string) map[string]bool { + set := make(map[string]bool, len(items)) + for _, item := range items { + set[item] = true + } + return set +} + +func combinedBranchContains(items []string, want string) bool { + for _, item := range items { + if item == want { + return true + } + } + return false +} + +func combinedBranchMapKeys(m map[string]struct{}) []string { + keys := make([]string, 0, len(m)) + for key := range m { + keys = append(keys, key) + } + sort.Strings(keys) + return keys +} diff --git a/gitindex/delta_multibranch_existing_test.go b/gitindex/delta_multibranch_existing_test.go new file mode 100644 index 000000000..5ee9808fc --- /dev/null +++ b/gitindex/delta_multibranch_existing_test.go @@ -0,0 +1,434 @@ +package gitindex + +import ( + "context" + "fmt" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/google/go-cmp/cmp" + "github.com/sourcegraph/zoekt" + "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" + "github.com/sourcegraph/zoekt/search" + "os" + "path/filepath" + "sort" + 
"strings" + "testing" +) + +// ---- delta_multibranch_baseline_test.go ---- + +func TestIndexGitRepo_MultiBranchDeltaBaselineExistingBehaviorControls(t *testing.T) { + t.Parallel() + + const ( + repoName = "repository" + repoID = 707 + ) + + branches := []string{"main", "release"} + + for _, tc := range []struct { + name string + initialMain map[string]string + initialRelease map[string]string + finalMain map[string]string + tombstones []string + checks []baselineSearchCheck + }{ + { + name: "changed path on one branch", + initialMain: map[string]string{ + "shared.txt": "baseline-shared-old\n", + }, + initialRelease: map[string]string{ + "shared.txt": "baseline-shared-old\n", + }, + finalMain: map[string]string{ + "shared.txt": "baseline-main-new\n", + }, + tombstones: []string{"shared.txt"}, + checks: []baselineSearchCheck{ + {pattern: "baseline-main-new", wantFiles: []string{"shared.txt"}}, + {branch: "main", pattern: "baseline-main-new", wantFiles: []string{"shared.txt"}}, + {branch: "release", pattern: "baseline-main-new"}, + {pattern: "baseline-shared-old", wantFiles: []string{"shared.txt"}}, + {branch: "main", pattern: "baseline-shared-old"}, + {branch: "release", pattern: "baseline-shared-old", wantFiles: []string{"shared.txt"}}, + }, + }, + { + name: "deletion on one branch while another keeps the path", + initialMain: map[string]string{ + "shared.txt": "baseline-delete-kept\n", + }, + initialRelease: map[string]string{ + "shared.txt": "baseline-delete-kept\n", + }, + finalMain: map[string]string{}, + tombstones: []string{"shared.txt"}, + checks: []baselineSearchCheck{ + {pattern: "baseline-delete-kept", wantFiles: []string{"shared.txt"}}, + {branch: "main", pattern: "baseline-delete-kept"}, + {branch: "release", pattern: "baseline-delete-kept", wantFiles: []string{"shared.txt"}}, + }, + }, + { + name: "file becomes identical across branches", + initialMain: map[string]string{ + "a.txt": "baseline-identical-alpha\n", + }, + initialRelease: 
map[string]string{ + "a.txt": "baseline-identical-beta\n", + }, + finalMain: map[string]string{ + "a.txt": "baseline-identical-beta\n", + }, + tombstones: []string{"a.txt"}, + checks: []baselineSearchCheck{ + {pattern: "baseline-identical-beta", wantFiles: []string{"a.txt"}}, + {branch: "main", pattern: "baseline-identical-beta", wantFiles: []string{"a.txt"}}, + {branch: "release", pattern: "baseline-identical-beta", wantFiles: []string{"a.txt"}}, + {pattern: "baseline-identical-alpha"}, + {branch: "main", pattern: "baseline-identical-alpha"}, + {branch: "release", pattern: "baseline-identical-alpha"}, + }, + }, + { + name: "file diverges from shared content", + initialMain: map[string]string{ + "a.txt": "baseline-shared-same\n", + }, + initialRelease: map[string]string{ + "a.txt": "baseline-shared-same\n", + }, + finalMain: map[string]string{ + "a.txt": "baseline-diverged-main\n", + }, + tombstones: []string{"a.txt"}, + checks: []baselineSearchCheck{ + {pattern: "baseline-diverged-main", wantFiles: []string{"a.txt"}}, + {branch: "main", pattern: "baseline-diverged-main", wantFiles: []string{"a.txt"}}, + {branch: "release", pattern: "baseline-diverged-main"}, + {pattern: "baseline-shared-same", wantFiles: []string{"a.txt"}}, + {branch: "main", pattern: "baseline-shared-same"}, + {branch: "release", pattern: "baseline-shared-same", wantFiles: []string{"a.txt"}}, + }, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + repositoryDir := t.TempDir() + baselineInitTwoBranchRepo(t, repositoryDir, tc.initialMain, tc.initialRelease) + + indexDir := t.TempDir() + opts := baselineIndexOptions(repositoryDir, indexDir, repoName, repoID, branches) + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + oldShards := baselineFindAllShards(t, opts.BuildOptions) + baselineReplaceBranchFilesAndCommit(t, repositoryDir, "main", tc.finalMain, "final main") + + deltaOpts := opts + deltaOpts.BuildOptions.IsDelta = 
true + deltaBuildCalled, normalBuildCalled := baselineIndexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + t.Fatal("expected exact same multi-branch set to use delta without normal-build fallback") + } + + cleanIndexDir := baselineCleanFullRebuild(t, opts) + baselineAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, repoName, repoID, branches) + baselineAssertOldShardTombstones(t, oldShards, repoID, tc.tombstones) + + for _, check := range tc.checks { + baselineAssertSearchCheck(t, indexDir, cleanIndexDir, repoName, repoID, check) + } + }) + } +} + +type baselineSearchCheck struct { + branch string + pattern string + wantFiles []string +} + +type baselineSearchHit struct { + FileName string + Content string + Branches []string +} + +func baselineInitTwoBranchRepo(t *testing.T, repositoryDir string, mainFiles, releaseFiles map[string]string) { + t.Helper() + + runGit(t, repositoryDir, "init", "-b", "main") + baselineReplaceCurrentBranchFilesAndCommit(t, repositoryDir, mainFiles, "initial main") + runGit(t, repositoryDir, "checkout", "-b", "release") + baselineReplaceCurrentBranchFilesAndCommit(t, repositoryDir, releaseFiles, "initial release") + runGit(t, repositoryDir, "checkout", "main") +} + +func baselineReplaceBranchFilesAndCommit(t *testing.T, repositoryDir, branch string, files map[string]string, message string) { + t.Helper() + + runGit(t, repositoryDir, "checkout", branch) + baselineReplaceCurrentBranchFilesAndCommit(t, repositoryDir, files, message) +} + +func baselineReplaceCurrentBranchFilesAndCommit(t *testing.T, repositoryDir string, files map[string]string, message string) { + t.Helper() + + runGit(t, repositoryDir, "rm", "-r", "--ignore-unmatch", ".") + for name, content := range files { + path := filepath.Join(repositoryDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", 
filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } + runGit(t, repositoryDir, "add", "-A") + runGit(t, repositoryDir, "commit", "--allow-empty", "-m", message) +} + +func baselineIndexOptions(repositoryDir, indexDir, repoName string, repoID uint32, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: append([]string(nil), branches...), + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: repoID, + Name: repoName, + }, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func baselineIndexGitRepoWithPrepareSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func baselineCleanFullRebuild(t *testing.T, opts Options) string { + t.Helper() + + cleanIndexDir := t.TempDir() + cleanOpts := opts + cleanOpts.BuildOptions.IndexDir = cleanIndexDir + cleanOpts.BuildOptions.IsDelta = false + cleanOpts.BuildOptions.RepositoryDescription.DeltaStats = nil + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + return cleanIndexDir +} + +func 
baselineAssertRepositoryBranchesMatchClean(t *testing.T, deltaIndexDir, cleanIndexDir, repoName string, repoID uint32, wantBranches []string) { + t.Helper() + + deltaRepo := baselineIndexedRepository(t, deltaIndexDir, repoName, repoID) + cleanRepo := baselineIndexedRepository(t, cleanIndexDir, repoName, repoID) + + if got := baselineRepositoryBranchNames(deltaRepo.Branches); !cmp.Equal(got, wantBranches) { + t.Fatalf("delta branch names mismatch (-want +got):\n%s", cmp.Diff(wantBranches, got)) + } + if diff := cmp.Diff(cleanRepo.Branches, deltaRepo.Branches); diff != "" { + t.Fatalf("delta branch metadata differs from clean rebuild (-clean +delta):\n%s", diff) + } +} + +func baselineAssertOldShardTombstones(t *testing.T, oldShards []string, repoID uint32, wantPaths []string) { + t.Helper() + + for _, shard := range oldShards { + repositories, _, err := index.ReadMetadataPathAlive(shard) + if err != nil { + t.Fatalf("ReadMetadataPathAlive(%q): %v", shard, err) + } + + var repo *zoekt.Repository + for _, candidate := range repositories { + if candidate.ID == repoID { + repo = candidate + break + } + } + if repo == nil { + t.Fatalf("old shard %q no longer has alive repo ID %d metadata", shard, repoID) + } + + for _, path := range wantPaths { + if _, ok := repo.FileTombstones[path]; !ok { + t.Fatalf("old shard %q missing file tombstone %q in %+v", shard, path, repo.FileTombstones) + } + } + } +} + +func baselineAssertSearchCheck(t *testing.T, deltaIndexDir, cleanIndexDir, repoName string, repoID uint32, check baselineSearchCheck) { + t.Helper() + + deltaRepo := baselineIndexedRepository(t, deltaIndexDir, repoName, repoID) + cleanRepo := baselineIndexedRepository(t, cleanIndexDir, repoName, repoID) + + if check.branch == "" { + deltaHits := baselineSearchHits(t, deltaIndexDir, &query.Substring{Pattern: check.pattern}) + cleanHits := baselineSearchHits(t, cleanIndexDir, &query.Substring{Pattern: check.pattern}) + baselineAssertHits(t, "unfiltered "+check.pattern, 
deltaHits, cleanHits, check.wantFiles) + return + } + + parsedBranchQuery := baselineParseBranchQuery(t, check.branch, check.pattern) + deltaHits := baselineSearchHits(t, deltaIndexDir, parsedBranchQuery) + cleanHits := baselineSearchHits(t, cleanIndexDir, parsedBranchQuery) + baselineAssertHits(t, fmt.Sprintf("branch:%s %s", check.branch, check.pattern), deltaHits, cleanHits, check.wantFiles) + + deltaBranchesReposQuery := query.NewAnd( + query.NewSingleBranchesRepos(check.branch, deltaRepo.ID), + &query.Substring{Pattern: check.pattern}, + ) + cleanBranchesReposQuery := query.NewAnd( + query.NewSingleBranchesRepos(check.branch, cleanRepo.ID), + &query.Substring{Pattern: check.pattern}, + ) + deltaHits = baselineSearchHits(t, deltaIndexDir, deltaBranchesReposQuery) + cleanHits = baselineSearchHits(t, cleanIndexDir, cleanBranchesReposQuery) + baselineAssertHits(t, fmt.Sprintf("BranchesRepos(%s) %s", check.branch, check.pattern), deltaHits, cleanHits, check.wantFiles) +} + +func baselineParseBranchQuery(t *testing.T, branch, pattern string) query.Q { + t.Helper() + + q, err := query.Parse(fmt.Sprintf("branch:%s %s", branch, pattern)) + if err != nil { + t.Fatalf("query.Parse(branch:%s %s): %v", branch, pattern, err) + } + return q +} + +func baselineAssertHits(t *testing.T, label string, deltaHits, cleanHits []baselineSearchHit, wantFiles []string) { + t.Helper() + + if diff := cmp.Diff(cleanHits, deltaHits); diff != "" { + t.Fatalf("%s search differs from clean full rebuild (-clean +delta):\n%s", label, diff) + } + wantFiles = append([]string(nil), wantFiles...) 
+ if wantFiles == nil { + wantFiles = []string{} + } + if got := baselineSearchHitFileNames(deltaHits); !cmp.Equal(got, wantFiles) { + t.Fatalf("%s file names mismatch (-want +got):\n%s", label, cmp.Diff(wantFiles, got)) + } +} + +func baselineSearchHits(t *testing.T, indexDir string, q query.Q) []baselineSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true}) + if err != nil { + t.Fatalf("Search(%s): %v", q.String(), err) + } + + hits := make([]baselineSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) + sort.Strings(branches) + hits = append(hits, baselineSearchHit{ + FileName: file.FileName, + Content: string(file.Content), + Branches: branches, + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func baselineSearchHitFileNames(hits []baselineSearchHit) []string { + names := make([]string, 0, len(hits)) + for _, hit := range hits { + names = append(names, hit.FileName) + } + sort.Strings(names) + return names +} + +func baselineFindAllShards(t *testing.T, opts index.Options) []string { + t.Helper() + + shards := opts.FindAllShards() + sort.Strings(shards) + if len(shards) == 0 { + t.Fatal("expected initial full build to write at least one shard") + } + return shards +} + +func baselineIndexedRepository(t *testing.T, indexDir, repoName string, repoID uint32) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: 
zoekt.Repository{ + ID: repoID, + Name: repoName, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata(%q): %v", repoName, err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata(%q): not found", repoName) + } + return repo +} + +func baselineRepositoryBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} diff --git a/gitindex/delta_multibranch_head_query_test.go b/gitindex/delta_multibranch_head_query_test.go new file mode 100644 index 000000000..6288549cc --- /dev/null +++ b/gitindex/delta_multibranch_head_query_test.go @@ -0,0 +1,939 @@ +package gitindex + +import ( + "context" + "github.com/RoaringBitmap/roaring" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/google/go-cmp/cmp" + "github.com/sourcegraph/zoekt" + "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" + "github.com/sourcegraph/zoekt/search" + "os" + "os/exec" + "path/filepath" + "regexp/syntax" + "sort" + "strings" + "testing" +) + +// ---- delta_multibranch_head_worktree_test.go ---- + +const ( + headWorktreeRepoName = "head-worktree-repository" + headWorktreeRepoID = 9701 + + headWorktreeLinkedRepoAName = "head-worktree-linked-a" + headWorktreeLinkedRepoAID = 9702 + headWorktreeLinkedRepoBName = "head-worktree-linked-b" + headWorktreeLinkedRepoBID = 9703 +) + +func TestIndexGitRepo_DeltaHeadWorktreeSingleHEADResolvesBranchSwitch(t *testing.T) { + t.Parallel() + + repoDir := headWorktreeInitRepo(t) + indexDir := t.TempDir() + + initialOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + headWorktreeRenameCurrentBranch(t, repoDir, "feature-a", "feature-b") + 
headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + "branch.txt": "feature-b-single-needle\n", + }, "feature-b single") + + deltaOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := headWorktreeIndexWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + t.Fatal("expected resolved HEAD feature-a -> feature-b update to stay on the delta path") + } + + cleanIndexDir := headWorktreeCleanFullRebuild(t, deltaOpts) + headWorktreeAssertBranchMetadataMatchesClean(t, indexDir, cleanIndexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"feature-b"}) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "feature-b branch content", + branch: "feature-b", + pattern: "feature-b-single-needle", + wantFiles: []string{"branch.txt"}, + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "feature-a old branch absent", + branch: "feature-a", + pattern: "feature-a-initial-needle", + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "HEAD branch name not stored after resolution", + branch: "HEAD", + pattern: "feature-b-single-needle", + }) +} + +func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADAndReleaseSwitchesHEADBranch(t *testing.T) { + t.Parallel() + + repoDir := headWorktreeInitRepo(t) + indexDir := t.TempDir() + + initialOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + headWorktreeRenameCurrentBranch(t, repoDir, "feature-a", "feature-b") + headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + 
"branch.txt": "feature-b-multi-needle\n", + }, "feature-b multi") + + deltaOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := headWorktreeIndexWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + t.Fatal("expected resolved HEAD feature-a -> feature-b with release to stay on the delta path") + } + + cleanIndexDir := headWorktreeCleanFullRebuild(t, deltaOpts) + headWorktreeAssertBranchMetadataMatchesClean(t, indexDir, cleanIndexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"feature-b", "release"}) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "feature-b branch content", + branch: "feature-b", + pattern: "feature-b-multi-needle", + wantFiles: []string{"branch.txt"}, + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "release branch preserved", + branch: "release", + pattern: "release-initial-needle", + wantFiles: []string{"release.txt"}, + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "feature-a old branch absent", + branch: "feature-a", + pattern: "feature-a-initial-needle", + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "HEAD branch name not stored after resolution", + branch: "HEAD", + pattern: "feature-b-multi-needle", + }) +} + +func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADResolvingToDuplicateReleaseFallsBack(t *testing.T) { + t.Parallel() + + repoDir := headWorktreeInitRepo(t) + indexDir := t.TempDir() + + initialOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + 
t.Fatalf("initial IndexGitRepo: %v", err) + } + + headWorktreeRunGit(t, repoDir, "checkout", "release") + + deltaOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := headWorktreeIndexWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted before conservative fallback") + } + if !normalBuildCalled { + t.Fatal("expected HEAD resolving to duplicate release branch to fall back to a normal rebuild") + } + + cleanIndexDir := headWorktreeCleanFullRebuild(t, deltaOpts) + headWorktreeAssertBranchMetadataMatchesClean(t, indexDir, cleanIndexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"release"}) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "release content after duplicate HEAD dedupe", + branch: "release", + pattern: "release-initial-needle", + wantFiles: []string{"release.txt"}, + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "duplicate HEAD branch name not stored", + branch: "HEAD", + pattern: "release-initial-needle", + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "feature-a branch removed after fallback", + branch: "feature-a", + pattern: "feature-a-initial-needle", + }) +} + +func TestIndexGitRepo_DeltaHeadWorktreeDetachedHEADInMultiBranchIndexFallsBack(t *testing.T) { + t.Parallel() + + repoDir := headWorktreeInitRepo(t) + indexDir := t.TempDir() + + initialOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + headWorktreeRunGit(t, repoDir, "checkout", "--detach", "feature-a") + headWorktreeReplaceFilesAndCommit(t, repoDir, 
map[string]string{ + "branch.txt": "detached-head-needle\n", + }, "detached head") + + deltaOpts := headWorktreeOptions(repoDir, indexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := headWorktreeIndexWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted before conservative fallback") + } + if !normalBuildCalled { + t.Fatal("expected detached HEAD in a multi-branch index to fall back to a normal rebuild") + } + + cleanIndexDir := headWorktreeCleanFullRebuild(t, deltaOpts) + headWorktreeAssertBranchMetadataMatchesClean(t, indexDir, cleanIndexDir, headWorktreeRepoName, headWorktreeRepoID, []string{"HEAD", "release"}) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "detached HEAD content remains queryable as HEAD", + branch: "HEAD", + pattern: "detached-head-needle", + wantFiles: []string{"branch.txt"}, + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "release branch preserved", + branch: "release", + pattern: "release-initial-needle", + wantFiles: []string{"release.txt"}, + }) + headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ + name: "feature-a branch name not kept for detached HEAD", + branch: "feature-a", + pattern: "feature-a-initial-needle", + }) +} + +func TestIndexGitRepo_HeadWorktreeTwoLinkedWorktreesSameIndexDirKeepResolvedIdentities(t *testing.T) { + t.Parallel() + + _, worktreeA, worktreeB := headWorktreeInitLinkedWorktrees(t) + indexDir := t.TempDir() + + optsA := headWorktreeOptions(worktreeA, indexDir, headWorktreeLinkedRepoAName, headWorktreeLinkedRepoAID, []string{"HEAD"}) + if _, err := IndexGitRepo(optsA); err != nil { + t.Fatalf("IndexGitRepo(worktree A): %v", err) + } + optsB := headWorktreeOptions(worktreeB, indexDir, 
headWorktreeLinkedRepoBName, headWorktreeLinkedRepoBID, []string{"HEAD"}) + if _, err := IndexGitRepo(optsB); err != nil { + t.Fatalf("IndexGitRepo(worktree B): %v", err) + } + + headWorktreeAssertBranchMetadata(t, indexDir, headWorktreeLinkedRepoAName, headWorktreeLinkedRepoAID, []string{"feature-a"}) + headWorktreeAssertBranchMetadata(t, indexDir, headWorktreeLinkedRepoBName, headWorktreeLinkedRepoBID, []string{"feature-b"}) + + headWorktreeAssertSearchFileNames(t, indexDir, "worktree A feature-a lookup", + query.NewAnd(query.NewSingleBranchesRepos("feature-a", headWorktreeLinkedRepoAID), headWorktreeContentQuery("worktree-a-needle")), + []string{"a.txt"}) + headWorktreeAssertSearchFileNames(t, indexDir, "worktree B feature-b lookup", + query.NewAnd(query.NewSingleBranchesRepos("feature-b", headWorktreeLinkedRepoBID), headWorktreeContentQuery("worktree-b-needle")), + []string{"b.txt"}) + headWorktreeAssertSearchFileNames(t, indexDir, "worktree A not conflated with feature-b", + query.NewAnd(query.NewSingleBranchesRepos("feature-b", headWorktreeLinkedRepoAID), &query.Const{Value: true}), + nil) + headWorktreeAssertSearchFileNames(t, indexDir, "worktree B not conflated with feature-a", + query.NewAnd(query.NewSingleBranchesRepos("feature-a", headWorktreeLinkedRepoBID), &query.Const{Value: true}), + nil) + headWorktreeAssertSearchFileNames(t, indexDir, "worktree A does not store HEAD", + query.NewAnd(query.NewSingleBranchesRepos("HEAD", headWorktreeLinkedRepoAID), &query.Const{Value: true}), + nil) + headWorktreeAssertSearchFileNames(t, indexDir, "worktree B does not store HEAD", + query.NewAnd(query.NewSingleBranchesRepos("HEAD", headWorktreeLinkedRepoBID), &query.Const{Value: true}), + nil) +} + +type headWorktreeLookup struct { + name string + branch string + pattern string + wantFiles []string +} + +type headWorktreeSearchHit struct { + Repository string + RepositoryID uint32 + FileName string + Branches []string + Version string + Content string +} + +func 
headWorktreeInitRepo(t *testing.T) string { + t.Helper() + + repoDir := t.TempDir() + headWorktreeRunGit(t, repoDir, "init", "-b", "feature-a") + headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + "branch.txt": "feature-a-initial-needle\n", + }, "initial feature-a") + + headWorktreeRunGit(t, repoDir, "checkout", "-B", "release") + headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + "release.txt": "release-initial-needle\n", + }, "initial release") + + headWorktreeRunGit(t, repoDir, "checkout", "feature-a") + return repoDir +} + +func headWorktreeInitLinkedWorktrees(t *testing.T) (repoDir, worktreeA, worktreeB string) { + t.Helper() + + root := t.TempDir() + repoDir = filepath.Join(root, "repo") + headWorktreeRunGit(t, root, "init", "-b", "main", "repo") + headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + "seed.txt": "seed\n", + }, "seed") + + headWorktreeRunGit(t, repoDir, "checkout", "-B", "feature-a", "main") + headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + "a.txt": "worktree-a-needle\n", + }, "feature-a") + + headWorktreeRunGit(t, repoDir, "checkout", "-B", "feature-b", "main") + headWorktreeReplaceFilesAndCommit(t, repoDir, map[string]string{ + "b.txt": "worktree-b-needle\n", + }, "feature-b") + + headWorktreeRunGit(t, repoDir, "checkout", "main") + worktreeA = filepath.Join(root, "worktree-a") + worktreeB = filepath.Join(root, "worktree-b") + headWorktreeRunGit(t, repoDir, "worktree", "add", worktreeA, "feature-a") + headWorktreeRunGit(t, repoDir, "worktree", "add", worktreeB, "feature-b") + return repoDir, worktreeA, worktreeB +} + +func headWorktreeOptions(repoDir, indexDir, repoName string, repoID uint32, branches []string) Options { + return Options{ + RepoDir: repoDir, + Branches: append([]string(nil), branches...), + ResolveHEADToBranch: true, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: repoID, + Name: repoName, + }, + IndexDir: indexDir, + 
DisableCTags: true, + }, + } +} + +func headWorktreeIndexWithPrepareSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func headWorktreeCleanFullRebuild(t *testing.T, opts Options) string { + t.Helper() + + cleanIndexDir := t.TempDir() + cleanOpts := opts + cleanOpts.BuildOptions.IndexDir = cleanIndexDir + cleanOpts.BuildOptions.IsDelta = false + cleanOpts.BuildOptions.RepositoryDescription.DeltaStats = nil + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean full IndexGitRepo: %v", err) + } + return cleanIndexDir +} + +func headWorktreeReplaceFilesAndCommit(t *testing.T, repoDir string, files map[string]string, message string) { + t.Helper() + + headWorktreeRunGit(t, repoDir, "rm", "-r", "--ignore-unmatch", ".") + for name, content := range files { + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } + headWorktreeRunGit(t, repoDir, "add", "-A") + headWorktreeRunGit(t, repoDir, "commit", "--allow-empty", "-m", message) +} 
+ +func headWorktreeRenameCurrentBranch(t *testing.T, repoDir, oldBranch, newBranch string) { + t.Helper() + headWorktreeRunGit(t, repoDir, "branch", "-m", oldBranch, newBranch) +} + +func headWorktreeAssertBranchMetadataMatchesClean(t *testing.T, indexDir, cleanIndexDir, repoName string, repoID uint32, wantBranches []string) { + t.Helper() + + headWorktreeAssertBranchMetadata(t, indexDir, repoName, repoID, wantBranches) + deltaRepo := headWorktreeIndexedRepository(t, indexDir, repoName, repoID) + cleanRepo := headWorktreeIndexedRepository(t, cleanIndexDir, repoName, repoID) + if diff := cmp.Diff(cleanRepo.Branches, deltaRepo.Branches); diff != "" { + t.Fatalf("delta branch metadata differs from clean full rebuild (-want +got):\n%s", diff) + } +} + +func headWorktreeAssertBranchMetadata(t *testing.T, indexDir, repoName string, repoID uint32, wantBranches []string) { + t.Helper() + + repo := headWorktreeIndexedRepository(t, indexDir, repoName, repoID) + if diff := cmp.Diff(wantBranches, headWorktreeBranchNames(repo.Branches)); diff != "" { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", diff) + } +} + +func headWorktreeAssertLookup(t *testing.T, indexDir, cleanIndexDir string, repoID uint32, lookup headWorktreeLookup) { + t.Helper() + + base := headWorktreeContentQuery(lookup.pattern) + if lookup.branch == "" { + headWorktreeAssertQueryMatchesClean(t, indexDir, cleanIndexDir, lookup.name+"/unfiltered", base, lookup.wantFiles) + return + } + + branchQuery := query.NewAnd(&query.Branch{Pattern: lookup.branch, Exact: true}, base) + headWorktreeAssertQueryMatchesClean(t, indexDir, cleanIndexDir, lookup.name+"/branch", branchQuery, lookup.wantFiles) + + branchesReposQuery := query.NewAnd(query.NewSingleBranchesRepos(lookup.branch, repoID), base) + headWorktreeAssertQueryMatchesClean(t, indexDir, cleanIndexDir, lookup.name+"/branches-repos", branchesReposQuery, lookup.wantFiles) +} + +func headWorktreeAssertQueryMatchesClean(t *testing.T, indexDir, 
cleanIndexDir, label string, q query.Q, wantFiles []string) { + t.Helper() + + deltaHits := headWorktreeSearchHits(t, indexDir, q) + cleanHits := headWorktreeSearchHits(t, cleanIndexDir, q) + if diff := cmp.Diff(cleanHits, deltaHits); diff != "" { + t.Fatalf("%s: delta search results differ from clean full rebuild (-want +got):\n%s", label, diff) + } + + gotFiles := headWorktreeFileNames(deltaHits) + wantFiles = append([]string(nil), wantFiles...) + if wantFiles == nil { + wantFiles = []string{} + } + sort.Strings(wantFiles) + if diff := cmp.Diff(wantFiles, gotFiles); diff != "" { + t.Fatalf("%s: search file names mismatch (-want +got):\n%s", label, diff) + } +} + +func headWorktreeAssertSearchFileNames(t *testing.T, indexDir, label string, q query.Q, wantFiles []string) { + t.Helper() + + gotFiles := headWorktreeFileNames(headWorktreeSearchHits(t, indexDir, q)) + wantFiles = append([]string(nil), wantFiles...) + if wantFiles == nil { + wantFiles = []string{} + } + sort.Strings(wantFiles) + if diff := cmp.Diff(wantFiles, gotFiles); diff != "" { + t.Fatalf("%s: search file names mismatch (-want +got):\n%s", label, diff) + } +} + +func headWorktreeSearchHits(t *testing.T, indexDir string, q query.Q) []headWorktreeSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{ + Whole: true, + ShardMaxMatchCount: 1000, + TotalMaxMatchCount: 1000, + MaxDocDisplayCount: 1000, + MaxMatchDisplayCount: 1000, + }) + if err != nil { + t.Fatalf("Search(%s): %v", q, err) + } + + hits := make([]headWorktreeSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) 
+ sort.Strings(branches) + hits = append(hits, headWorktreeSearchHit{ + Repository: file.Repository, + RepositoryID: file.RepositoryID, + FileName: file.FileName, + Branches: branches, + Version: file.Version, + Content: string(file.Content), + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].RepositoryID != hits[j].RepositoryID { + return hits[i].RepositoryID < hits[j].RepositoryID + } + if hits[i].Repository != hits[j].Repository { + return hits[i].Repository < hits[j].Repository + } + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + if hits[i].Version != hits[j].Version { + return hits[i].Version < hits[j].Version + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func headWorktreeContentQuery(pattern string) query.Q { + if pattern == "" { + return &query.Const{Value: true} + } + return &query.Substring{Pattern: pattern, Content: true} +} + +func headWorktreeFileNames(hits []headWorktreeSearchHit) []string { + names := make([]string, 0, len(hits)) + for _, hit := range hits { + names = append(names, hit.FileName) + } + sort.Strings(names) + return names +} + +func headWorktreeIndexedRepository(t *testing.T, indexDir, repoName string, repoID uint32) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: repoID, + Name: repoName, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata(%q): %v", repoName, err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata(%q): repository not found", repoName) + } + return repo +} + +func headWorktreeBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return 
names +} + +func headWorktreeRunGit(t *testing.T, cwd string, args ...string) { + t.Helper() + + if err := os.MkdirAll(cwd, 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", cwd, err) + } + + cmd := exec.Command("git", args...) + cmd.Dir = cwd + cmd.Env = append(os.Environ(), + "GIT_CONFIG_GLOBAL=", + "GIT_CONFIG_SYSTEM=", + "GIT_COMMITTER_NAME=Simone Weil", + "GIT_COMMITTER_EMAIL=simone@apache.com", + "GIT_AUTHOR_NAME=Simone Weil", + "GIT_AUTHOR_EMAIL=simone@apache.com", + ) + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("git %s failed: %v\n%s", strings.Join(args, " "), err, out) + } +} + +// ---- delta_multibranch_query_surfaces_test.go ---- + +const ( + querySurfaceRepoName = "query-surface-repository" + querySurfaceRepoID = 91046 + + querySurfaceMain = "main" + querySurfaceOldFeature = "feature-a" + querySurfaceNewFeature = "feature-b" + querySurfaceOldRelease = "old-release" + querySurfaceNewRelease = "new-release" +) + +func TestIndexGitRepo_DeltaMultiBranchQuerySurfaces(t *testing.T) { + t.Parallel() + + repoDir := querySurfaceInitRepo(t) + indexDir := t.TempDir() + + initialBranches := []string{querySurfaceOldFeature, querySurfaceOldRelease, querySurfaceMain} + initialOpts := querySurfaceOptions(repoDir, indexDir, initialBranches) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + querySurfaceMutateRenameAddRemove(t, repoDir) + + finalBranches := []string{querySurfaceNewFeature, querySurfaceNewRelease, querySurfaceMain} + deltaOpts := querySurfaceOptions(repoDir, indexDir, finalBranches) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled := querySurfaceIndexGitRepoWithPrepareSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Error("expected query surface scenario to attempt a delta build") + } + if normalBuildCalled { + t.Error("expected query surface scenario to stay on the delta path, got normal-build fallback") + } + + cleanIndexDir := t.TempDir() + 
cleanOpts := querySurfaceOptions(repoDir, cleanIndexDir, finalBranches) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + querySurfaceAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, finalBranches) + + querySurfaces := []struct { + name string + q query.Q + wantFiles []string + }{ + { + name: "plain substring across all branches", + q: &query.Substring{Pattern: "query-surface-final-common"}, + wantFiles: []string{"new_release_surface.go", "renamed_surface.go"}, + }, + { + name: "parsed branch query for renamed branch", + q: querySurfaceParseQuery(t, "branch:"+querySurfaceNewFeature+" query-surface-renamed-only"), + wantFiles: []string{"renamed_surface.go"}, + }, + { + name: "parsed branch query for removed branch", + q: querySurfaceParseQuery(t, "branch:"+querySurfaceOldRelease+" query-surface-old-release-only"), + wantFiles: nil, + }, + { + name: "BranchesRepos single branch and repo ID", + q: query.NewAnd( + query.NewSingleBranchesRepos(querySurfaceNewRelease, querySurfaceRepoID), + &query.Substring{Pattern: "query-surface-new-release-only", Content: true}, + ), + wantFiles: []string{"new_release_surface.go"}, + }, + { + name: "BranchesRepos multiple branch/repo pairs", + q: query.NewAnd( + &query.BranchesRepos{List: []query.BranchRepos{ + {Branch: querySurfaceNewFeature, Repos: roaring.BitmapOf(querySurfaceRepoID)}, + {Branch: querySurfaceNewRelease, Repos: roaring.BitmapOf(querySurfaceRepoID)}, + }}, + &query.Substring{Pattern: "query-surface-final-common", Content: true}, + ), + wantFiles: []string{"new_release_surface.go", "renamed_surface.go"}, + }, + { + name: "file-name query after branch rename add remove", + q: &query.Substring{Pattern: "surface.go", FileName: true}, + wantFiles: []string{"main_surface.go", "new_release_surface.go", "renamed_surface.go"}, + }, + { + name: "regex content query after branch rename add remove", + q: querySurfaceRegexpContentQuery(t, 
`regex-surface-(renamed|added)-2026`), + wantFiles: []string{"new_release_surface.go", "renamed_surface.go"}, + }, + { + name: "case-sensitive content query after branch rename add remove", + q: &query.Substring{Pattern: "CaseSurfaceExact", Content: true, CaseSensitive: true}, + wantFiles: []string{"new_release_surface.go", "renamed_surface.go"}, + }, + { + name: "case-sensitive content query rejects lowercase", + q: &query.Substring{Pattern: "casesurfaceexact", Content: true, CaseSensitive: true}, + wantFiles: nil, + }, + } + + for _, surface := range querySurfaces { + t.Run(surface.name, func(t *testing.T) { + querySurfaceAssertQueryMatchesClean(t, indexDir, cleanIndexDir, surface.q, surface.wantFiles) + }) + } +} + +type querySurfaceSearchHit struct { + FileName string + Content string + Branches []string + Version string +} + +func querySurfaceInitRepo(t *testing.T) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", querySurfaceMain) + querySurfaceWriteAndCommitFile(t, repoDir, "main_surface.go", "package main\n\nconst mainSurface = \"query-surface-main-stable\"\n", "main surface") + + runGit(t, repoDir, "checkout", "-b", querySurfaceOldFeature) + querySurfaceWriteAndCommitFile(t, repoDir, "feature_surface.go", "package main\n\nconst featureSurface = \"query-surface-feature-old\"\nconst regexSurface = \"regex-surface-old-feature-2026\"\n", "feature surface") + + runGit(t, repoDir, "checkout", querySurfaceMain) + runGit(t, repoDir, "checkout", "-b", querySurfaceOldRelease) + querySurfaceWriteAndCommitFile(t, repoDir, "old_release_surface.go", "package main\n\nconst oldReleaseSurface = \"query-surface-old-release-only\"\nconst caseSurface = \"CaseSurfaceExact\"\n", "old release surface") + + runGit(t, repoDir, "checkout", querySurfaceMain) + return repoDir +} + +func querySurfaceMutateRenameAddRemove(t *testing.T, repoDir string) { + t.Helper() + + runGit(t, repoDir, "checkout", querySurfaceOldFeature) + runGit(t, repoDir, 
"branch", "-m", querySurfaceNewFeature) + if err := os.Remove(filepath.Join(repoDir, "feature_surface.go")); err != nil { + t.Fatalf("Remove feature_surface.go: %v", err) + } + querySurfaceWriteAndCommitFile(t, repoDir, "renamed_surface.go", strings.Join([]string{ + "package main", + "", + "const renamedSurface = \"query-surface-renamed-only\"", + "const finalCommonSurface = \"query-surface-final-common\"", + "const regexSurface = \"regex-surface-renamed-2026\"", + "const caseSurface = \"CaseSurfaceExact\"", + "", + }, "\n"), "rename feature surface") + + runGit(t, repoDir, "checkout", querySurfaceMain) + runGit(t, repoDir, "checkout", "-b", querySurfaceNewRelease) + querySurfaceWriteAndCommitFile(t, repoDir, "new_release_surface.go", strings.Join([]string{ + "package main", + "", + "const newReleaseSurface = \"query-surface-new-release-only\"", + "const finalCommonSurface = \"query-surface-final-common\"", + "const regexSurface = \"regex-surface-added-2026\"", + "const caseSurface = \"CaseSurfaceExact\"", + "", + }, "\n"), "new release surface") + + runGit(t, repoDir, "checkout", querySurfaceMain) + runGit(t, repoDir, "branch", "-D", querySurfaceOldRelease) +} + +func querySurfaceWriteAndCommitFile(t *testing.T, repoDir, name, content, message string) { + t.Helper() + + file := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(file), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(file), err) + } + if err := os.WriteFile(file, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", file, err) + } + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", message) +} + +func querySurfaceOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + BuildOptions: index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: querySurfaceRepoName, + ID: querySurfaceRepoID, + 
}, + DisableCTags: true, + }, + } +} + +func querySurfaceIndexGitRepoWithPrepareSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled +} + +func querySurfaceAssertRepositoryBranchesMatchClean(t *testing.T, deltaIndexDir, cleanIndexDir string, wantBranches []string) { + t.Helper() + + deltaRepo := querySurfaceIndexedRepository(t, deltaIndexDir) + cleanRepo := querySurfaceIndexedRepository(t, cleanIndexDir) + + if got := querySurfaceBranchNames(deltaRepo.Branches); !cmp.Equal(got, wantBranches) { + t.Fatalf("delta branch names mismatch (-want +got):\n%s", cmp.Diff(wantBranches, got)) + } + if diff := cmp.Diff(cleanRepo.Branches, deltaRepo.Branches); diff != "" { + t.Fatalf("delta branch metadata differs from clean rebuild (-clean +delta):\n%s", diff) + } +} + +func querySurfaceAssertQueryMatchesClean(t *testing.T, deltaIndexDir, cleanIndexDir string, q query.Q, wantFiles []string) { + t.Helper() + + deltaHits := querySurfaceSearchHits(t, deltaIndexDir, q) + cleanHits := querySurfaceSearchHits(t, cleanIndexDir, q) + if diff := cmp.Diff(cleanHits, deltaHits); diff != "" { + t.Fatalf("%s search differs from clean full rebuild (-clean +delta):\n%s", q.String(), diff) + } + + 
gotFiles := querySurfaceSearchHitFileNames(deltaHits) + if wantFiles == nil { + wantFiles = []string{} + } + sort.Strings(wantFiles) + if diff := cmp.Diff(wantFiles, gotFiles); diff != "" { + t.Fatalf("%s file names mismatch (-want +got):\n%s", q.String(), diff) + } +} + +func querySurfaceSearchHits(t *testing.T, indexDir string, q query.Q) []querySurfaceSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true}) + if err != nil { + t.Fatalf("Search(%s): %v", q.String(), err) + } + + hits := make([]querySurfaceSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) + sort.Strings(branches) + hits = append(hits, querySurfaceSearchHit{ + FileName: file.FileName, + Content: string(file.Content), + Branches: branches, + Version: file.Version, + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + if hits[i].Version != hits[j].Version { + return hits[i].Version < hits[j].Version + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func querySurfaceSearchHitFileNames(hits []querySurfaceSearchHit) []string { + names := make([]string, 0, len(hits)) + for _, hit := range hits { + names = append(names, hit.FileName) + } + sort.Strings(names) + return names +} + +func querySurfaceIndexedRepository(t *testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: querySurfaceRepoName, + ID: querySurfaceRepoID, + }, + } + repo, _, ok, err := 
opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository %q not found", querySurfaceRepoName) + } + return repo +} + +func querySurfaceBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} + +func querySurfaceParseQuery(t *testing.T, raw string) query.Q { + t.Helper() + + q, err := query.Parse(raw) + if err != nil { + t.Fatalf("query.Parse(%q): %v", raw, err) + } + return q +} + +func querySurfaceRegexpContentQuery(t *testing.T, pattern string) query.Q { + t.Helper() + + re, err := syntax.Parse(pattern, syntax.Perl) + if err != nil { + t.Fatalf("syntax.Parse(%q): %v", pattern, err) + } + return &query.Regexp{Regexp: re, Content: true} +} diff --git a/gitindex/delta_multibranch_safety_stats_test.go b/gitindex/delta_multibranch_safety_stats_test.go new file mode 100644 index 000000000..8e21cd38c --- /dev/null +++ b/gitindex/delta_multibranch_safety_stats_test.go @@ -0,0 +1,1241 @@ +package gitindex + +import ( + "context" + "encoding/json" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/google/go-cmp/cmp" + "github.com/sourcegraph/zoekt" + "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" + "github.com/sourcegraph/zoekt/search" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" + "testing" +) + +// ---- delta_multibranch_ambiguity_test.go ---- + +const ( + ambiguityRepoName = "ambiguity-repository" + ambiguityRepoID = 7733 +) + +type ambiguityTree map[string]string + +type ambiguitySearchHit struct { + FileName string + Content string + Branches []string + Version string +} + +func TestIndexGitRepo_DeltaMultiBranchAmbiguityFallsBack(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + initialBranches []string + initialTrees 
map[string]ambiguityTree + mutate func(t *testing.T, repoDir string) + finalBranches []string + cleanBranches []string + compareBranches []string + patterns []string + requireFallback bool + }{ + { + name: "ambiguous rename without provenance", + initialBranches: []string{"foo", "bar"}, + initialTrees: map[string]ambiguityTree{ + "foo": {"foo.txt": "foo-original-needle\n"}, + "bar": {"bar.txt": "bar-stable-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "branch", "baz", "foo") + runGit(t, repoDir, "checkout", "bar") + runGit(t, repoDir, "branch", "-D", "foo") + }, + finalBranches: []string{"baz", "bar"}, + compareBranches: []string{"foo", "baz", "bar"}, + patterns: []string{"foo-original-needle", "bar-stable-needle"}, + requireFallback: true, + }, + { + name: "many-to-one branch rename", + initialBranches: []string{"a", "b"}, + initialTrees: map[string]ambiguityTree{ + "a": {"a.txt": "a-original-needle\n"}, + "b": {"b.txt": "b-original-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "branch", "c", "a") + runGit(t, repoDir, "checkout", "c") + runGit(t, repoDir, "branch", "-D", "a") + runGit(t, repoDir, "branch", "-D", "b") + }, + finalBranches: []string{"c"}, + compareBranches: []string{"a", "b", "c"}, + patterns: []string{"a-original-needle", "b-original-needle"}, + requireFallback: true, + }, + { + name: "one-to-many branch split", + initialBranches: []string{"a"}, + initialTrees: map[string]ambiguityTree{ + "a": { + "shared.txt": "split-shared-needle\n", + "c.txt": "split-c-original-needle\n", + }, + }, + mutate: func(t *testing.T, repoDir string) { + runGit(t, repoDir, "branch", "b", "a") + runGit(t, repoDir, "checkout", "-B", "c", "a") + ambiguityWriteCommit(t, repoDir, map[string]string{ + "c.txt": "split-c-final-needle\n", + }, nil, "modify c branch") + runGit(t, repoDir, "branch", "-D", "a") + }, + finalBranches: []string{"b", "c"}, + compareBranches: []string{"a", "b", "c"}, + 
patterns: []string{"split-shared-needle", "split-c-original-needle", "split-c-final-needle"}, + requireFallback: true, + }, + { + name: "duplicate final branch names after wildcard expansion", + initialBranches: []string{"main"}, + initialTrees: map[string]ambiguityTree{ + "main": {"main.txt": "duplicate-main-needle\n"}, + }, + mutate: func(t *testing.T, repoDir string) {}, + finalBranches: []string{"main", "m*"}, + cleanBranches: []string{"main"}, + compareBranches: []string{"main"}, + patterns: []string{"duplicate-main-needle"}, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + repoDir := ambiguityInitRepository(t, tc.initialBranches, tc.initialTrees) + indexDir := t.TempDir() + + initialOpts := ambiguityOptions(repoDir, indexDir, tc.initialBranches) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + tc.mutate(t, repoDir) + + deltaOpts := ambiguityOptions(repoDir, indexDir, tc.finalBranches) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled, err := ambiguityIndexWithSpies(t, deltaOpts) + if err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted before selecting the safe fallback") + } + if tc.requireFallback && !normalBuildCalled { + t.Fatalf("expected ambiguous branch mapping to fall back to a normal rebuild instead of accepting an arbitrary delta mapping") + } + + cleanBranches := tc.cleanBranches + if cleanBranches == nil { + cleanBranches = tc.finalBranches + } + cleanIndexDir := t.TempDir() + cleanOpts := ambiguityOptions(repoDir, cleanIndexDir, cleanBranches) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + ambiguityAssertIndexMatchesClean(t, indexDir, cleanIndexDir, tc.compareBranches, tc.patterns) + ambiguityAssertNoDuplicateBranches(t, indexDir) + }) + } +} + +func 
TestIndexGitRepo_DeltaBranchRemovedAndReAddedSameNameUnrelatedHistory(t *testing.T) { + t.Parallel() + + repoDir := ambiguityInitRepository(t, []string{"release"}, map[string]ambiguityTree{ + "release": {"release.txt": "release-old-lineage-needle\n"}, + }) + indexDir := t.TempDir() + + initialOpts := ambiguityOptions(repoDir, indexDir, []string{"release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + runGit(t, repoDir, "checkout", "--orphan", "replacement") + ambiguityRemoveAllTracked(t, repoDir) + ambiguityWriteCommit(t, repoDir, map[string]string{ + "release.txt": "release-new-lineage-needle\n", + }, nil, "replacement release") + runGit(t, repoDir, "branch", "-D", "release") + runGit(t, repoDir, "branch", "-m", "release") + + deltaOpts := ambiguityOptions(repoDir, indexDir, []string{"release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, _, err := ambiguityIndexWithSpies(t, deltaOpts) + if err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted for same-name unrelated history") + } + + cleanIndexDir := t.TempDir() + cleanOpts := ambiguityOptions(repoDir, cleanIndexDir, []string{"release"}) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + ambiguityAssertIndexMatchesClean(t, indexDir, cleanIndexDir, []string{"release"}, []string{ + "release-old-lineage-needle", + "release-new-lineage-needle", + }) +} + +func TestIndexGitRepo_DeltaOldCommitMissingFallsBack(t *testing.T) { + t.Parallel() + + repoDir := ambiguityInitRepository(t, []string{"release"}, map[string]ambiguityTree{ + "release": {"release.txt": "old-missing-commit-needle\n"}, + }) + indexDir := t.TempDir() + + initialOpts := ambiguityOptions(repoDir, indexDir, []string{"release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + if err := 
os.RemoveAll(repoDir); err != nil { + t.Fatalf("RemoveAll(%q): %v", repoDir, err) + } + runGit(t, repoDir, "init", "-b", "release") + ambiguityWriteCommit(t, repoDir, map[string]string{ + "release.txt": "new-repository-needle\n", + }, nil, "new unrelated repository") + + deltaOpts := ambiguityOptions(repoDir, indexDir, []string{"release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled, err := ambiguityIndexWithSpies(t, deltaOpts) + if err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted before detecting the missing old commit") + } + if !normalBuildCalled { + t.Fatal("expected missing old commit to fall back to a normal rebuild") + } + + cleanIndexDir := t.TempDir() + cleanOpts := ambiguityOptions(repoDir, cleanIndexDir, []string{"release"}) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + ambiguityAssertIndexMatchesClean(t, indexDir, cleanIndexDir, []string{"release"}, []string{ + "old-missing-commit-needle", + "new-repository-needle", + }) +} + +func TestIndexGitRepo_DeltaNewCommitMissingErrorsWithoutPartialIndex(t *testing.T) { + t.Parallel() + + repoDir := ambiguityInitRepository(t, []string{"main"}, map[string]ambiguityTree{ + "main": {"main.txt": "new-missing-main-needle\n"}, + }) + indexDir := t.TempDir() + + initialOpts := ambiguityOptions(repoDir, indexDir, []string{"main"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + before := ambiguityIndexedRepository(t, indexDir) + + deltaOpts := ambiguityOptions(repoDir, indexDir, []string{"main", "missing"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled, err := ambiguityIndexWithSpies(t, deltaOpts) + if err == nil { + t.Fatal("expected missing new branch commit to return an explicit error") + } + if deltaBuildCalled || normalBuildCalled { + t.Fatalf("missing new 
branch should fail before any build preparation, got delta=%v normal=%v", deltaBuildCalled, normalBuildCalled) + } + + after := ambiguityIndexedRepository(t, indexDir) + if diff := cmp.Diff(before.Branches, after.Branches); diff != "" { + t.Fatalf("missing new branch changed repository metadata despite failing (-before +after):\n%s", diff) + } + if got := ambiguitySearch(t, indexDir, &query.Substring{Pattern: "new-missing-main-needle"}); len(got) != 1 { + t.Fatalf("existing index should remain searchable after failed missing-branch update, got %+v", got) + } +} + +func TestIndexGitRepo_DeltaCompoundShardBeforeBranchSetChangeFallsBack(t *testing.T) { + t.Parallel() + + repoDir := ambiguityInitRepository(t, []string{"main", "release"}, map[string]ambiguityTree{ + "main": {"main.txt": "compound-main-needle\n"}, + "release": {"release.txt": "compound-release-needle\n"}, + }) + indexDir := t.TempDir() + + initialOpts := ambiguityOptions(repoDir, indexDir, []string{"main", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + ambiguityMergeIndexIntoCompoundShard(t, indexDir) + + runGit(t, repoDir, "branch", "stable", "release") + runGit(t, repoDir, "branch", "-D", "release") + + deltaOpts := ambiguityOptions(repoDir, indexDir, []string{"main", "stable"}) + deltaOpts.BuildOptions.IsDelta = true + deltaBuildCalled, normalBuildCalled, err := ambiguityIndexWithSpies(t, deltaOpts) + if err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted before rejecting the compound shard") + } + if !normalBuildCalled { + t.Fatal("expected compound shard before branch-set change to fall back to a normal rebuild") + } + + cleanIndexDir := t.TempDir() + cleanOpts := ambiguityOptions(repoDir, cleanIndexDir, []string{"main", "stable"}) + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + + 
ambiguityAssertIndexMatchesClean(t, indexDir, cleanIndexDir, []string{"main", "release", "stable"}, []string{ + "compound-main-needle", + "compound-release-needle", + }) +} + +func ambiguityOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + DeltaAdmissionThresholds: DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 100, + MaxPhysicalLiveBytesRatio: 100, + MaxTombstonePathRatio: 100, + MaxShardFanoutRatio: 100, + }, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: ambiguityRepoID, + Name: ambiguityRepoName, + }, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func ambiguityIndexWithSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool, err error) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + _, err = indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }) + return deltaBuildCalled, normalBuildCalled, err +} + +func ambiguityInitRepository(t *testing.T, branches []string, trees map[string]ambiguityTree) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "main") + runGit(t, repoDir, "commit", "--allow-empty", "-m", "base") + base := ambiguityGitOutput(t, repoDir, "rev-parse", "HEAD") + + for _, branch := range branches { + runGit(t, repoDir, 
"checkout", "-B", branch, base) + ambiguityRemoveAllTracked(t, repoDir) + ambiguityWriteCommit(t, repoDir, map[string]string(trees[branch]), nil, branch+" initial") + } + runGit(t, repoDir, "checkout", branches[0]) + return repoDir +} + +func ambiguityWriteCommit(t *testing.T, repoDir string, files map[string]string, deletes []string, message string) { + t.Helper() + + for name, content := range files { + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } + for _, name := range deletes { + path := filepath.Join(repoDir, name) + if err := os.Remove(path); err != nil { + t.Fatalf("Remove(%q): %v", path, err) + } + } + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "--allow-empty", "-m", message) +} + +func ambiguityRemoveAllTracked(t *testing.T, repoDir string) { + t.Helper() + runGit(t, repoDir, "rm", "-r", "-f", "--ignore-unmatch", ".") +} + +func ambiguityIndexedRepository(t *testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: ambiguityRepoID, + Name: ambiguityRepoName, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata(%q): %v", indexDir, err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata(%q): repository not found", indexDir) + } + return repo +} + +func ambiguityAssertIndexMatchesClean(t *testing.T, indexDir, cleanIndexDir string, branches, patterns []string) { + t.Helper() + + gotRepo := ambiguityIndexedRepository(t, indexDir) + wantRepo := ambiguityIndexedRepository(t, cleanIndexDir) + if diff := cmp.Diff(wantRepo.Branches, gotRepo.Branches); diff != "" { + t.Fatalf("repository branches mismatch against clean rebuild (-want +got):\n%s", diff) + } 
+ + ambiguityAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "unfiltered/all", &query.Const{Value: true}) + for _, pattern := range patterns { + ambiguityAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "unfiltered/"+pattern, &query.Substring{Pattern: pattern}) + } + + for _, branch := range branches { + ambiguityAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branch/"+branch+"/all", &query.Branch{Pattern: branch, Exact: true}) + ambiguityAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branchesrepos/"+branch+"/all", query.NewSingleBranchesRepos(branch, ambiguityRepoID)) + for _, pattern := range patterns { + substr := &query.Substring{Pattern: pattern} + ambiguityAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branch/"+branch+"/"+pattern, query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, substr)) + ambiguityAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branchesrepos/"+branch+"/"+pattern, query.NewAnd(query.NewSingleBranchesRepos(branch, ambiguityRepoID), substr)) + } + } +} + +func ambiguityAssertQueryMatchesClean(t *testing.T, indexDir, cleanIndexDir, label string, q query.Q) { + t.Helper() + + got := ambiguitySearch(t, indexDir, q) + want := ambiguitySearch(t, cleanIndexDir, q) + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("%s mismatch against clean rebuild (-want +got):\n%s", label, diff) + } +} + +func ambiguitySearch(t *testing.T, indexDir string, q query.Q) []ambiguitySearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{ + Whole: true, + ShardMaxMatchCount: 1000, + TotalMaxMatchCount: 1000, + MaxDocDisplayCount: 1000, + MaxMatchDisplayCount: 1000, + }) + if err != nil { + t.Fatalf("Search(%s): %v", q.String(), err) + } + + hits := make([]ambiguitySearchHit, 0, len(result.Files)) + for _, file := 
range result.Files { + branches := append([]string(nil), file.Branches...) + sort.Strings(branches) + hits = append(hits, ambiguitySearchHit{ + FileName: file.FileName, + Content: string(file.Content), + Branches: branches, + Version: file.Version, + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + if hits[i].Version != hits[j].Version { + return hits[i].Version < hits[j].Version + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func ambiguityAssertNoDuplicateBranches(t *testing.T, indexDir string) { + t.Helper() + + repo := ambiguityIndexedRepository(t, indexDir) + seen := map[string]struct{}{} + for _, branch := range repo.Branches { + if _, ok := seen[branch.Name]; ok { + t.Fatalf("repository metadata contains duplicate branch %q: %+v", branch.Name, repo.Branches) + } + seen[branch.Name] = struct{}{} + } + + for _, hit := range ambiguitySearch(t, indexDir, &query.Const{Value: true}) { + seen := map[string]struct{}{} + for _, branch := range hit.Branches { + if _, ok := seen[branch]; ok { + t.Fatalf("search hit %q contains duplicate branch %q: %+v", hit.FileName, branch, hit.Branches) + } + seen[branch] = struct{}{} + } + } +} + +func ambiguityMergeIndexIntoCompoundShard(t *testing.T, indexDir string) { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: ambiguityRepoID, + Name: ambiguityRepoName, + }, + } + shards := opts.FindAllShards() + if len(shards) == 0 { + t.Fatal("expected at least one simple shard before merging") + } + + files := make([]index.IndexFile, 0, len(shards)) + for _, shard := range shards { + file, err := os.Open(shard) + if err != nil { + t.Fatalf("Open(%q): %v", shard, err) + } + indexFile, err := index.NewIndexFile(file) + if err != 
nil { + _ = file.Close() + t.Fatalf("NewIndexFile(%q): %v", shard, err) + } + files = append(files, indexFile) + } + + tmpName, dstName, err := index.Merge(indexDir, files...) + for _, file := range files { + file.Close() + } + if err != nil { + t.Fatalf("Merge(%q): %v", indexDir, err) + } + if err := os.Rename(tmpName, dstName); err != nil { + t.Fatalf("Rename(%q, %q): %v", tmpName, dstName, err) + } + + for _, shard := range shards { + paths, err := index.IndexFilePaths(shard) + if err != nil { + t.Fatalf("IndexFilePaths(%q): %v", shard, err) + } + for _, path := range paths { + if err := os.Remove(path); err != nil { + t.Fatalf("Remove(%q): %v", path, err) + } + } + } + + compoundShards, err := filepath.Glob(filepath.Join(indexDir, "compound-*.zoekt")) + if err != nil { + t.Fatalf("Glob compound shards: %v", err) + } + if len(compoundShards) != 1 { + t.Fatalf("got compound shards %v, want exactly one", compoundShards) + } +} + +func ambiguityGitOutput(t *testing.T, cwd string, args ...string) string { + t.Helper() + + out, err := ambiguityGitCombinedOutput(cwd, args...) + if err != nil { + t.Fatalf("git %s failed: %v\n%s", strings.Join(args, " "), err, out) + } + return strings.TrimSpace(string(out)) +} + +func ambiguityGitCombinedOutput(cwd string, args ...string) ([]byte, error) { + cmd := exec.Command("git", args...) 
+ cmd.Dir = cwd + cmd.Env = append(os.Environ(), + "GIT_CONFIG_GLOBAL=", + "GIT_CONFIG_SYSTEM=", + "GIT_COMMITTER_NAME=Kierkegaard", + "GIT_COMMITTER_EMAIL=soren@apache.com", + "GIT_AUTHOR_NAME=Kierkegaard", + "GIT_AUTHOR_EMAIL=soren@apache.com", + ) + return cmd.CombinedOutput() +} + +// ---- delta_multibranch_stats_test.go ---- + +const ( + statsBranchRepoID = 9127 + statsBranchRepoName = "stats-branch-repository" +) + +type statsBranchFiles map[string]string + +type statsBranchSearchHit struct { + FileName string + Content string + Branches []string +} + +func TestIndexGitRepo_DeltaMultiBranchStatsAfterBranchRename(t *testing.T) { + t.Parallel() + + repoDir := statsBranchCreateRepository(t, []string{"feature-a", "release"}, map[string]statsBranchFiles{ + "feature-a": { + "branch.txt": "stats-rename-old-feature\n", + }, + "release": { + "release.txt": "stats-rename-release\n", + }, + }) + indexDir := t.TempDir() + + statsBranchRunIndex(t, repoDir, indexDir, []string{"feature-a", "release"}, false, "") + oldShards := statsBranchFindAllShards(t, indexDir) + + statsBranchRenameBranch(t, repoDir, "feature-a", "feature-b") + statsBranchCheckoutWriteCommit(t, repoDir, "feature-b", statsBranchFiles{ + "branch.txt": "stats-rename-new-feature\n", + }, "modify renamed branch") + + deltaCalled, normalCalled := statsBranchRunIndex(t, repoDir, indexDir, []string{"feature-b", "release"}, true, "") + if !deltaCalled { + t.Error("expected branch rename to attempt a delta build") + } + if normalCalled { + t.Error("expected branch rename to stay on the delta path without a normal-build fallback") + } + + cleanIndexDir := t.TempDir() + statsBranchRunIndex(t, repoDir, cleanIndexDir, []string{"feature-b", "release"}, false, "") + + statsBranchAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, []string{"feature-b", "release"}) + statsBranchAssertLiveStatsMatchClean(t, indexDir, cleanIndexDir) + statsBranchAssertDeltaDebt(t, indexDir, 1, []string{"branch.txt"}) + 
statsBranchAssertOldShardTombstones(t, oldShards, []string{"branch.txt"}) + + statsBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "new feature content", &query.Substring{Pattern: "stats-rename-new-feature"}) + statsBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "release content", &query.Substring{Pattern: "stats-rename-release"}) + statsBranchAssertFileNames(t, indexDir, "old branch has no branch-query results", statsBranchBranchQuery("feature-a", &query.Const{Value: true}), nil) + statsBranchAssertFileNames(t, indexDir, "old branch has no BranchesRepos results", statsBranchBranchesReposQuery("feature-a", &query.Const{Value: true}), nil) + statsBranchAssertFileNames(t, indexDir, "renamed branch sees new file", statsBranchBranchQuery("feature-b", &query.Substring{Pattern: "stats-rename-new-feature"}), []string{"branch.txt"}) + statsBranchAssertFileNames(t, indexDir, "renamed branch sees new file through BranchesRepos", statsBranchBranchesReposQuery("feature-b", &query.Substring{Pattern: "stats-rename-new-feature"}), []string{"branch.txt"}) +} + +func TestIndexGitRepo_DeltaMultiBranchStatsAfterAddIdenticalBranch(t *testing.T) { + t.Parallel() + + repoDir := statsBranchCreateRepository(t, []string{"main"}, map[string]statsBranchFiles{ + "main": { + "one.txt": "stats-add-identical-one\n", + "two.txt": "stats-add-identical-two\n", + }, + }) + indexDir := t.TempDir() + + statsBranchRunIndex(t, repoDir, indexDir, []string{"main"}, false, "") + initialRepo := statsBranchIndexedRepository(t, indexDir) + if initialRepo.DeltaStats == nil { + t.Fatal("initial full build DeltaStats is nil") + } + initialLiveDocumentCount := initialRepo.DeltaStats.LiveDocumentCount + oldShards := statsBranchFindAllShards(t, indexDir) + + runGit(t, repoDir, "branch", "release", "main") + + deltaCalled, normalCalled := statsBranchRunIndex(t, repoDir, indexDir, []string{"main", "release"}, true, "") + if !deltaCalled { + t.Error("expected identical branch addition to attempt a 
delta build") + } + if normalCalled { + t.Error("expected identical branch addition to stay on the delta path without a normal-build fallback") + } + + cleanIndexDir := t.TempDir() + statsBranchRunIndex(t, repoDir, cleanIndexDir, []string{"main", "release"}, false, "") + + statsBranchAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, []string{"main", "release"}) + statsBranchAssertLiveStatsMatchClean(t, indexDir, cleanIndexDir) + repo := statsBranchIndexedRepository(t, indexDir) + if got := repo.DeltaStats.LiveDocumentCount; got != initialLiveDocumentCount { + t.Fatalf("identical branch add LiveDocumentCount = %d, want unchanged from initial %d", got, initialLiveDocumentCount) + } + statsBranchAssertDeltaDebt(t, indexDir, 1, []string{"one.txt", "two.txt"}) + statsBranchAssertOldShardTombstones(t, oldShards, []string{"one.txt", "two.txt"}) + + statsBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "all files", &query.Const{Value: true}) + statsBranchAssertFileNames(t, indexDir, "main all files", statsBranchBranchQuery("main", &query.Const{Value: true}), []string{"one.txt", "two.txt"}) + statsBranchAssertFileNames(t, indexDir, "release all files", statsBranchBranchQuery("release", &query.Const{Value: true}), []string{"one.txt", "two.txt"}) + statsBranchAssertFileNames(t, indexDir, "release all files through BranchesRepos", statsBranchBranchesReposQuery("release", &query.Const{Value: true}), []string{"one.txt", "two.txt"}) +} + +func TestIndexGitRepo_DeltaMultiBranchStatsAfterRemoveBranchOnlyFiles(t *testing.T) { + t.Parallel() + + repoDir := statsBranchCreateRepository(t, []string{"main", "release"}, map[string]statsBranchFiles{ + "main": { + "main.txt": "stats-remove-main\n", + }, + "release": { + "release-only.txt": "stats-remove-release-only\n", + }, + }) + indexDir := t.TempDir() + + statsBranchRunIndex(t, repoDir, indexDir, []string{"main", "release"}, false, "") + oldShards := statsBranchFindAllShards(t, indexDir) + + runGit(t, repoDir, 
"checkout", "main") + runGit(t, repoDir, "branch", "-D", "release") + + deltaCalled, normalCalled := statsBranchRunIndex(t, repoDir, indexDir, []string{"main"}, true, "") + if !deltaCalled { + t.Error("expected branch removal to attempt a delta build") + } + if normalCalled { + t.Error("expected branch removal to stay on the delta path without a normal-build fallback") + } + + cleanIndexDir := t.TempDir() + statsBranchRunIndex(t, repoDir, cleanIndexDir, []string{"main"}, false, "") + + statsBranchAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, []string{"main"}) + statsBranchAssertLiveStatsMatchClean(t, indexDir, cleanIndexDir) + statsBranchAssertDeltaDebt(t, indexDir, 1, []string{"release-only.txt"}) + statsBranchAssertOldShardTombstones(t, oldShards, []string{"release-only.txt"}) + + statsBranchAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "all files", &query.Const{Value: true}) + statsBranchAssertFileNames(t, indexDir, "release-only content absent unfiltered", &query.Substring{Pattern: "stats-remove-release-only"}, nil) + statsBranchAssertFileNames(t, indexDir, "removed branch has no branch-query results", statsBranchBranchQuery("release", &query.Const{Value: true}), nil) + statsBranchAssertFileNames(t, indexDir, "removed branch has no BranchesRepos results", statsBranchBranchesReposQuery("release", &query.Const{Value: true}), nil) + statsBranchAssertFileNames(t, indexDir, "main branch remains searchable", statsBranchBranchQuery("main", &query.Substring{Pattern: "stats-remove-main"}), []string{"main.txt"}) +} + +func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogAcceptedBranchSetDelta(t *testing.T) { + t.Parallel() + + repoDir := statsBranchCreateRepository(t, []string{"feature-a", "release"}, map[string]statsBranchFiles{ + "feature-a": { + "branch.txt": "stats-log-feature\n", + }, + "release": { + "release.txt": "stats-log-release\n", + }, + }) + indexDir := t.TempDir() + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + + 
statsBranchRunIndex(t, repoDir, indexDir, []string{"feature-a", "release"}, false, "") + statsBranchRenameBranch(t, repoDir, "feature-a", "feature-b") + + deltaCalled, normalCalled := statsBranchRunIndex(t, repoDir, indexDir, []string{"feature-b", "release"}, true, logPath) + if !deltaCalled { + t.Error("expected branch-set update to attempt a delta build") + } + if normalCalled { + t.Error("expected branch-set update to be accepted as a delta build") + } + + entries := statsBranchReadAdmissionLogObjects(t, logPath) + if len(entries) != 1 { + t.Fatalf("got %d admission log entries, want 1", len(entries)) + } + entry := entries[0] + statsBranchAssertJSONBool(t, entry, "accepted", true) + statsBranchAssertJSONString(t, entry, "reason", "accepted") + statsBranchAssertJSONNumber(t, entry, "old_branch_count", 2) + statsBranchAssertJSONNumber(t, entry, "new_branch_count", 2) + statsBranchAssertJSONPresent(t, entry, "branch_mapping") + statsBranchAssertJSONPresent(t, entry, "candidate_indexed_bytes") + statsBranchAssertJSONPresent(t, entry, "candidate_document_count") + statsBranchAssertJSONPresent(t, entry, "write_bytes_ratio") + statsBranchAssertJSONPresent(t, entry, "physical_live_ratio") +} + +func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogAmbiguousMappingFallback(t *testing.T) { + t.Parallel() + + repoDir := statsBranchCreateRepository(t, []string{"foo", "bar"}, map[string]statsBranchFiles{ + "foo": { + "foo.txt": "stats-ambiguous-foo\n", + }, + "bar": { + "bar.txt": "stats-ambiguous-bar\n", + }, + }) + indexDir := t.TempDir() + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + + statsBranchRunIndex(t, repoDir, indexDir, []string{"foo", "bar"}, false, "") + runGit(t, repoDir, "checkout", "foo") + runGit(t, repoDir, "branch", "-m", "baz") + + deltaCalled, normalCalled := statsBranchRunIndex(t, repoDir, indexDir, []string{"baz", "bar"}, true, logPath) + if !deltaCalled { + t.Error("expected ambiguous branch mapping to attempt a delta build") + } 
+ if !normalCalled { + t.Error("expected ambiguous branch mapping to fall back to a normal rebuild") + } + + entries := statsBranchReadAdmissionLogObjects(t, logPath) + if len(entries) != 1 { + t.Fatalf("got %d admission log entries, want 1", len(entries)) + } + entry := entries[0] + statsBranchAssertJSONBool(t, entry, "accepted", false) + statsBranchAssertJSONStringContains(t, entry, "reason", "ambiguous branch mapping") + statsBranchAssertJSONNumber(t, entry, "old_branch_count", 2) + statsBranchAssertJSONNumber(t, entry, "new_branch_count", 2) + statsBranchAssertJSONPresent(t, entry, "branch_mapping") +} + +func statsBranchRunIndex(t *testing.T, repoDir, indexDir string, branches []string, isDelta bool, logPath string) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + opts := statsBranchOptions(repoDir, indexDir, branches) + opts.BuildOptions.IsDelta = isDelta + opts.DeltaAdmissionLogPath = logPath + if !isDelta { + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + return false, false + } + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + return deltaBuildCalled, normalBuildCalled +} + +func statsBranchOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: filepath.Join(repoDir, ".git"), + Branches: 
append([]string(nil), branches...), + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + DeltaAdmissionThresholds: DeltaAdmissionThresholds{ + MaxDeltaIndexedBytesRatio: 100, + MaxPhysicalLiveBytesRatio: 100, + MaxTombstonePathRatio: 100, + MaxShardFanoutRatio: 100, + }, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: statsBranchRepoID, + Name: statsBranchRepoName, + }, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func statsBranchCreateRepository(t *testing.T, branches []string, trees map[string]statsBranchFiles) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "seed") + runGit(t, repoDir, "commit", "--allow-empty", "-m", "seed") + for _, branch := range branches { + runGit(t, repoDir, "checkout", "-B", branch, "seed") + statsBranchWriteFiles(t, repoDir, trees[branch]) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "--allow-empty", "-m", "initial "+branch) + } + runGit(t, repoDir, "checkout", branches[0]) + return repoDir +} + +func statsBranchCheckoutWriteCommit(t *testing.T, repoDir, branch string, files statsBranchFiles, message string) { + t.Helper() + + runGit(t, repoDir, "checkout", branch) + statsBranchWriteFiles(t, repoDir, files) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", message) +} + +func statsBranchWriteFiles(t *testing.T, repoDir string, files statsBranchFiles) { + t.Helper() + + for name, content := range files { + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } +} + +func statsBranchRenameBranch(t *testing.T, repoDir, oldBranch, newBranch string) { + t.Helper() + + runGit(t, repoDir, "branch", "-m", oldBranch, newBranch) +} + +func statsBranchIndexedRepository(t *testing.T, indexDir string) 
*zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: statsBranchRepoID, + Name: statsBranchRepoName, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository %q not found", statsBranchRepoName) + } + return repo +} + +func statsBranchFindAllShards(t *testing.T, indexDir string) []string { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: statsBranchRepoID, + Name: statsBranchRepoName, + }, + } + shards := opts.FindAllShards() + sort.Strings(shards) + if len(shards) == 0 { + t.Fatal("expected at least one shard") + } + return shards +} + +func statsBranchAssertRepositoryBranchesMatchClean(t *testing.T, indexDir, cleanIndexDir string, wantBranches []string) { + t.Helper() + + gotRepo := statsBranchIndexedRepository(t, indexDir) + cleanRepo := statsBranchIndexedRepository(t, cleanIndexDir) + if diff := cmp.Diff(wantBranches, statsBranchRepositoryBranchNames(gotRepo.Branches)); diff != "" { + t.Fatalf("delta branch names mismatch (-want +got):\n%s", diff) + } + if diff := cmp.Diff(cleanRepo.Branches, gotRepo.Branches); diff != "" { + t.Fatalf("delta branch metadata differs from clean rebuild (-clean +delta):\n%s", diff) + } +} + +func statsBranchAssertLiveStatsMatchClean(t *testing.T, indexDir, cleanIndexDir string) { + t.Helper() + + got := statsBranchIndexedRepository(t, indexDir) + want := statsBranchIndexedRepository(t, cleanIndexDir) + if got.DeltaStats == nil { + t.Fatal("delta index DeltaStats is nil") + } + if want.DeltaStats == nil { + t.Fatal("clean index DeltaStats is nil") + } + + gotLive := statsBranchLiveStats{ + LiveIndexedBytes: got.DeltaStats.LiveIndexedBytes, + LiveDocumentCount: got.DeltaStats.LiveDocumentCount, + LivePathCount: got.DeltaStats.LivePathCount, + } + wantLive := 
statsBranchLiveStats{ + LiveIndexedBytes: want.DeltaStats.LiveIndexedBytes, + LiveDocumentCount: want.DeltaStats.LiveDocumentCount, + LivePathCount: want.DeltaStats.LivePathCount, + } + if diff := cmp.Diff(wantLive, gotLive); diff != "" { + t.Fatalf("delta live stats differ from clean full rebuild (-want +got):\n%s", diff) + } +} + +type statsBranchLiveStats struct { + LiveIndexedBytes uint64 + LiveDocumentCount uint64 + LivePathCount uint64 +} + +func statsBranchAssertDeltaDebt(t *testing.T, indexDir string, wantLayers uint64, wantTombstones []string) { + t.Helper() + + repo := statsBranchIndexedRepository(t, indexDir) + if repo.DeltaStats == nil { + t.Fatal("DeltaStats is nil") + } + if got := repo.DeltaStats.DeltaLayerCount; got != wantLayers { + t.Fatalf("DeltaLayerCount = %d, want %d", got, wantLayers) + } + if got, want := repo.DeltaStats.TombstonePathCount, uint64(len(wantTombstones)); got < want { + t.Fatalf("TombstonePathCount = %d, want at least %d", got, want) + } + if repo.DeltaStats.PhysicalDocumentCount <= repo.DeltaStats.LiveDocumentCount { + t.Fatalf("expected physical document debt, got physical %d live %d", repo.DeltaStats.PhysicalDocumentCount, repo.DeltaStats.LiveDocumentCount) + } + if repo.DeltaStats.PhysicalIndexedBytes <= repo.DeltaStats.LiveIndexedBytes { + t.Fatalf("expected physical byte debt, got physical %d live %d", repo.DeltaStats.PhysicalIndexedBytes, repo.DeltaStats.LiveIndexedBytes) + } +} + +func statsBranchAssertOldShardTombstones(t *testing.T, oldShards []string, wantPaths []string) { + t.Helper() + + for _, shard := range oldShards { + repositories, _, err := index.ReadMetadataPathAlive(shard) + if err != nil { + t.Fatalf("ReadMetadataPathAlive(%q): %v", shard, err) + } + var repo *zoekt.Repository + for _, candidate := range repositories { + if candidate.ID == statsBranchRepoID { + repo = candidate + break + } + } + if repo == nil { + t.Fatalf("old shard %q no longer has alive repo ID %d metadata", shard, statsBranchRepoID) + 
} + for _, path := range wantPaths { + if _, ok := repo.FileTombstones[path]; !ok { + t.Fatalf("old shard %q missing file tombstone %q in %+v", shard, path, repo.FileTombstones) + } + } + } +} + +func statsBranchAssertQueryMatchesClean(t *testing.T, indexDir, cleanIndexDir, label string, q query.Q) { + t.Helper() + + got := statsBranchSearch(t, indexDir, q) + want := statsBranchSearch(t, cleanIndexDir, q) + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("%s differs from clean rebuild (-clean +delta):\n%s", label, diff) + } +} + +func statsBranchAssertFileNames(t *testing.T, indexDir, label string, q query.Q, want []string) { + t.Helper() + + got := statsBranchFileNames(statsBranchSearch(t, indexDir, q)) + want = append([]string(nil), want...) + if want == nil { + want = []string{} + } + sort.Strings(want) + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("%s file names mismatch (-want +got):\n%s", label, diff) + } +} + +func statsBranchSearch(t *testing.T, indexDir string, q query.Q) []statsBranchSearchHit { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher(%q): %v", indexDir, err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, &zoekt.SearchOptions{Whole: true}) + if err != nil { + t.Fatalf("Search(%s): %v", q.String(), err) + } + + hits := make([]statsBranchSearchHit, 0, len(result.Files)) + for _, file := range result.Files { + branches := append([]string(nil), file.Branches...) 
+ sort.Strings(branches) + hits = append(hits, statsBranchSearchHit{ + FileName: file.FileName, + Content: string(file.Content), + Branches: branches, + }) + } + sort.Slice(hits, func(i, j int) bool { + if hits[i].FileName != hits[j].FileName { + return hits[i].FileName < hits[j].FileName + } + if hits[i].Content != hits[j].Content { + return hits[i].Content < hits[j].Content + } + return strings.Join(hits[i].Branches, "\x00") < strings.Join(hits[j].Branches, "\x00") + }) + return hits +} + +func statsBranchFileNames(hits []statsBranchSearchHit) []string { + names := make([]string, 0, len(hits)) + for _, hit := range hits { + names = append(names, hit.FileName) + } + sort.Strings(names) + return names +} + +func statsBranchBranchQuery(branch string, q query.Q) query.Q { + return query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, q) +} + +func statsBranchBranchesReposQuery(branch string, q query.Q) query.Q { + return query.NewAnd(query.NewSingleBranchesRepos(branch, statsBranchRepoID), q) +} + +func statsBranchRepositoryBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} + +func statsBranchReadAdmissionLogObjects(t *testing.T, path string) []map[string]any { + t.Helper() + + blob, err := os.ReadFile(path) + if err != nil { + t.Fatalf("ReadFile(%q): %v", path, err) + } + var entries []map[string]any + for _, line := range strings.Split(strings.TrimSpace(string(blob)), "\n") { + if line == "" { + continue + } + var entry map[string]any + if err := json.Unmarshal([]byte(line), &entry); err != nil { + t.Fatalf("Unmarshal(%q): %v", line, err) + } + entries = append(entries, entry) + } + return entries +} + +func statsBranchAssertJSONPresent(t *testing.T, entry map[string]any, key string) { + t.Helper() + + if _, ok := entry[key]; !ok { + t.Fatalf("admission log missing key %q in %#v", key, entry) + } +} + +func 
statsBranchAssertJSONBool(t *testing.T, entry map[string]any, key string, want bool) { + t.Helper() + + got, ok := entry[key].(bool) + if !ok { + t.Fatalf("admission log key %q = %#v, want bool", key, entry[key]) + } + if got != want { + t.Fatalf("admission log key %q = %v, want %v", key, got, want) + } +} + +func statsBranchAssertJSONString(t *testing.T, entry map[string]any, key, want string) { + t.Helper() + + got, ok := entry[key].(string) + if !ok { + t.Fatalf("admission log key %q = %#v, want string", key, entry[key]) + } + if got != want { + t.Fatalf("admission log key %q = %q, want %q", key, got, want) + } +} + +func statsBranchAssertJSONStringContains(t *testing.T, entry map[string]any, key, want string) { + t.Helper() + + got, ok := entry[key].(string) + if !ok { + t.Fatalf("admission log key %q = %#v, want string", key, entry[key]) + } + if !strings.Contains(got, want) { + t.Fatalf("admission log key %q = %q, want substring %q", key, got, want) + } +} + +func statsBranchAssertJSONNumber(t *testing.T, entry map[string]any, key string, want float64) { + t.Helper() + + got, ok := entry[key].(float64) + if !ok { + t.Fatalf("admission log key %q = %#v, want number", key, entry[key]) + } + if got != want { + t.Fatalf("admission log key %q = %v, want %v", key, got, want) + } +} diff --git a/gitindex/index_test.go b/gitindex/index_test.go index 66dc71340..3766bb016 100644 --- a/gitindex/index_test.go +++ b/gitindex/index_test.go @@ -173,6 +173,338 @@ func TestIndexGitRepo_Worktree(t *testing.T) { } } +func TestIndexGitRepo_ResolveHEADToBranch_Worktree(t *testing.T) { + t.Parallel() + + _, worktreeDir := initGitWorktree(t, "file1.go", "package main\n\nfunc main() {}\n") + indexDir := t.TempDir() + + opts := Options{ + RepoDir: worktreeDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: true, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + }, + } + + if _, err := IndexGitRepo(opts); err != nil { 
+ t.Fatalf("IndexGitRepo(worktree): %v", err) + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"worktree-branch"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } +} + +func TestIndexGitRepo_ResolveHEADToBranch_DisabledPreservesHEAD(t *testing.T) { + t.Parallel() + + _, worktreeDir := initGitWorktree(t, "file1.go", "package main\n\nfunc main() {}\n") + indexDir := t.TempDir() + + opts := Options{ + RepoDir: worktreeDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: false, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + }, + } + + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("IndexGitRepo(worktree): %v", err) + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"HEAD"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } +} + +func TestIndexGitRepo_ResolveHEADToBranch_DetachedHEADPreservesHEAD(t *testing.T) { + t.Parallel() + + repoDir, _ := initGitWorktree(t, "file1.go", "package main\n\nfunc main() {}\n") + runGit(t, repoDir, "checkout", "--detach") + indexDir := t.TempDir() + + opts := Options{ + RepoDir: repoDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: true, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + }, + } + + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("IndexGitRepo(detached HEAD): %v", err) + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"HEAD"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } +} + +func TestIndexGitRepo_ResolveHEADToBranch_BareRepo(t *testing.T) { + 
t.Parallel() + + repoDir, _ := initGitWorktree(t, "file1.go", "package main\n\nfunc main() {}\n") + bareDir := cloneBareRepo(t, repoDir) + indexDir := t.TempDir() + + opts := Options{ + RepoDir: bareDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: true, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + }, + } + + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("IndexGitRepo(bare HEAD): %v", err) + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"main"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } +} + +func TestIndexGitRepo_ResolveHEADToBranch_DedupesResolvedBranch(t *testing.T) { + t.Parallel() + + _, worktreeDir := initGitWorktree(t, "file1.go", "package main\n\nfunc main() {}\n") + + for _, tc := range []struct { + name string + branches []string + }{ + {name: "explicit branch", branches: []string{"HEAD", "worktree-branch"}}, + {name: "wildcard branch", branches: []string{"HEAD", "worktree-*"}}, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + indexDir := t.TempDir() + opts := Options{ + RepoDir: worktreeDir, + Branches: tc.branches, + ResolveHEADToBranch: true, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + }, + } + + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("IndexGitRepo(%v): %v", tc.branches, err) + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"worktree-branch"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } + }) + } +} + +func TestIndexGitRepo_ResolveHEADToBranch_MigratesLegacyHEADOnIncremental(t *testing.T) { + t.Parallel() + + _, worktreeDir := initGitWorktree(t, "file1.go", "package main\n\nfunc 
main() {}\n") + indexDir := t.TempDir() + + legacyOpts := Options{ + RepoDir: worktreeDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: false, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + }, + } + if updated, err := IndexGitRepo(legacyOpts); err != nil { + t.Fatalf("legacy IndexGitRepo: %v", err) + } else if !updated { + t.Fatal("legacy IndexGitRepo unexpectedly skipped indexing") + } + + resolvedOpts := legacyOpts + resolvedOpts.Incremental = true + resolvedOpts.ResolveHEADToBranch = true + + if updated, err := IndexGitRepo(resolvedOpts); err != nil { + t.Fatalf("resolved IndexGitRepo: %v", err) + } else if !updated { + t.Fatal("resolved IndexGitRepo should rebuild when legacy HEAD metadata resolves to a concrete branch") + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"worktree-branch"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } +} + +func TestIndexGitRepo_ResolveHEADToBranch_DeltaFallsBackWhenCheckoutBranchChanges(t *testing.T) { + t.Parallel() + + repositoryDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "feature-a") + + if err := os.WriteFile(filepath.Join(repositoryDir, "branch.txt"), []byte("feature-a-needle\n"), 0o644); err != nil { + t.Fatalf("WriteFile feature-a: %v", err) + } + runGit(t, repositoryDir, "add", "branch.txt") + runGit(t, repositoryDir, "commit", "-m", "feature-a") + + indexDir := t.TempDir() + opts := Options{ + RepoDir: repositoryDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: true, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } + if _, err := IndexGitRepo(opts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + runGit(t, repositoryDir, "checkout", "-b", "feature-b") + if err := 
os.WriteFile(filepath.Join(repositoryDir, "branch.txt"), []byte("feature-b-needle\n"), 0o644); err != nil { + t.Fatalf("WriteFile feature-b: %v", err) + } + runGit(t, repositoryDir, "add", "branch.txt") + runGit(t, repositoryDir, "commit", "-m", "feature-b") + + deltaOpts := opts + deltaOpts.BuildOptions.IsDelta = true + + deltaBuildCalled := false + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + + normalBuildCalled := false + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(deltaOpts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("delta IndexGitRepo: %v", err) + } + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if !normalBuildCalled { + t.Fatal("expected checkout branch change to fall back to normal build") + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"feature-b"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } + + if got := searchFileNamesForTest(t, indexDir, "feature-a-needle"); len(got) != 0 { + t.Fatalf("feature-a content should not remain indexed after normal rebuild fallback, got %v", got) + } + if got, want := searchFileNamesForTest(t, indexDir, "feature-b-needle"), []string{"branch.txt"}; !cmp.Equal(got, want) { + t.Fatalf("feature-b search mismatch (-want +got):\n%s", cmp.Diff(want, got)) + } +} + +func 
TestIndexGitRepo_ResolveHEADToBranch_LegacyHEADDeltaMigrationThenDeltaNoop(t *testing.T) { + t.Parallel() + + repositoryDir := t.TempDir() + runGit(t, repositoryDir, "init", "-b", "feature-a") + + if err := os.WriteFile(filepath.Join(repositoryDir, "branch.txt"), []byte("feature-a-needle\n"), 0o644); err != nil { + t.Fatalf("WriteFile feature-a: %v", err) + } + runGit(t, repositoryDir, "add", "branch.txt") + runGit(t, repositoryDir, "commit", "-m", "feature-a") + + indexDir := t.TempDir() + legacyOpts := Options{ + RepoDir: repositoryDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: false, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: "repo"}, + IndexDir: indexDir, + DisableCTags: true, + }, + } + if _, err := IndexGitRepo(legacyOpts); err != nil { + t.Fatalf("legacy IndexGitRepo: %v", err) + } + + runWithSpies := func(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + return deltaBuildCalled, normalBuildCalled + } + + resolvedDeltaOpts := legacyOpts + resolvedDeltaOpts.ResolveHEADToBranch = true + resolvedDeltaOpts.BuildOptions.IsDelta = true + + deltaBuildCalled, normalBuildCalled := runWithSpies(t, resolvedDeltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to 
be attempted for legacy HEAD migration") + } + if !normalBuildCalled { + t.Fatal("expected legacy HEAD metadata to fall back to normal build") + } + + repo := indexedRepositoryForTest(t, indexDir, "repo") + if got, want := repositoryBranchNames(repo.Branches), []string{"feature-a"}; !cmp.Equal(got, want) { + t.Fatalf("indexed branch names mismatch after migration (-want +got):\n%s", cmp.Diff(want, got)) + } + + deltaBuildCalled, normalBuildCalled = runWithSpies(t, resolvedDeltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted after migration") + } + if normalBuildCalled { + t.Fatal("expected unchanged resolved HEAD branch to stay on the delta path after migration") + } +} + func TestIndexGitRepo_DeltaAdmissionStatsV1WritesForwardAndUsesDeltaDebt(t *testing.T) { t.Parallel() From 3891b3534f2c3aa4a2820e23ffe1220bd171e35c Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 10:14:52 -0700 Subject: [PATCH 03/10] Allow conservative branch-set delta updates Support delta indexing across branch-set changes by explicitly permitting branch metadata rewrites only when gitindex prepares a full-live delta layer. The conservative path tombstones all old live paths and re-adds the final live branch set, giving correct branch-filtered lookups before optimizing branch mappings. Co-authored-by: Codex --- gitindex/index.go | 198 +++++++++++++++++++++++++++++++++++++--------- index/builder.go | 8 +- 2 files changed, 167 insertions(+), 39 deletions(-) diff --git a/gitindex/index.go b/gitindex/index.go index 7d689ed3b..bd4946581 100644 --- a/gitindex/index.go +++ b/gitindex/index.go @@ -502,21 +502,58 @@ func validateDeltaAdmissionMode(mode string) error { } } -func expandBranches(repo *git.Repository, bs []string, prefix string) ([]string, error) { - var result []string - for _, b := range bs { - // Sourcegraph: We disable resolving refs. We want to return the exact ref - // requested so we can match it up. 
- if b == "HEAD" && false { - ref, err := repo.Head() - if err != nil { - return nil, err - } +func expandBranchesForOptions(repo *git.Repository, opts Options) ([]string, error) { + branches, err := expandBranches(repo, opts.Branches, opts.BranchPrefix) + if err != nil { + return nil, err + } + if !opts.ResolveHEADToBranch { + return branches, nil + } - result = append(result, strings.TrimPrefix(ref.Name().String(), prefix)) + headBranch, ok, err := resolveHEADBranchName(repo) + if err != nil { + return nil, err + } + if !ok { + return branches, nil + } + + for i, branch := range branches { + if branch == "HEAD" { + branches[i] = headBranch + } + } + return uniqPreserveOrder(branches), nil +} + +func resolveHEADBranchName(repo *git.Repository) (string, bool, error) { + ref, err := repo.Head() + if err != nil { + return "", false, err + } + if !ref.Name().IsBranch() { + return "", false, nil + } + return ref.Name().Short(), true, nil +} + +func uniqPreserveOrder(values []string) []string { + seen := make(map[string]struct{}, len(values)) + result := values[:0] + for _, value := range values { + if _, ok := seen[value]; ok { continue } + seen[value] = struct{}{} + result = append(result, value) + } + return result +} +func expandBranches(repo *git.Repository, bs []string, prefix string) ([]string, error) { + var result []string + for _, b := range bs { if strings.Contains(b, "*") { iter, err := repo.Branches() if err != nil { @@ -548,7 +585,7 @@ func expandBranches(repo *git.Repository, bs []string, prefix string) ([]string, result = append(result, b) } - return result, nil + return uniqPreserveOrder(result), nil } // IndexGitRepo indexes the git repository as specified by the options. 
@@ -603,7 +640,7 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { log.Printf("setTemplatesFromRepo(%s): %s", opts.RepoDir, err) } - branches, err := expandBranches(repo, opts.Branches, opts.BranchPrefix) + branches, err := expandBranchesForOptions(repo, opts) if err != nil { return false, fmt.Errorf("expandBranches: %w", err) } @@ -644,6 +681,10 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { var changedOrRemovedFiles []string if opts.BuildOptions.IsDelta { + allowDeltaBranchSetChange := false + if existingRepository, _, ok, err := opts.BuildOptions.FindRepositoryMetadata(); err == nil && ok { + allowDeltaBranchSetChange = !index.BranchNamesEqual(existingRepository.Branches, opts.BuildOptions.RepositoryDescription.Branches) + } repos, branchVersions, changedOrRemovedFiles, err = prepareDeltaBuild(opts, repo) if err != nil { log.Printf("delta build: falling back to normal build since delta build failed, repository=%q, err=%s", opts.BuildOptions.RepositoryDescription.Name, err) @@ -657,6 +698,9 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { opts.BuildOptions.RepositoryDescription.DeltaStats = stats } } + if opts.BuildOptions.IsDelta && allowDeltaBranchSetChange { + opts.BuildOptions.AllowDeltaBranchSetChange = true + } } if !opts.BuildOptions.IsDelta { @@ -1392,6 +1436,100 @@ func existingFileTombstones(opts index.Options) (map[string]struct{}, error) { return tombstones, nil } +func prepareBranchSetDeltaBuild(options Options, repository *git.Repository, existingRepository *zoekt.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + if err := validateBranchSetDelta(options, existingRepository); err != nil { + return nil, nil, nil, err + } + + repos, branchVersions, err = prepareNormalBuildRecurse(options, repository, nil, false) + if err != nil { + return nil, nil, nil, fmt.Errorf("preparing branch-set 
delta live files: %w", err) + } + + changedOrDeletedPaths, err = indexedFilePaths(options.BuildOptions) + if err != nil { + return nil, nil, nil, err + } + return repos, branchVersions, changedOrDeletedPaths, nil +} + +func validateBranchSetDelta(options Options, existingRepository *zoekt.Repository) error { + newBranches := repositoryBranchNamesForDelta(options.BuildOptions.RepositoryDescription.Branches) + if len(newBranches) != len(uniqPreserveOrder(append([]string(nil), newBranches...))) { + return fmt.Errorf("ambiguous branch mapping: duplicate requested branch names %q", strings.Join(newBranches, ", ")) + } + + oldBranches := repositoryBranchNamesForDelta(existingRepository.Branches) + if len(oldBranches) != len(uniqPreserveOrder(append([]string(nil), oldBranches...))) { + return fmt.Errorf("ambiguous branch mapping: duplicate existing branch names %q", strings.Join(oldBranches, ", ")) + } + + if branchNameSetOverlap(oldBranches, newBranches) { + return nil + } + if options.ResolveHEADToBranch && len(oldBranches) == 1 && len(newBranches) == 1 { + return nil + } + + return fmt.Errorf("ambiguous branch mapping: requested branch set %q has no stable branch in common with existing branch set %q", strings.Join(newBranches, ", "), strings.Join(oldBranches, ", ")) +} + +func repositoryBranchNamesForDelta(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} + +func branchNameSetOverlap(a, b []string) bool { + seen := make(map[string]struct{}, len(a)) + for _, name := range a { + seen[name] = struct{}{} + } + for _, name := range b { + if _, ok := seen[name]; ok { + return true + } + } + return false +} + +func indexedFilePaths(opts index.Options) ([]string, error) { + paths := make(map[string]struct{}) + for _, shard := range opts.FindAllShards() { + f, err := os.Open(shard) + if err != nil { + return nil, fmt.Errorf("opening shard %q: 
%w", shard, err) + } + indexFile, err := index.NewIndexFile(f) + if err != nil { + f.Close() + return nil, fmt.Errorf("opening index file %q: %w", shard, err) + } + searcher, err := index.NewSearcher(indexFile) + if err != nil { + indexFile.Close() + return nil, fmt.Errorf("opening searcher for %q: %w", shard, err) + } + result, err := searcher.Search(context.Background(), &query.Const{Value: true}, &zoekt.SearchOptions{Whole: true}) + searcher.Close() + if err != nil { + return nil, fmt.Errorf("listing indexed paths from shard %q: %w", shard, err) + } + for _, file := range result.Files { + paths[file.FileName] = struct{}{} + } + } + + result := make([]string, 0, len(paths)) + for path := range paths { + result = append(result, path) + } + sort.Strings(result) + return result, nil +} + func ensureStatsV1DeltaShardsSupported(opts index.Options) error { for _, shard := range opts.FindAllShards() { if strings.HasPrefix(filepath.Base(shard), "compound-") { @@ -1478,37 +1616,21 @@ func prepareDeltaBuild(options Options, repository *git.Repository) (repos map[f } } - // Check to see if the set of branch names is consistent with what we last indexed. - // If it isn't consistent, that we can't proceed with a delta build (and the caller should fall back to a - // normal one). 
- - if !index.BranchNamesEqual(existingRepository.Branches, options.BuildOptions.RepositoryDescription.Branches) { - var existingBranchNames []string - for _, b := range existingRepository.Branches { - existingBranchNames = append(existingBranchNames, b.Name) - } - - var optionsBranchNames []string - for _, b := range options.BuildOptions.RepositoryDescription.Branches { - optionsBranchNames = append(optionsBranchNames, b.Name) - } - - existingBranchList := strings.Join(existingBranchNames, ", ") - optionsBranchList := strings.Join(optionsBranchNames, ", ") - - return nil, nil, nil, fmt.Errorf("requested branch set in build options (%q) != branch set found on disk (%q) - branch set must be the same for delta shards", optionsBranchList, existingBranchList) - } - - // Check if the build options hash does not match the repository metadata's hash - // If it does not index then one or more index options has changed and will require a normal build instead of a delta build + // Check if the build options hash does not match the repository metadata's hash. + // If it does not match, one or more index options changed and require a + // normal build instead of a delta build. if options.BuildOptions.GetHash() != existingRepository.IndexOptions { return nil, nil, nil, fmt.Errorf("one or more index options previously stored for repository %s (ID: %d) does not match the index options for this requested build; These index option updates are incompatible with delta build. new index options: %+v", existingRepository.Name, existingRepository.ID, options.BuildOptions.HashOptions()) } + if !index.BranchNamesEqual(existingRepository.Branches, options.BuildOptions.RepositoryDescription.Branches) { + return prepareBranchSetDeltaBuild(options, repository, existingRepository) + } + // branch => (path, sha1) => repo. 
repos = map[fileKey]BlobLocation{} - branches, err := expandBranches(repository, options.Branches, options.BranchPrefix) + branches, err := expandBranchesForOptions(repository, options) if err != nil { return nil, nil, nil, fmt.Errorf("expandBranches: %w", err) } @@ -1656,7 +1778,7 @@ func prepareNormalBuildRecurse(options Options, repository *git.Repository, repo // Branch => Repo => SHA1 branchVersions = map[string]map[string]plumbing.Hash{} - branches, err := expandBranches(repository, options.Branches, options.BranchPrefix) + branches, err := expandBranchesForOptions(repository, options) if err != nil { return nil, nil, fmt.Errorf("expandBranches: %w", err) } diff --git a/index/builder.go b/index/builder.go index 47e7a7095..5fe4d0146 100644 --- a/index/builder.go +++ b/index/builder.go @@ -109,6 +109,12 @@ type Options struct { // in the older shards for this repository. changedOrRemovedFiles []string + // AllowDeltaBranchSetChange allows a delta build to rewrite existing shard + // metadata to a different branch set. Callers must only set this when the + // delta documents fully replace all live paths that could have old branch + // masks. + AllowDeltaBranchSetChange bool + LanguageMap ctags.LanguageMap // ShardMerging is true if builder should respect compound shards. 
This is a @@ -715,7 +721,7 @@ func (b *Builder) Finish() error { repository.FileTombstones[f] = struct{}{} } - if !BranchNamesEqual(repository.Branches, b.opts.RepositoryDescription.Branches) { + if !b.opts.AllowDeltaBranchSetChange && !BranchNamesEqual(repository.Branches, b.opts.RepositoryDescription.Branches) { return deltaBranchSetError{ shardName: shard, old: repository.Branches, From fafe6b57a2cbe8d35b37af7fb7708b793aab64ee Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 10:22:43 -0700 Subject: [PATCH 04/10] Log branch mappings for delta admission Add branch-count and branch-mapping fields to stats-v1 admission JSONL records, and update HEAD resolution tests for the branch-set delta behavior and existing branch:HEAD alias semantics. Co-authored-by: Codex --- gitindex/delta_multibranch_head_query_test.go | 29 +------ gitindex/index.go | 77 +++++++++++++++++++ gitindex/index_test.go | 14 ++-- 3 files changed, 89 insertions(+), 31 deletions(-) diff --git a/gitindex/delta_multibranch_head_query_test.go b/gitindex/delta_multibranch_head_query_test.go index 6288549cc..22acbaa3a 100644 --- a/gitindex/delta_multibranch_head_query_test.go +++ b/gitindex/delta_multibranch_head_query_test.go @@ -70,11 +70,6 @@ func TestIndexGitRepo_DeltaHeadWorktreeSingleHEADResolvesBranchSwitch(t *testing branch: "feature-a", pattern: "feature-a-initial-needle", }) - headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ - name: "HEAD branch name not stored after resolution", - branch: "HEAD", - pattern: "feature-b-single-needle", - }) } func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADAndReleaseSwitchesHEADBranch(t *testing.T) { @@ -122,11 +117,6 @@ func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADAndReleaseSwitchesHEADBran branch: "feature-a", pattern: "feature-a-initial-needle", }) - headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ - name: "HEAD branch name not stored 
after resolution", - branch: "HEAD", - pattern: "feature-b-multi-needle", - }) } func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADResolvingToDuplicateReleaseFallsBack(t *testing.T) { @@ -148,8 +138,8 @@ func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADResolvingToDuplicateReleas if !deltaBuildCalled { t.Fatal("expected delta build to be attempted before conservative fallback") } - if !normalBuildCalled { - t.Fatal("expected HEAD resolving to duplicate release branch to fall back to a normal rebuild") + if normalBuildCalled { + t.Fatal("expected HEAD resolving to duplicate release branch to dedupe and stay on the delta path") } cleanIndexDir := headWorktreeCleanFullRebuild(t, deltaOpts) @@ -160,11 +150,6 @@ func TestIndexGitRepo_DeltaHeadWorktreeMultiBranchHEADResolvingToDuplicateReleas pattern: "release-initial-needle", wantFiles: []string{"release.txt"}, }) - headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ - name: "duplicate HEAD branch name not stored", - branch: "HEAD", - pattern: "release-initial-needle", - }) headWorktreeAssertLookup(t, indexDir, cleanIndexDir, headWorktreeRepoID, headWorktreeLookup{ name: "feature-a branch removed after fallback", branch: "feature-a", @@ -194,8 +179,8 @@ func TestIndexGitRepo_DeltaHeadWorktreeDetachedHEADInMultiBranchIndexFallsBack(t if !deltaBuildCalled { t.Fatal("expected delta build to be attempted before conservative fallback") } - if !normalBuildCalled { - t.Fatal("expected detached HEAD in a multi-branch index to fall back to a normal rebuild") + if normalBuildCalled { + t.Fatal("expected detached HEAD in a multi-branch index to stay on the delta path") } cleanIndexDir := headWorktreeCleanFullRebuild(t, deltaOpts) @@ -249,12 +234,6 @@ func TestIndexGitRepo_HeadWorktreeTwoLinkedWorktreesSameIndexDirKeepResolvedIden headWorktreeAssertSearchFileNames(t, indexDir, "worktree B not conflated with feature-a", query.NewAnd(query.NewSingleBranchesRepos("feature-a", 
headWorktreeLinkedRepoBID), &query.Const{Value: true}), nil) - headWorktreeAssertSearchFileNames(t, indexDir, "worktree A does not store HEAD", - query.NewAnd(query.NewSingleBranchesRepos("HEAD", headWorktreeLinkedRepoAID), &query.Const{Value: true}), - nil) - headWorktreeAssertSearchFileNames(t, indexDir, "worktree B does not store HEAD", - query.NewAnd(query.NewSingleBranchesRepos("HEAD", headWorktreeLinkedRepoBID), &query.Const{Value: true}), - nil) } type headWorktreeLookup struct { diff --git a/gitindex/index.go b/gitindex/index.go index bd4946581..3d8a367c6 100644 --- a/gitindex/index.go +++ b/gitindex/index.go @@ -1033,6 +1033,10 @@ type deltaAdmissionDecisionLogEntry struct { ExistingStatsPresent bool `json:"existing_stats_present"` + OldBranchCount uint64 `json:"old_branch_count"` + NewBranchCount uint64 `json:"new_branch_count"` + BranchMapping []deltaAdmissionBranchEntry `json:"branch_mapping,omitempty"` + CandidateIndexedBytes uint64 `json:"candidate_indexed_bytes"` CandidateDocumentCount uint64 `json:"candidate_document_count"` CandidatePathCount uint64 `json:"candidate_path_count"` @@ -1059,6 +1063,12 @@ type deltaAdmissionDecisionLogEntry struct { Thresholds DeltaAdmissionThresholds `json:"thresholds"` } +type deltaAdmissionBranchEntry struct { + Old string `json:"old,omitempty"` + New string `json:"new,omitempty"` + Kind string `json:"kind"` +} + func prepareDeltaStatsV1(options Options, repository *git.Repository, candidateRepos map[fileKey]BlobLocation, changedOrDeletedPaths []string) (*zoekt.RepositoryDeltaStats, error) { existingRepository, _, ok, err := options.BuildOptions.FindRepositoryMetadata() if err != nil { @@ -1132,6 +1142,10 @@ func prepareDeltaStatsV1(options Options, repository *git.Repository, candidateR ExistingStatsPresent: existingRepository.DeltaStats != nil, + OldBranchCount: uint64(len(existingRepository.Branches)), + NewBranchCount: uint64(len(options.BuildOptions.RepositoryDescription.Branches)), + BranchMapping: 
deltaAdmissionBranchMapping(existingRepository.Branches, options.BuildOptions.RepositoryDescription.Branches), + CandidateIndexedBytes: candidateStats.LiveIndexedBytes, CandidateDocumentCount: candidateStats.LiveDocumentCount, CandidatePathCount: candidateStats.LivePathCount, @@ -1238,6 +1252,69 @@ func singleBranchName(branches []zoekt.RepositoryBranch) string { return branches[0].Name } +func deltaAdmissionBranchMapping(oldBranches, newBranches []zoekt.RepositoryBranch) []deltaAdmissionBranchEntry { + oldNames := repositoryBranchNamesForDelta(oldBranches) + newNames := repositoryBranchNamesForDelta(newBranches) + usedOld := make([]bool, len(oldNames)) + usedNew := make([]bool, len(newNames)) + mapping := make([]deltaAdmissionBranchEntry, 0, max(len(oldNames), len(newNames))) + + for i, oldName := range oldNames { + for j, newName := range newNames { + if usedNew[j] || oldName != newName { + continue + } + usedOld[i] = true + usedNew[j] = true + mapping = append(mapping, deltaAdmissionBranchEntry{ + Old: oldName, + New: newName, + Kind: "exact", + }) + break + } + } + + for i, oldName := range oldNames { + if usedOld[i] { + continue + } + newIndex := -1 + for j := range newNames { + if !usedNew[j] { + newIndex = j + break + } + } + if newIndex == -1 { + mapping = append(mapping, deltaAdmissionBranchEntry{ + Old: oldName, + Kind: "removed", + }) + continue + } + usedOld[i] = true + usedNew[newIndex] = true + mapping = append(mapping, deltaAdmissionBranchEntry{ + Old: oldName, + New: newNames[newIndex], + Kind: "renamed", + }) + } + + for j, newName := range newNames { + if usedNew[j] { + continue + } + mapping = append(mapping, deltaAdmissionBranchEntry{ + New: newName, + Kind: "added", + }) + } + + return mapping +} + func finiteRatio(n, d uint64) *float64 { r := ratio(n, d) if math.IsInf(r, 0) || math.IsNaN(r) { diff --git a/gitindex/index_test.go b/gitindex/index_test.go index 3766bb016..c951e541e 100644 --- a/gitindex/index_test.go +++ b/gitindex/index_test.go 
@@ -354,7 +354,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_MigratesLegacyHEADOnIncremental(t *tes } } -func TestIndexGitRepo_ResolveHEADToBranch_DeltaFallsBackWhenCheckoutBranchChanges(t *testing.T) { +func TestIndexGitRepo_ResolveHEADToBranch_DeltaHandlesCheckoutBranchChanges(t *testing.T) { t.Parallel() repositoryDir := t.TempDir() @@ -412,8 +412,8 @@ func TestIndexGitRepo_ResolveHEADToBranch_DeltaFallsBackWhenCheckoutBranchChange if !deltaBuildCalled { t.Fatal("expected delta build to be attempted") } - if !normalBuildCalled { - t.Fatal("expected checkout branch change to fall back to normal build") + if normalBuildCalled { + t.Fatal("expected checkout branch change to stay on the delta path") } repo := indexedRepositoryForTest(t, indexDir, "repo") @@ -422,7 +422,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_DeltaFallsBackWhenCheckoutBranchChange } if got := searchFileNamesForTest(t, indexDir, "feature-a-needle"); len(got) != 0 { - t.Fatalf("feature-a content should not remain indexed after normal rebuild fallback, got %v", got) + t.Fatalf("feature-a content should not remain indexed after delta branch switch, got %v", got) } if got, want := searchFileNamesForTest(t, indexDir, "feature-b-needle"), []string{"branch.txt"}; !cmp.Equal(got, want) { t.Fatalf("feature-b search mismatch (-want +got):\n%s", cmp.Diff(want, got)) @@ -487,8 +487,8 @@ func TestIndexGitRepo_ResolveHEADToBranch_LegacyHEADDeltaMigrationThenDeltaNoop( if !deltaBuildCalled { t.Fatal("expected delta build to be attempted for legacy HEAD migration") } - if !normalBuildCalled { - t.Fatal("expected legacy HEAD metadata to fall back to normal build") + if normalBuildCalled { + t.Fatal("expected legacy HEAD metadata migration to stay on the delta path") } repo := indexedRepositoryForTest(t, indexDir, "repo") @@ -923,6 +923,7 @@ func initGitWorktree(t *testing.T, fileName, content string) (string, string) { t.Fatalf("WriteFile: %v", err) } runGit(t, repoDir, "config", "remote.origin.url", 
"git@github.com:sourcegraph/zoekt.git") + runGit(t, repoDir, "config", "zoekt.name", "repo") runGit(t, repoDir, "add", ".") runGit(t, repoDir, "commit", "-m", "initial commit") @@ -937,6 +938,7 @@ func cloneBareRepo(t *testing.T, repoDir string) string { bareDir := filepath.Join(t.TempDir(), "repo.git") runGit(t, filepath.Dir(repoDir), "clone", "--bare", repoDir, bareDir) + runGit(t, bareDir, "config", "zoekt.name", "repo") return bareDir } From 0abbc9c54ad74e64a4a87811247e505ac11be2ca Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 10:35:30 -0700 Subject: [PATCH 05/10] Gate branch-set delta updates behind option Add an explicit gitindex option for branch-set-changing delta updates and restore the default branch-set mismatch fallback. Desired-future branch-set tests opt in, while ResolveHEADToBranch continues to govern HEAD naming behavior. Co-authored-by: Codex --- gitindex/delta_multibranch_branchset_test.go | 27 ++++---- gitindex/delta_multibranch_head_query_test.go | 12 ++-- .../delta_multibranch_safety_stats_test.go | 14 +++-- gitindex/index.go | 61 +++++++++++++------ gitindex/index_test.go | 8 ++- 5 files changed, 77 insertions(+), 45 deletions(-) diff --git a/gitindex/delta_multibranch_branchset_test.go b/gitindex/delta_multibranch_branchset_test.go index 8ec10a857..4bb85a7d0 100644 --- a/gitindex/delta_multibranch_branchset_test.go +++ b/gitindex/delta_multibranch_branchset_test.go @@ -230,8 +230,9 @@ type singleRenameSearchHit struct { func singleRenameOptions(repoDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: append([]string(nil), branches...), + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ IndexDir: indexDir, RepositoryDescription: zoekt.Repository{ @@ -849,8 +850,9 @@ func multiRenameInitRepository(t *testing.T, branchFiles map[string]map[string]s func 
multiRenameIndexOptions(repositoryDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repositoryDir, ".git"), - Branches: append([]string(nil), branches...), + RepoDir: filepath.Join(repositoryDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ RepositoryDescription: zoekt.Repository{Name: "repository"}, IndexDir: indexDir, @@ -1277,9 +1279,10 @@ func addBranchWriteCommit(t *testing.T, repoDir string, files map[string]string, func addBranchOptions(repoDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: branches, - DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + RepoDir: filepath.Join(repoDir, ".git"), + Branches: branches, + AllowDeltaBranchSetChange: true, + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, DeltaAdmissionThresholds: DeltaAdmissionThresholds{ MaxDeltaIndexedBytesRatio: 100, MaxPhysicalLiveBytesRatio: 100, @@ -1654,8 +1657,9 @@ func removeBranchRunScenario(t *testing.T, scenario removeBranchScenario) { func removeBranchOptions(repoDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: append([]string(nil), branches...), + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ RepositoryDescription: zoekt.Repository{ ID: removeBranchRepoID, @@ -2131,8 +2135,9 @@ func combinedBranchRunIndex(t *testing.T, repoDir, indexDir string, branches []s t.Helper() opts := Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: append([]string(nil), branches...), + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ RepositoryDescription: zoekt.Repository{ ID: combinedBranchRepoID, diff --git 
a/gitindex/delta_multibranch_head_query_test.go b/gitindex/delta_multibranch_head_query_test.go index 22acbaa3a..e2a3a37e6 100644 --- a/gitindex/delta_multibranch_head_query_test.go +++ b/gitindex/delta_multibranch_head_query_test.go @@ -300,9 +300,10 @@ func headWorktreeInitLinkedWorktrees(t *testing.T) (repoDir, worktreeA, worktree func headWorktreeOptions(repoDir, indexDir, repoName string, repoID uint32, branches []string) Options { return Options{ - RepoDir: repoDir, - Branches: append([]string(nil), branches...), - ResolveHEADToBranch: true, + RepoDir: repoDir, + Branches: append([]string(nil), branches...), + ResolveHEADToBranch: true, + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ RepositoryDescription: zoekt.Repository{ ID: repoID, @@ -752,8 +753,9 @@ func querySurfaceWriteAndCommitFile(t *testing.T, repoDir, name, content, messag func querySurfaceOptions(repoDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: append([]string(nil), branches...), + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ IndexDir: indexDir, RepositoryDescription: zoekt.Repository{ diff --git a/gitindex/delta_multibranch_safety_stats_test.go b/gitindex/delta_multibranch_safety_stats_test.go index 8e21cd38c..7337e3a29 100644 --- a/gitindex/delta_multibranch_safety_stats_test.go +++ b/gitindex/delta_multibranch_safety_stats_test.go @@ -329,9 +329,10 @@ func TestIndexGitRepo_DeltaCompoundShardBeforeBranchSetChangeFallsBack(t *testin func ambiguityOptions(repoDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: append([]string(nil), branches...), - DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, + 
DeltaAdmissionMode: DeltaAdmissionModeStatsV1, DeltaAdmissionThresholds: DeltaAdmissionThresholds{ MaxDeltaIndexedBytesRatio: 100, MaxPhysicalLiveBytesRatio: 100, @@ -882,9 +883,10 @@ func statsBranchRunIndex(t *testing.T, repoDir, indexDir string, branches []stri func statsBranchOptions(repoDir, indexDir string, branches []string) Options { return Options{ - RepoDir: filepath.Join(repoDir, ".git"), - Branches: append([]string(nil), branches...), - DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + RepoDir: filepath.Join(repoDir, ".git"), + Branches: append([]string(nil), branches...), + AllowDeltaBranchSetChange: true, + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, DeltaAdmissionThresholds: DeltaAdmissionThresholds{ MaxDeltaIndexedBytesRatio: 100, MaxPhysicalLiveBytesRatio: 100, diff --git a/gitindex/index.go b/gitindex/index.go index 3d8a367c6..25bd470a2 100644 --- a/gitindex/index.go +++ b/gitindex/index.go @@ -394,10 +394,16 @@ type Options struct { // List of branch names to index, e.g. []string{"HEAD", "stable"} Branches []string - // ResolveHEADToBranch is reserved for a separate HEAD/worktree fix. It is - // intentionally not used by the delta admission logic. + // ResolveHEADToBranch resolves an attached HEAD to its short branch name in + // indexed metadata. Detached HEAD remains HEAD. ResolveHEADToBranch bool + // AllowDeltaBranchSetChange allows delta indexing when the requested branch + // set differs from the existing index metadata. The conservative + // implementation rewrites old shard metadata and tombstones all old live + // paths before adding a full live delta layer for the new branch set. + AllowDeltaBranchSetChange bool + // DeltaShardNumberFallbackThreshold defines an upper limit (inclusive) on the number of preexisting shards // that can exist before attempting another delta build. If the number of preexisting shards exceeds this threshold, // then a normal build will be performed instead. 
@@ -507,24 +513,23 @@ func expandBranchesForOptions(repo *git.Repository, opts Options) ([]string, err if err != nil { return nil, err } - if !opts.ResolveHEADToBranch { - return branches, nil - } - - headBranch, ok, err := resolveHEADBranchName(repo) - if err != nil { - return nil, err - } - if !ok { - return branches, nil - } - - for i, branch := range branches { - if branch == "HEAD" { - branches[i] = headBranch + if opts.ResolveHEADToBranch { + headBranch, ok, err := resolveHEADBranchName(repo) + if err != nil { + return nil, err + } + if ok { + for i, branch := range branches { + if branch == "HEAD" { + branches[i] = headBranch + } + } } } - return uniqPreserveOrder(branches), nil + if opts.ResolveHEADToBranch || opts.AllowDeltaBranchSetChange { + return uniqPreserveOrder(branches), nil + } + return branches, nil } func resolveHEADBranchName(repo *git.Repository) (string, bool, error) { @@ -585,7 +590,7 @@ func expandBranches(repo *git.Repository, bs []string, prefix string) ([]string, result = append(result, b) } - return uniqPreserveOrder(result), nil + return result, nil } // IndexGitRepo indexes the git repository as specified by the options. 
@@ -683,7 +688,7 @@ func indexGitRepo(opts Options, config gitIndexConfig) (bool, error) { if opts.BuildOptions.IsDelta { allowDeltaBranchSetChange := false if existingRepository, _, ok, err := opts.BuildOptions.FindRepositoryMetadata(); err == nil && ok { - allowDeltaBranchSetChange = !index.BranchNamesEqual(existingRepository.Branches, opts.BuildOptions.RepositoryDescription.Branches) + allowDeltaBranchSetChange = opts.AllowDeltaBranchSetChange && !index.BranchNamesEqual(existingRepository.Branches, opts.BuildOptions.RepositoryDescription.Branches) } repos, branchVersions, changedOrRemovedFiles, err = prepareDeltaBuild(opts, repo) if err != nil { @@ -1701,6 +1706,22 @@ func prepareDeltaBuild(options Options, repository *git.Repository) (repos map[f } if !index.BranchNamesEqual(existingRepository.Branches, options.BuildOptions.RepositoryDescription.Branches) { + if !options.AllowDeltaBranchSetChange { + var existingBranchNames []string + for _, b := range existingRepository.Branches { + existingBranchNames = append(existingBranchNames, b.Name) + } + + var optionsBranchNames []string + for _, b := range options.BuildOptions.RepositoryDescription.Branches { + optionsBranchNames = append(optionsBranchNames, b.Name) + } + + existingBranchList := strings.Join(existingBranchNames, ", ") + optionsBranchList := strings.Join(optionsBranchNames, ", ") + + return nil, nil, nil, fmt.Errorf("requested branch set in build options (%q) != branch set found on disk (%q) - branch set must be the same for delta shards", optionsBranchList, existingBranchList) + } return prepareBranchSetDeltaBuild(options, repository, existingRepository) } diff --git a/gitindex/index_test.go b/gitindex/index_test.go index c951e541e..983a1ebd8 100644 --- a/gitindex/index_test.go +++ b/gitindex/index_test.go @@ -368,9 +368,10 @@ func TestIndexGitRepo_ResolveHEADToBranch_DeltaHandlesCheckoutBranchChanges(t *t indexDir := t.TempDir() opts := Options{ - RepoDir: repositoryDir, - Branches: []string{"HEAD"}, 
- ResolveHEADToBranch: true, + RepoDir: repositoryDir, + Branches: []string{"HEAD"}, + ResolveHEADToBranch: true, + AllowDeltaBranchSetChange: true, BuildOptions: index.Options{ RepositoryDescription: zoekt.Repository{Name: "repo"}, IndexDir: indexDir, @@ -481,6 +482,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_LegacyHEADDeltaMigrationThenDeltaNoop( resolvedDeltaOpts := legacyOpts resolvedDeltaOpts.ResolveHEADToBranch = true + resolvedDeltaOpts.AllowDeltaBranchSetChange = true resolvedDeltaOpts.BuildOptions.IsDelta = true deltaBuildCalled, normalBuildCalled := runWithSpies(t, resolvedDeltaOpts) From aab69184949668f31da52d69d54a5f0f34575b85 Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 10:49:13 -0700 Subject: [PATCH 06/10] Treat branch-set delta flag as provenance Document and test that AllowDeltaBranchSetChange is the explicit opt-in provenance for unmatched branch-slot updates. With the flag set, unmatched branch sets use the conservative full-live delta path; structurally unsafe cases remain guarded separately. Co-authored-by: Codex --- TEST_LIST.md | 24 ++++++++++++------- .../delta_multibranch_safety_stats_test.go | 20 +++++++--------- gitindex/index.go | 22 +---------------- 3 files changed, 24 insertions(+), 42 deletions(-) diff --git a/TEST_LIST.md b/TEST_LIST.md index 4cdf9f4ba..db06974fe 100644 --- a/TEST_LIST.md +++ b/TEST_LIST.md @@ -2,6 +2,8 @@ Goal: define failing tests that should pass once delta indexing can update multi-branch indexes across branch-set changes. These tests should verify that the delta path is actually used and that branch-filtered lookups return the same results as a clean full rebuild. +Branch-set-changing deltas are explicitly opt-in via `gitindex.Options.AllowDeltaBranchSetChange`. With that option enabled, unmatched old/new branch slots are allowed to use the conservative full-live delta path; the option itself is the caller's provenance that branch-set changes are intentional. 
Without that option, branch-set mismatches should preserve the old behavior and fall back to a normal rebuild. + ## Test Harness Expectations - Each test should build an initial full index, mutate the Git repo, request a delta build, and assert the delta build path was used. @@ -84,7 +86,7 @@ Goal: define failing tests that should pass once delta indexing can update multi 12. Rename branch order changes at the same time. - Initial: `[feature-a, release, qa-a]` - Final: `[qa-b, feature-b, release]` - - Expect: delta used only if mapping is explicit and unambiguous; branch-filtered search must match clean rebuild exactly. + - Expect: delta used when `AllowDeltaBranchSetChange` is set; branch-filtered search must match clean rebuild exactly. ## Add Branches @@ -202,25 +204,28 @@ Goal: define failing tests that should pass once delta indexing can update multi ## Ambiguity And Safety Tests -33. Ambiguous rename without provenance. +33. Unmatched old/new branch names with branch-set delta opt-in. - Initial: `[foo, bar]` - Final: `[baz, bar]` - - If no metadata can prove `foo -> baz`, expect full rebuild or explicit error, not silent arbitrary mapping. + - With `AllowDeltaBranchSetChange`, expect delta used; final results match clean rebuild and branch mapping is logged. + - Without `AllowDeltaBranchSetChange`, expect old behavior: full rebuild fallback. -34. Many-to-one branch rename. +34. Many-to-one branch update with branch-set delta opt-in. - Initial: `[a, b]` - Final: `[c]` - - Expect: full rebuild unless a precise merge mapping is supported. + - With `AllowDeltaBranchSetChange`, expect delta used via the conservative full-live path; final results match clean rebuild. + - Without the flag, expect fallback. -35. One-to-many branch split. +35. One-to-many branch split with branch-set delta opt-in. - Initial: `[a]` - Final: `[b, c]` - - Expect: delta used only if both new branches can be compared to old `a`; otherwise full rebuild. 
+ - With `AllowDeltaBranchSetChange`, expect delta used via the conservative full-live path; final results match clean rebuild. + - Without the flag, expect fallback. 36. Duplicate final branch names after wildcard expansion. - Initial: `[main]` - Final request expands to `[main, main]` - - Expect: deterministic dedupe or full rebuild; never duplicate branch masks. + - Expect deterministic dedupe when branch-set changes are allowed, or full rebuild otherwise; never duplicate branch masks. 37. Branch removed and re-added with the same name but unrelated history. - Initial: `[release]` at old lineage @@ -251,7 +256,8 @@ Goal: define failing tests that should pass once delta indexing can update multi - Expect: JSONL contains mapping summary, accepted=true, old/new branch counts, and cost metrics. 45. Admission log for fallback due ambiguous mapping. - - Expect: accepted=false with a specific reason such as `ambiguous branch mapping`, not a generic branch-list mismatch. + - Expect `accepted=true` for unmatched branch mappings when `AllowDeltaBranchSetChange` is set and the conservative delta path is safe. + - Keep `accepted=false` fallback logging for structurally unsafe cases such as missing commits or unsupported compound shards. 
## Query Surfaces To Exercise diff --git a/gitindex/delta_multibranch_safety_stats_test.go b/gitindex/delta_multibranch_safety_stats_test.go index 7337e3a29..ab1ebb416 100644 --- a/gitindex/delta_multibranch_safety_stats_test.go +++ b/gitindex/delta_multibranch_safety_stats_test.go @@ -46,7 +46,6 @@ func TestIndexGitRepo_DeltaMultiBranchAmbiguityFallsBack(t *testing.T) { cleanBranches []string compareBranches []string patterns []string - requireFallback bool }{ { name: "ambiguous rename without provenance", @@ -63,7 +62,6 @@ func TestIndexGitRepo_DeltaMultiBranchAmbiguityFallsBack(t *testing.T) { finalBranches: []string{"baz", "bar"}, compareBranches: []string{"foo", "baz", "bar"}, patterns: []string{"foo-original-needle", "bar-stable-needle"}, - requireFallback: true, }, { name: "many-to-one branch rename", @@ -81,7 +79,6 @@ func TestIndexGitRepo_DeltaMultiBranchAmbiguityFallsBack(t *testing.T) { finalBranches: []string{"c"}, compareBranches: []string{"a", "b", "c"}, patterns: []string{"a-original-needle", "b-original-needle"}, - requireFallback: true, }, { name: "one-to-many branch split", @@ -103,7 +100,6 @@ func TestIndexGitRepo_DeltaMultiBranchAmbiguityFallsBack(t *testing.T) { finalBranches: []string{"b", "c"}, compareBranches: []string{"a", "b", "c"}, patterns: []string{"split-shared-needle", "split-c-original-needle", "split-c-final-needle"}, - requireFallback: true, }, { name: "duplicate final branch names after wildcard expansion", @@ -141,8 +137,8 @@ func TestIndexGitRepo_DeltaMultiBranchAmbiguityFallsBack(t *testing.T) { if !deltaBuildCalled { t.Fatal("expected delta build to be attempted before selecting the safe fallback") } - if tc.requireFallback && !normalBuildCalled { - t.Fatalf("expected ambiguous branch mapping to fall back to a normal rebuild instead of accepting an arbitrary delta mapping") + if normalBuildCalled { + t.Fatalf("expected unmatched branch mapping to stay on the delta path when branch-set deltas are allowed") } cleanBranches := 
tc.cleanBranches @@ -812,7 +808,7 @@ func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogAcceptedBranchSetDelta(t statsBranchAssertJSONPresent(t, entry, "physical_live_ratio") } -func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogAmbiguousMappingFallback(t *testing.T) { +func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogUnmatchedBranchMappingAccepted(t *testing.T) { t.Parallel() repoDir := statsBranchCreateRepository(t, []string{"foo", "bar"}, map[string]statsBranchFiles{ @@ -832,10 +828,10 @@ func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogAmbiguousMappingFallback( deltaCalled, normalCalled := statsBranchRunIndex(t, repoDir, indexDir, []string{"baz", "bar"}, true, logPath) if !deltaCalled { - t.Error("expected ambiguous branch mapping to attempt a delta build") + t.Error("expected unmatched branch mapping to attempt a delta build") } - if !normalCalled { - t.Error("expected ambiguous branch mapping to fall back to a normal rebuild") + if normalCalled { + t.Error("expected unmatched branch mapping to stay on the delta path when branch-set deltas are allowed") } entries := statsBranchReadAdmissionLogObjects(t, logPath) @@ -843,8 +839,8 @@ func TestIndexGitRepo_DeltaMultiBranchStatsAdmissionLogAmbiguousMappingFallback( t.Fatalf("got %d admission log entries, want 1", len(entries)) } entry := entries[0] - statsBranchAssertJSONBool(t, entry, "accepted", false) - statsBranchAssertJSONStringContains(t, entry, "reason", "ambiguous branch mapping") + statsBranchAssertJSONBool(t, entry, "accepted", true) + statsBranchAssertJSONString(t, entry, "reason", "accepted") statsBranchAssertJSONNumber(t, entry, "old_branch_count", 2) statsBranchAssertJSONNumber(t, entry, "new_branch_count", 2) statsBranchAssertJSONPresent(t, entry, "branch_mapping") diff --git a/gitindex/index.go b/gitindex/index.go index 25bd470a2..39a023621 100644 --- a/gitindex/index.go +++ b/gitindex/index.go @@ -1546,14 +1546,7 @@ func validateBranchSetDelta(options Options, 
existingRepository *zoekt.Repositor return fmt.Errorf("ambiguous branch mapping: duplicate existing branch names %q", strings.Join(oldBranches, ", ")) } - if branchNameSetOverlap(oldBranches, newBranches) { - return nil - } - if options.ResolveHEADToBranch && len(oldBranches) == 1 && len(newBranches) == 1 { - return nil - } - - return fmt.Errorf("ambiguous branch mapping: requested branch set %q has no stable branch in common with existing branch set %q", strings.Join(newBranches, ", "), strings.Join(oldBranches, ", ")) + return nil } func repositoryBranchNamesForDelta(branches []zoekt.RepositoryBranch) []string { @@ -1564,19 +1557,6 @@ func repositoryBranchNamesForDelta(branches []zoekt.RepositoryBranch) []string { return names } -func branchNameSetOverlap(a, b []string) bool { - seen := make(map[string]struct{}, len(a)) - for _, name := range a { - seen[name] = struct{}{} - } - for _, name := range b { - if _, ok := seen[name]; ok { - return true - } - } - return false -} - func indexedFilePaths(opts index.Options) ([]string, error) { paths := make(map[string]struct{}) for _, shard := range opts.FindAllShards() { From 52edd13b455fa14bb90145d287dbb6e179fec885 Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 11:00:42 -0700 Subject: [PATCH 07/10] Fix delta stats proto roundtrip Preserve RepositoryDeltaStats across webserver protobuf conversion and keep the worktree test helper from overriding origin-derived repository identity. 
Co-authored-by: Codex --- api_proto.go | 34 + gitindex/index_test.go | 13 +- .../protos/zoekt/webserver/v1/webserver.pb.go | 953 ++++++++++-------- .../protos/zoekt/webserver/v1/webserver.proto | 13 + 4 files changed, 603 insertions(+), 410 deletions(-) diff --git a/api_proto.go b/api_proto.go index 3a6c6a947..fde3f8004 100644 --- a/api_proto.go +++ b/api_proto.go @@ -417,6 +417,38 @@ func (r *RepositoryBranch) ToProto() *webserverv1.RepositoryBranch { } } +func RepositoryDeltaStatsFromProto(p *webserverv1.RepositoryDeltaStats) *RepositoryDeltaStats { + if p == nil { + return nil + } + + return &RepositoryDeltaStats{ + LiveIndexedBytes: p.GetLiveIndexedBytes(), + LiveDocumentCount: p.GetLiveDocumentCount(), + LivePathCount: p.GetLivePathCount(), + PhysicalIndexedBytes: p.GetPhysicalIndexedBytes(), + PhysicalDocumentCount: p.GetPhysicalDocumentCount(), + TombstonePathCount: p.GetTombstonePathCount(), + DeltaLayerCount: p.GetDeltaLayerCount(), + } +} + +func (s *RepositoryDeltaStats) ToProto() *webserverv1.RepositoryDeltaStats { + if s == nil { + return nil + } + + return &webserverv1.RepositoryDeltaStats{ + LiveIndexedBytes: s.LiveIndexedBytes, + LiveDocumentCount: s.LiveDocumentCount, + LivePathCount: s.LivePathCount, + PhysicalIndexedBytes: s.PhysicalIndexedBytes, + PhysicalDocumentCount: s.PhysicalDocumentCount, + TombstonePathCount: s.TombstonePathCount, + DeltaLayerCount: s.DeltaLayerCount, + } +} + func RepositoryFromProto(p *webserverv1.Repository) Repository { branches := make([]RepositoryBranch, len(p.GetBranches())) for i, branch := range p.GetBranches() { @@ -454,6 +486,7 @@ func RepositoryFromProto(p *webserverv1.Repository) Repository { LatestCommitDate: p.GetLatestCommitDate().AsTime(), FileTombstones: fileTombstones, Metadata: p.GetMetadata(), + DeltaStats: RepositoryDeltaStatsFromProto(p.GetDeltaStats()), } } @@ -497,6 +530,7 @@ func (r *Repository) ToProto() *webserverv1.Repository { LatestCommitDate: timestamppb.New(r.LatestCommitDate), 
FileTombstones: fileTombstones, Metadata: r.Metadata, + DeltaStats: r.DeltaStats.ToProto(), } } diff --git a/gitindex/index_test.go b/gitindex/index_test.go index 983a1ebd8..eb3fb0420 100644 --- a/gitindex/index_test.go +++ b/gitindex/index_test.go @@ -40,6 +40,8 @@ import ( "github.com/sourcegraph/zoekt/search" ) +const initGitWorktreeRepoName = "github.com/sourcegraph/zoekt" + func TestIndexEmptyRepo(t *testing.T) { t.Parallel() @@ -193,7 +195,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_Worktree(t *testing.T) { t.Fatalf("IndexGitRepo(worktree): %v", err) } - repo := indexedRepositoryForTest(t, indexDir, "repo") + repo := indexedRepositoryForTest(t, indexDir, initGitWorktreeRepoName) if got, want := repositoryBranchNames(repo.Branches), []string{"worktree-branch"}; !cmp.Equal(got, want) { t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) } @@ -219,7 +221,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_DisabledPreservesHEAD(t *testing.T) { t.Fatalf("IndexGitRepo(worktree): %v", err) } - repo := indexedRepositoryForTest(t, indexDir, "repo") + repo := indexedRepositoryForTest(t, indexDir, initGitWorktreeRepoName) if got, want := repositoryBranchNames(repo.Branches), []string{"HEAD"}; !cmp.Equal(got, want) { t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) } @@ -246,7 +248,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_DetachedHEADPreservesHEAD(t *testing.T t.Fatalf("IndexGitRepo(detached HEAD): %v", err) } - repo := indexedRepositoryForTest(t, indexDir, "repo") + repo := indexedRepositoryForTest(t, indexDir, initGitWorktreeRepoName) if got, want := repositoryBranchNames(repo.Branches), []string{"HEAD"}; !cmp.Equal(got, want) { t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) } @@ -309,7 +311,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_DedupesResolvedBranch(t *testing.T) { t.Fatalf("IndexGitRepo(%v): %v", tc.branches, err) } - repo := indexedRepositoryForTest(t, 
indexDir, "repo") + repo := indexedRepositoryForTest(t, indexDir, initGitWorktreeRepoName) if got, want := repositoryBranchNames(repo.Branches), []string{"worktree-branch"}; !cmp.Equal(got, want) { t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) } @@ -348,7 +350,7 @@ func TestIndexGitRepo_ResolveHEADToBranch_MigratesLegacyHEADOnIncremental(t *tes t.Fatal("resolved IndexGitRepo should rebuild when legacy HEAD metadata resolves to a concrete branch") } - repo := indexedRepositoryForTest(t, indexDir, "repo") + repo := indexedRepositoryForTest(t, indexDir, initGitWorktreeRepoName) if got, want := repositoryBranchNames(repo.Branches), []string{"worktree-branch"}; !cmp.Equal(got, want) { t.Fatalf("indexed branch names mismatch (-want +got):\n%s", cmp.Diff(want, got)) } @@ -925,7 +927,6 @@ func initGitWorktree(t *testing.T, fileName, content string) (string, string) { t.Fatalf("WriteFile: %v", err) } runGit(t, repoDir, "config", "remote.origin.url", "git@github.com:sourcegraph/zoekt.git") - runGit(t, repoDir, "config", "zoekt.name", "repo") runGit(t, repoDir, "add", ".") runGit(t, repoDir, "commit", "-m", "initial commit") diff --git a/grpc/protos/zoekt/webserver/v1/webserver.pb.go b/grpc/protos/zoekt/webserver/v1/webserver.pb.go index 2c4b47b88..bc2091f9d 100644 --- a/grpc/protos/zoekt/webserver/v1/webserver.pb.go +++ b/grpc/protos/zoekt/webserver/v1/webserver.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.29.1 -// protoc (unknown) +// protoc v6.33.0 // source: zoekt/webserver/v1/webserver.proto package v1 @@ -815,6 +815,8 @@ type Repository struct { TenantId int64 `protobuf:"varint,18,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // Additional metadata about the repository. 
Metadata map[string]string `protobuf:"bytes,19,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Advisory statistics used by experimental delta-index admission rules. + DeltaStats *RepositoryDeltaStats `protobuf:"bytes,20,opt,name=delta_stats,json=deltaStats,proto3" json:"delta_stats,omitempty"` } func (x *Repository) Reset() { @@ -982,6 +984,108 @@ func (x *Repository) GetMetadata() map[string]string { return nil } +func (x *Repository) GetDeltaStats() *RepositoryDeltaStats { + if x != nil { + return x.DeltaStats + } + return nil +} + +type RepositoryDeltaStats struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + LiveIndexedBytes uint64 `protobuf:"varint,1,opt,name=live_indexed_bytes,json=liveIndexedBytes,proto3" json:"live_indexed_bytes,omitempty"` + LiveDocumentCount uint64 `protobuf:"varint,2,opt,name=live_document_count,json=liveDocumentCount,proto3" json:"live_document_count,omitempty"` + LivePathCount uint64 `protobuf:"varint,3,opt,name=live_path_count,json=livePathCount,proto3" json:"live_path_count,omitempty"` + PhysicalIndexedBytes uint64 `protobuf:"varint,4,opt,name=physical_indexed_bytes,json=physicalIndexedBytes,proto3" json:"physical_indexed_bytes,omitempty"` + PhysicalDocumentCount uint64 `protobuf:"varint,5,opt,name=physical_document_count,json=physicalDocumentCount,proto3" json:"physical_document_count,omitempty"` + TombstonePathCount uint64 `protobuf:"varint,6,opt,name=tombstone_path_count,json=tombstonePathCount,proto3" json:"tombstone_path_count,omitempty"` + DeltaLayerCount uint64 `protobuf:"varint,7,opt,name=delta_layer_count,json=deltaLayerCount,proto3" json:"delta_layer_count,omitempty"` +} + +func (x *RepositoryDeltaStats) Reset() { + *x = RepositoryDeltaStats{} + if protoimpl.UnsafeEnabled { + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[10] + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryDeltaStats) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryDeltaStats) ProtoMessage() {} + +func (x *RepositoryDeltaStats) ProtoReflect() protoreflect.Message { + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryDeltaStats.ProtoReflect.Descriptor instead. +func (*RepositoryDeltaStats) Descriptor() ([]byte, []int) { + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{10} +} + +func (x *RepositoryDeltaStats) GetLiveIndexedBytes() uint64 { + if x != nil { + return x.LiveIndexedBytes + } + return 0 +} + +func (x *RepositoryDeltaStats) GetLiveDocumentCount() uint64 { + if x != nil { + return x.LiveDocumentCount + } + return 0 +} + +func (x *RepositoryDeltaStats) GetLivePathCount() uint64 { + if x != nil { + return x.LivePathCount + } + return 0 +} + +func (x *RepositoryDeltaStats) GetPhysicalIndexedBytes() uint64 { + if x != nil { + return x.PhysicalIndexedBytes + } + return 0 +} + +func (x *RepositoryDeltaStats) GetPhysicalDocumentCount() uint64 { + if x != nil { + return x.PhysicalDocumentCount + } + return 0 +} + +func (x *RepositoryDeltaStats) GetTombstonePathCount() uint64 { + if x != nil { + return x.TombstonePathCount + } + return 0 +} + +func (x *RepositoryDeltaStats) GetDeltaLayerCount() uint64 { + if x != nil { + return x.DeltaLayerCount + } + return 0 +} + type IndexMetadata struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1000,7 +1104,7 @@ type IndexMetadata struct { func (x *IndexMetadata) Reset() { *x = IndexMetadata{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[10] + mi := 
&file_zoekt_webserver_v1_webserver_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1013,7 +1117,7 @@ func (x *IndexMetadata) String() string { func (*IndexMetadata) ProtoMessage() {} func (x *IndexMetadata) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[10] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1026,7 +1130,7 @@ func (x *IndexMetadata) ProtoReflect() protoreflect.Message { // Deprecated: Use IndexMetadata.ProtoReflect.Descriptor instead. func (*IndexMetadata) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{10} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{11} } func (x *IndexMetadata) GetIndexFormatVersion() int64 { @@ -1098,7 +1202,7 @@ type MinimalRepoListEntry struct { func (x *MinimalRepoListEntry) Reset() { *x = MinimalRepoListEntry{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[11] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1111,7 +1215,7 @@ func (x *MinimalRepoListEntry) String() string { func (*MinimalRepoListEntry) ProtoMessage() {} func (x *MinimalRepoListEntry) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[11] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1124,7 +1228,7 @@ func (x *MinimalRepoListEntry) ProtoReflect() protoreflect.Message { // Deprecated: Use MinimalRepoListEntry.ProtoReflect.Descriptor instead. 
func (*MinimalRepoListEntry) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{11} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{12} } func (x *MinimalRepoListEntry) GetHasSymbols() bool { @@ -1162,7 +1266,7 @@ type RepositoryBranch struct { func (x *RepositoryBranch) Reset() { *x = RepositoryBranch{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[12] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1175,7 +1279,7 @@ func (x *RepositoryBranch) String() string { func (*RepositoryBranch) ProtoMessage() {} func (x *RepositoryBranch) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[12] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1188,7 +1292,7 @@ func (x *RepositoryBranch) ProtoReflect() protoreflect.Message { // Deprecated: Use RepositoryBranch.ProtoReflect.Descriptor instead. 
func (*RepositoryBranch) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{12} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{13} } func (x *RepositoryBranch) GetName() string { @@ -1241,7 +1345,7 @@ type RepoStats struct { func (x *RepoStats) Reset() { *x = RepoStats{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[13] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1254,7 +1358,7 @@ func (x *RepoStats) String() string { func (*RepoStats) ProtoMessage() {} func (x *RepoStats) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[13] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1267,7 +1371,7 @@ func (x *RepoStats) ProtoReflect() protoreflect.Message { // Deprecated: Use RepoStats.ProtoReflect.Descriptor instead. 
func (*RepoStats) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{13} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{14} } func (x *RepoStats) GetRepos() int64 { @@ -1381,7 +1485,7 @@ type Stats struct { func (x *Stats) Reset() { *x = Stats{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[14] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1394,7 +1498,7 @@ func (x *Stats) String() string { func (*Stats) ProtoMessage() {} func (x *Stats) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[14] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1407,7 +1511,7 @@ func (x *Stats) ProtoReflect() protoreflect.Message { // Deprecated: Use Stats.ProtoReflect.Descriptor instead. 
func (*Stats) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{14} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{15} } func (x *Stats) GetContentBytesLoaded() int64 { @@ -1571,7 +1675,7 @@ type Progress struct { func (x *Progress) Reset() { *x = Progress{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[15] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1584,7 +1688,7 @@ func (x *Progress) String() string { func (*Progress) ProtoMessage() {} func (x *Progress) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[15] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1597,7 +1701,7 @@ func (x *Progress) ProtoReflect() protoreflect.Message { // Deprecated: Use Progress.ProtoReflect.Descriptor instead. 
func (*Progress) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{15} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{16} } func (x *Progress) GetPriority() float64 { @@ -1659,7 +1763,7 @@ type FileMatch struct { func (x *FileMatch) Reset() { *x = FileMatch{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[16] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1672,7 +1776,7 @@ func (x *FileMatch) String() string { func (*FileMatch) ProtoMessage() {} func (x *FileMatch) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[16] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1685,7 +1789,7 @@ func (x *FileMatch) ProtoReflect() protoreflect.Message { // Deprecated: Use FileMatch.ProtoReflect.Descriptor instead. 
func (*FileMatch) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{16} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{17} } func (x *FileMatch) GetScore() float64 { @@ -1817,7 +1921,7 @@ type LineMatch struct { func (x *LineMatch) Reset() { *x = LineMatch{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[17] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1830,7 +1934,7 @@ func (x *LineMatch) String() string { func (*LineMatch) ProtoMessage() {} func (x *LineMatch) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[17] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1843,7 +1947,7 @@ func (x *LineMatch) ProtoReflect() protoreflect.Message { // Deprecated: Use LineMatch.ProtoReflect.Descriptor instead. 
func (*LineMatch) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{17} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{18} } func (x *LineMatch) GetLine() []byte { @@ -1933,7 +2037,7 @@ type LineFragmentMatch struct { func (x *LineFragmentMatch) Reset() { *x = LineFragmentMatch{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[18] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1946,7 +2050,7 @@ func (x *LineFragmentMatch) String() string { func (*LineFragmentMatch) ProtoMessage() {} func (x *LineFragmentMatch) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[18] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1959,7 +2063,7 @@ func (x *LineFragmentMatch) ProtoReflect() protoreflect.Message { // Deprecated: Use LineFragmentMatch.ProtoReflect.Descriptor instead. 
func (*LineFragmentMatch) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{18} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{19} } func (x *LineFragmentMatch) GetLineOffset() int64 { @@ -2004,7 +2108,7 @@ type SymbolInfo struct { func (x *SymbolInfo) Reset() { *x = SymbolInfo{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[19] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2017,7 +2121,7 @@ func (x *SymbolInfo) String() string { func (*SymbolInfo) ProtoMessage() {} func (x *SymbolInfo) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[19] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2030,7 +2134,7 @@ func (x *SymbolInfo) ProtoReflect() protoreflect.Message { // Deprecated: Use SymbolInfo.ProtoReflect.Descriptor instead. 
func (*SymbolInfo) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{19} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{20} } func (x *SymbolInfo) GetSym() string { @@ -2089,7 +2193,7 @@ type ChunkMatch struct { func (x *ChunkMatch) Reset() { *x = ChunkMatch{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[20] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2102,7 +2206,7 @@ func (x *ChunkMatch) String() string { func (*ChunkMatch) ProtoMessage() {} func (x *ChunkMatch) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[20] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2115,7 +2219,7 @@ func (x *ChunkMatch) ProtoReflect() protoreflect.Message { // Deprecated: Use ChunkMatch.ProtoReflect.Descriptor instead. 
func (*ChunkMatch) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{20} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{21} } func (x *ChunkMatch) GetContent() []byte { @@ -2188,7 +2292,7 @@ type Range struct { func (x *Range) Reset() { *x = Range{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[21] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2201,7 +2305,7 @@ func (x *Range) String() string { func (*Range) ProtoMessage() {} func (x *Range) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[21] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2214,7 +2318,7 @@ func (x *Range) ProtoReflect() protoreflect.Message { // Deprecated: Use Range.ProtoReflect.Descriptor instead. 
func (*Range) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{21} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{22} } func (x *Range) GetStart() *Location { @@ -2247,7 +2351,7 @@ type Location struct { func (x *Location) Reset() { *x = Location{} if protoimpl.UnsafeEnabled { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[22] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2260,7 +2364,7 @@ func (x *Location) String() string { func (*Location) ProtoMessage() {} func (x *Location) ProtoReflect() protoreflect.Message { - mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[22] + mi := &file_zoekt_webserver_v1_webserver_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2273,7 +2377,7 @@ func (x *Location) ProtoReflect() protoreflect.Message { // Deprecated: Use Location.ProtoReflect.Descriptor instead. 
func (*Location) Descriptor() ([]byte, []int) { - return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{22} + return file_zoekt_webserver_v1_webserver_proto_rawDescGZIP(), []int{23} } func (x *Location) GetByteOffset() uint32 { @@ -2439,7 +2543,7 @@ var file_zoekt_webserver_v1_webserver_proto_rawDesc = []byte{ 0x74, 0x61, 0x12, 0x33, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x53, 0x74, 0x61, 0x74, 0x73, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0x96, 0x08, 0x0a, 0x0a, 0x52, 0x65, 0x70, 0x6f, + 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0xe1, 0x08, 0x0a, 0x0a, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, @@ -2491,292 +2595,319 @@ var file_zoekt_webserver_v1_webserver_proto_rawDesc = []byte{ 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x5d, 0x0a, 0x0f, 0x53, 0x75, 0x62, 0x52, 0x65, 0x70, - 0x6f, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, - 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, - 0x52, 0x65, 0x70, 0x6f, 0x73, 
0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3c, 0x0a, 0x0e, 0x52, 0x61, 0x77, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x1a, 0x3b, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x49, 0x0a, 0x0b, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x5f, + 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x7a, 0x6f, + 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, 0x6c, 0x74, 0x61, + 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x0a, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x53, 0x74, 0x61, 0x74, + 0x73, 0x1a, 0x5d, 0x0a, 0x0f, 0x53, 0x75, 0x62, 0x52, 0x65, 0x70, 0x6f, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x22, 0xd6, 0x03, 0x0a, 0x0d, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x12, 0x30, 0x0a, 0x14, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x66, 0x6f, 0x72, 0x6d, - 0x61, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x12, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x15, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x66, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 
0x6e, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x13, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x37, 0x0a, 0x18, 0x69, 0x6e, 0x64, 0x65, - 0x78, 0x5f, 0x6d, 0x69, 0x6e, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x15, 0x69, 0x6e, 0x64, 0x65, - 0x78, 0x4d, 0x69, 0x6e, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x52, 0x09, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, - 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x5f, 0x61, 0x73, 0x63, 0x69, 0x69, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x0a, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x41, 0x73, 0x63, 0x69, 0x69, 0x12, 0x55, 0x0a, - 0x0c, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x06, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, - 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, - 0x65, 0x4d, 0x61, 0x70, 0x12, 0x23, 0x0a, 0x0d, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x5f, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x7a, 0x6f, 0x65, - 0x6b, 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x1a, 0x3e, 0x0a, 0x10, 0x4c, 0x61, 0x6e, - 0x67, 0x75, 
0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xa1, 0x01, 0x0a, 0x14, 0x4d, 0x69, - 0x6e, 0x69, 0x6d, 0x61, 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x1f, 0x0a, 0x0b, 0x68, 0x61, 0x73, 0x5f, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, - 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, 0x61, 0x73, 0x53, 0x79, 0x6d, 0x62, - 0x6f, 0x6c, 0x73, 0x12, 0x40, 0x0a, 0x08, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, + 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x6f, 0x72, 0x79, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x08, 0x62, 0x72, 0x61, - 0x6e, 0x63, 0x68, 0x65, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x74, - 0x69, 0x6d, 0x65, 0x5f, 0x75, 0x6e, 0x69, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x55, 0x6e, 0x69, 0x78, 0x22, 0x40, 0x0a, - 0x10, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x42, 0x72, 0x61, 0x6e, 0x63, - 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, - 0xcd, 0x02, 0x0a, 0x09, 0x52, 0x65, 0x70, 0x6f, 0x53, 
0x74, 0x61, 0x74, 0x73, 0x12, 0x14, 0x0a, - 0x05, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x72, 0x65, - 0x70, 0x6f, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x06, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, - 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, - 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x64, - 0x65, 0x78, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, - 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x75, - 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0d, 0x6e, 0x65, 0x77, 0x4c, 0x69, 0x6e, - 0x65, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x42, 0x0a, 0x1e, 0x64, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, - 0x1a, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x65, - 0x77, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x42, 0x0a, 0x1e, 0x6f, - 0x74, 0x68, 0x65, 0x72, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x5f, 0x6e, 0x65, - 0x77, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x1a, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, - 0x65, 0x73, 0x4e, 0x65, 0x77, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 
0x22, - 0xa9, 0x07, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x30, 0x0a, 0x14, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x12, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x42, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x69, - 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x10, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x79, - 0x74, 0x65, 0x73, 0x4c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x72, 0x61, - 0x73, 0x68, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x63, 0x72, 0x61, 0x73, - 0x68, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, - 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, - 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x16, 0x73, 0x68, 0x61, - 0x72, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, - 0x72, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x14, 0x73, 0x68, 0x61, 0x72, 0x64, - 0x46, 0x69, 0x6c, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x12, - 0x29, 0x0a, 0x10, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, - 0x72, 0x65, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x66, 0x69, 0x6c, 0x65, 0x73, - 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x69, - 0x6c, 0x65, 0x73, 0x5f, 0x6c, 
0x6f, 0x61, 0x64, 0x65, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x4c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x12, 0x23, 0x0a, - 0x0d, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x53, 0x6b, 0x69, 0x70, 0x70, - 0x65, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x5f, 0x73, 0x63, 0x61, - 0x6e, 0x6e, 0x65, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x73, 0x68, 0x61, 0x72, - 0x64, 0x73, 0x53, 0x63, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x68, 0x61, - 0x72, 0x64, 0x73, 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, 0x18, 0x0b, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x0d, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x53, 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, - 0x12, 0x32, 0x0a, 0x15, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x70, - 0x65, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x03, 0x52, - 0x13, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x53, 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x6d, 0x61, 0x74, 0x63, 0x68, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x6e, 0x67, 0x72, 0x61, 0x6d, 0x5f, 0x6d, - 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6e, 0x67, - 0x72, 0x61, 0x6d, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x04, 0x77, 0x61, - 0x69, 0x74, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x77, 0x61, 0x69, 0x74, 0x12, 0x51, 0x0a, 0x17, 0x6d, 0x61, 0x74, - 0x63, 0x68, 0x5f, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 
0x73, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x15, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x54, 0x72, 0x65, 0x65, - 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x45, 0x0a, 0x11, - 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x73, 0x65, 0x61, 0x72, 0x63, - 0x68, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x0f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x54, 0x72, 0x65, 0x65, 0x53, 0x65, 0x61, - 0x72, 0x63, 0x68, 0x12, 0x2d, 0x0a, 0x12, 0x72, 0x65, 0x67, 0x65, 0x78, 0x70, 0x73, 0x5f, 0x63, - 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x18, 0x10, 0x20, 0x01, 0x28, 0x03, 0x52, - 0x11, 0x72, 0x65, 0x67, 0x65, 0x78, 0x70, 0x73, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, - 0x65, 0x64, 0x12, 0x42, 0x0a, 0x0c, 0x66, 0x6c, 0x75, 0x73, 0x68, 0x5f, 0x72, 0x65, 0x61, 0x73, - 0x6f, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, - 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x46, 0x6c, - 0x75, 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x52, 0x0b, 0x66, 0x6c, 0x75, 0x73, 0x68, - 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x6e, 0x67, 0x72, 0x61, 0x6d, 0x5f, - 0x6c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6e, - 0x67, 0x72, 0x61, 0x6d, 0x4c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, 0x73, 0x22, 0x58, 0x0a, 0x08, 0x50, - 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, - 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, - 0x69, 
0x74, 0x79, 0x12, 0x30, 0x0a, 0x14, 0x6d, 0x61, 0x78, 0x5f, 0x70, 0x65, 0x6e, 0x64, 0x69, - 0x6e, 0x67, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x01, 0x52, 0x12, 0x6d, 0x61, 0x78, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x50, 0x72, 0x69, - 0x6f, 0x72, 0x69, 0x74, 0x79, 0x22, 0xb9, 0x04, 0x0a, 0x09, 0x46, 0x69, 0x6c, 0x65, 0x4d, 0x61, - 0x74, 0x63, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x65, 0x62, - 0x75, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x64, 0x65, 0x62, 0x75, 0x67, 0x12, - 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, - 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, - 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, - 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x12, 0x40, 0x0a, 0x0c, 0x6c, 0x69, 0x6e, 0x65, - 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, + 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x1a, 0x3c, 0x0a, 0x0e, 0x52, 0x61, 0x77, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3b, + 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xe8, 0x02, 0x0a, 0x14, + 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x53, + 0x74, 0x61, 0x74, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x69, 0x76, 0x65, 0x5f, 0x69, 0x6e, 0x64, + 0x65, 0x78, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x10, 0x6c, 0x69, 0x76, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x42, 0x79, 0x74, + 0x65, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x6c, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x11, 0x6c, 0x69, 0x76, 0x65, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x75, + 0x6e, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6c, 0x69, 0x76, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x5f, + 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0d, 0x6c, 0x69, 0x76, + 0x65, 0x50, 0x61, 0x74, 0x68, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x16, 0x70, 0x68, + 0x79, 0x73, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x5f, 0x62, + 0x79, 0x74, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x14, 0x70, 0x68, 0x79, 0x73, + 0x69, 0x63, 0x61, 0x6c, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x12, 0x36, 0x0a, 0x17, 0x70, 0x68, 0x79, 0x73, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x64, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x15, 0x70, 0x68, 0x79, 0x73, 0x69, 0x63, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x30, 0x0a, 0x14, 0x74, 0x6f, 0x6d, 0x62, + 0x73, 0x74, 0x6f, 0x6e, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x5f, 0x63, 0x6f, 
0x75, 0x6e, 0x74, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x12, 0x74, 0x6f, 0x6d, 0x62, 0x73, 0x74, 0x6f, 0x6e, + 0x65, 0x50, 0x61, 0x74, 0x68, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2a, 0x0a, 0x11, 0x64, 0x65, + 0x6c, 0x74, 0x61, 0x5f, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x4c, 0x61, 0x79, 0x65, + 0x72, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xd6, 0x03, 0x0a, 0x0d, 0x49, 0x6e, 0x64, 0x65, 0x78, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x30, 0x0a, 0x14, 0x69, 0x6e, 0x64, 0x65, + 0x78, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x12, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x46, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x15, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x37, + 0x0a, 0x18, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x6d, 0x69, 0x6e, 0x5f, 0x72, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x15, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4d, 0x69, 0x6e, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x54, 0x69, + 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x5f, 0x61, 0x73, 0x63, 0x69, + 0x69, 0x18, 0x05, 
0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x70, 0x6c, 0x61, 0x69, 0x6e, 0x41, 0x73, + 0x63, 0x69, 0x69, 0x12, 0x55, 0x0a, 0x0c, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, + 0x6d, 0x61, 0x70, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, + 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4c, 0x61, 0x6e, + 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x6c, + 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x12, 0x23, 0x0a, 0x0d, 0x7a, 0x6f, + 0x65, 0x6b, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0c, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x1a, + 0x3e, 0x0a, 0x10, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0xa1, 0x01, 0x0a, 0x14, 0x4d, 0x69, 0x6e, 0x69, 0x6d, 0x61, 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x4c, + 0x69, 0x73, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x1f, 0x0a, 0x0b, 0x68, 0x61, 0x73, 0x5f, + 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, + 0x61, 0x73, 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x12, 0x40, 0x0a, 0x08, 0x62, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x7a, 0x6f, + 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 
0x72, 0x79, 0x42, 0x72, 0x61, 0x6e, 0x63, + 0x68, 0x52, 0x08, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x75, 0x6e, 0x69, 0x78, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x55, + 0x6e, 0x69, 0x78, 0x22, 0x40, 0x0a, 0x10, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, + 0x79, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xcd, 0x02, 0x0a, 0x09, 0x52, 0x65, 0x70, 0x6f, 0x53, 0x74, + 0x61, 0x74, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x05, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x68, 0x61, + 0x72, 0x64, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x73, 0x68, 0x61, 0x72, 0x64, + 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, + 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, + 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x69, 0x6e, + 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0d, + 0x6e, 0x65, 0x77, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x42, 
0x0a, + 0x1e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, + 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x1a, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x42, 0x72, + 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x65, 0x77, 0x4c, 0x69, 0x6e, 0x65, 0x73, 0x43, 0x6f, 0x75, 0x6e, + 0x74, 0x12, 0x42, 0x0a, 0x1e, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, + 0x68, 0x65, 0x73, 0x5f, 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x5f, 0x63, 0x6f, + 0x75, 0x6e, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52, 0x1a, 0x6f, 0x74, 0x68, 0x65, 0x72, + 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x4e, 0x65, 0x77, 0x4c, 0x69, 0x6e, 0x65, 0x73, + 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xa9, 0x07, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, + 0x30, 0x0a, 0x14, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, + 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x12, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x42, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x6f, 0x61, 0x64, 0x65, + 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, + 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x10, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x42, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x12, + 0x18, 0x0a, 0x07, 0x63, 0x72, 0x61, 0x73, 0x68, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x07, 0x63, 0x72, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x08, 0x64, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x1d, 0x0a, 0x0a, 0x66, 
0x69, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, + 0x34, 0x0a, 0x16, 0x73, 0x68, 0x61, 0x72, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x63, + 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x14, 0x73, 0x68, 0x61, 0x72, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x73, 0x69, + 0x64, 0x65, 0x72, 0x65, 0x64, 0x12, 0x29, 0x0a, 0x10, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x63, + 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, + 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, + 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x4c, 0x6f, 0x61, + 0x64, 0x65, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x73, 0x6b, 0x69, + 0x70, 0x70, 0x65, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x66, 0x69, 0x6c, 0x65, + 0x73, 0x53, 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x68, 0x61, 0x72, + 0x64, 0x73, 0x5f, 0x73, 0x63, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x0d, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x53, 0x63, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x12, + 0x25, 0x0a, 0x0e, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x70, 0x65, + 0x64, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x53, + 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, 0x12, 0x32, 0x0a, 0x15, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, + 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x70, 0x65, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, + 0x0c, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x73, 0x68, 0x61, 0x72, 0x64, 0x73, 0x53, 0x6b, 0x69, + 0x70, 0x70, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 
0x1f, 0x0a, 0x0b, 0x6d, 0x61, + 0x74, 0x63, 0x68, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0a, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x6e, + 0x67, 0x72, 0x61, 0x6d, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x0e, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x0c, 0x6e, 0x67, 0x72, 0x61, 0x6d, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, + 0x12, 0x2d, 0x0a, 0x04, 0x77, 0x61, 0x69, 0x74, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x77, 0x61, 0x69, 0x74, 0x12, + 0x51, 0x0a, 0x17, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x15, 0x6d, 0x61, 0x74, + 0x63, 0x68, 0x54, 0x72, 0x65, 0x65, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x45, 0x0a, 0x11, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x74, 0x72, 0x65, 0x65, + 0x5f, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x54, + 0x72, 0x65, 0x65, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x2d, 0x0a, 0x12, 0x72, 0x65, 0x67, + 0x65, 0x78, 0x70, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x18, + 0x10, 0x20, 0x01, 0x28, 0x03, 0x52, 0x11, 0x72, 0x65, 0x67, 0x65, 0x78, 0x70, 0x73, 0x43, 0x6f, + 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x65, 0x64, 0x12, 0x42, 0x0a, 0x0c, 0x66, 0x6c, 0x75, 0x73, + 0x68, 
0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x52, 0x0b, 0x6c, - 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0d, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, - 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x4d, 0x61, 0x74, 0x63, - 0x68, 0x52, 0x0c, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, - 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, - 0x72, 0x79, 0x49, 0x64, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, - 0x72, 0x79, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, - 0x01, 0x52, 0x12, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x50, 0x72, 0x69, - 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, - 0x1a, 0x0a, 0x08, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x08, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x6c, - 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6c, - 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, 0x13, 0x73, 0x75, 0x62, 0x5f, 0x72, - 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0d, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x75, 
0x62, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x6f, 0x72, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2e, 0x0a, 0x13, 0x73, 0x75, 0x62, 0x5f, 0x72, - 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x0e, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x75, 0x62, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x6f, 0x72, 0x79, 0x50, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x22, 0xca, 0x02, 0x0a, 0x09, 0x4c, 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, - 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6c, - 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x74, 0x61, - 0x72, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x65, 0x6e, 0x64, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x6e, 0x64, 0x12, 0x1f, 0x0a, - 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x03, 0x52, 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x16, - 0x0a, 0x06, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, - 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x66, 0x74, 0x65, 0x72, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x61, 0x66, 0x74, 0x65, 0x72, 0x12, 0x1b, 0x0a, 0x09, - 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, - 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, - 0x72, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, - 0x1f, 0x0a, 0x0b, 0x64, 0x65, 0x62, 0x75, 0x67, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 
0x18, 0x09, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x65, 0x62, 0x75, 0x67, 0x53, 0x63, 0x6f, 0x72, 0x65, - 0x12, 0x4c, 0x0a, 0x0e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, - 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, - 0x6e, 0x65, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x52, - 0x0d, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xc5, - 0x01, 0x0a, 0x11, 0x4c, 0x69, 0x6e, 0x65, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x4d, - 0x61, 0x74, 0x63, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6f, 0x66, 0x66, - 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x4f, - 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x21, 0x0a, - 0x0c, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x0b, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x12, 0x44, 0x0a, 0x0b, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, - 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6d, 0x62, 0x6f, - 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x49, - 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x79, 0x6d, 0x62, 0x6f, - 0x6c, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0x6b, 0x0a, 0x0a, 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, - 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x79, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x73, 
0x79, 0x6d, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6b, 0x69, 0x6e, - 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4b, - 0x69, 0x6e, 0x64, 0x22, 0xd9, 0x02, 0x0a, 0x0a, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x4d, 0x61, 0x74, - 0x63, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x41, 0x0a, 0x0d, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, - 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x31, 0x0a, 0x06, - 0x72, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x7a, - 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, - 0x31, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x06, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, - 0x3f, 0x0a, 0x0b, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x05, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, - 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, - 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0a, 0x73, 0x79, 0x6d, 0x62, 
0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, - 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, - 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x64, 0x65, 0x62, 0x75, 0x67, 0x5f, - 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x65, 0x62, - 0x75, 0x67, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x62, 0x65, 0x73, 0x74, 0x5f, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0d, - 0x52, 0x0d, 0x62, 0x65, 0x73, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x22, - 0x6b, 0x0a, 0x05, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x32, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, - 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2e, 0x0a, 0x03, - 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, + 0x2e, 0x76, 0x31, 0x2e, 0x46, 0x6c, 0x75, 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x52, + 0x0b, 0x66, 0x6c, 0x75, 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, + 0x6e, 0x67, 0x72, 0x61, 0x6d, 0x5f, 0x6c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, 0x73, 0x18, 0x12, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6e, 0x67, 0x72, 0x61, 0x6d, 0x4c, 0x6f, 0x6f, 0x6b, 0x75, 0x70, + 0x73, 0x22, 0x58, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1a, 0x0a, + 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, + 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x30, 0x0a, 0x14, 0x6d, 0x61, 0x78, + 0x5f, 0x70, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, + 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x12, 0x6d, 0x61, 0x78, 0x50, 0x65, 0x6e, 0x64, + 
0x69, 0x6e, 0x67, 0x50, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x22, 0xb9, 0x04, 0x0a, 0x09, + 0x46, 0x69, 0x6c, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, + 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x64, 0x65, 0x62, 0x75, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x64, 0x65, 0x62, 0x75, 0x67, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x18, 0x05, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x12, 0x40, + 0x0a, 0x0c, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x06, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x4d, 0x61, + 0x74, 0x63, 0x68, 0x52, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, + 0x12, 0x43, 0x0a, 0x0d, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, + 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, + 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x75, + 0x6e, 0x6b, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x52, 0x0c, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x4d, 0x61, + 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 
0x72, 0x79, 0x49, 0x64, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, + 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, 0x01, 0x52, 0x12, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x50, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, + 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, + 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x18, 0x0c, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, + 0x13, 0x73, 0x75, 0x62, 0x5f, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x75, 0x62, 0x52, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2e, 0x0a, + 0x13, 0x73, 0x75, 0x62, 0x5f, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, + 0x70, 0x61, 0x74, 0x68, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x75, 0x62, 0x52, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x50, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xca, 0x02, 0x0a, 0x09, 0x4c, 0x69, 0x6e, 0x65, + 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x69, 0x6e, + 0x65, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6c, + 0x69, 0x6e, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x19, 0x0a, 0x08, 
0x6c, 0x69, 0x6e, 0x65, + 0x5f, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x6c, 0x69, 0x6e, 0x65, + 0x45, 0x6e, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6e, 0x75, 0x6d, 0x62, + 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x61, 0x66, 0x74, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x61, 0x66, 0x74, + 0x65, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, + 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x64, 0x65, 0x62, 0x75, 0x67, 0x5f, 0x73, + 0x63, 0x6f, 0x72, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x65, 0x62, 0x75, + 0x67, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x4c, 0x0a, 0x0e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x66, + 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, + 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, + 0x4d, 0x61, 0x74, 0x63, 0x68, 0x52, 0x0d, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x72, 0x61, 0x67, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x22, 0xc5, 0x01, 0x0a, 0x11, 0x4c, 0x69, 0x6e, 0x65, 0x46, 0x72, 0x61, + 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x69, + 0x6e, 0x65, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, + 0x66, 0x66, 
0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6f, 0x66, 0x66, + 0x73, 0x65, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x6d, 0x61, 0x74, 0x63, 0x68, + 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x44, 0x0a, 0x0b, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, + 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, + 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x0a, 0x73, + 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, + 0x5f, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0x6b, 0x0a, 0x0a, + 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x79, + 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x79, 0x6d, 0x12, 0x12, 0x0a, 0x04, + 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, + 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4b, 0x69, 0x6e, 0x64, 0x22, 0xd9, 0x02, 0x0a, 0x0a, 0x43, 0x68, + 0x75, 0x6e, 0x6b, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x12, 0x41, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 
0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, - 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x22, 0x64, 0x0a, 0x08, - 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, - 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x62, - 0x79, 0x74, 0x65, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, - 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, - 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x63, 0x6f, 0x6c, 0x75, - 0x6d, 0x6e, 0x2a, 0x8c, 0x01, 0x0a, 0x0b, 0x46, 0x6c, 0x75, 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, - 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x20, 0x46, 0x4c, 0x55, 0x53, 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, - 0x4f, 0x4e, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, - 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x1e, 0x0a, 0x1a, 0x46, 0x4c, 0x55, 0x53, - 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x54, 0x49, 0x4d, 0x45, 0x52, 0x5f, 0x45, - 0x58, 0x50, 0x49, 0x52, 0x45, 0x44, 0x10, 0x01, 0x12, 0x1c, 0x0a, 0x18, 0x46, 0x4c, 0x55, 0x53, - 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x46, 0x49, 0x4e, 0x41, 0x4c, 0x5f, 0x46, - 0x4c, 0x55, 0x53, 0x48, 0x10, 0x02, 0x12, 0x19, 0x0a, 0x15, 0x46, 0x4c, 0x55, 0x53, 0x48, 0x5f, - 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x4d, 0x41, 0x58, 0x5f, 0x53, 0x49, 0x5a, 0x45, 0x10, - 0x03, 0x32, 0x99, 0x02, 0x0a, 0x10, 0x57, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x51, 0x0a, 0x06, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, - 0x12, 0x21, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x71, 
0x75, - 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x65, 0x0a, 0x0c, 0x53, 0x74, 0x72, - 0x65, 0x61, 0x6d, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x27, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, + 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, + 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x06, 0x72, + 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x3f, 0x0a, 0x0b, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x5f, + 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x7a, 0x6f, 0x65, + 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x53, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0a, 0x73, 0x79, 0x6d, 0x62, + 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x1f, 0x0a, 0x0b, + 0x64, 0x65, 0x62, 0x75, 0x67, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x64, 0x65, 0x62, 0x75, 0x67, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x26, 0x0a, + 0x0f, 0x62, 0x65, 0x73, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, + 0x18, 0x08, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x62, 0x65, 0x73, 0x74, 0x4c, 0x69, 0x6e, 0x65, + 0x4d, 0x61, 0x74, 0x63, 0x68, 
0x22, 0x6b, 0x0a, 0x05, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x32, + 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, + 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x12, 0x2e, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1c, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x65, + 0x6e, 0x64, 0x22, 0x64, 0x0a, 0x08, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, + 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, + 0x1f, 0x0a, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, + 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x2a, 0x8c, 0x01, 0x0a, 0x0b, 0x46, 0x6c, 0x75, + 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x20, 0x46, 0x4c, 0x55, 0x53, + 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, + 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x1e, + 0x0a, 0x1a, 0x46, 0x4c, 0x55, 0x53, 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x54, + 0x49, 0x4d, 0x45, 0x52, 0x5f, 0x45, 0x58, 0x50, 0x49, 0x52, 0x45, 0x44, 0x10, 0x01, 0x12, 0x1c, + 0x0a, 0x18, 0x46, 0x4c, 0x55, 0x53, 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x46, + 0x49, 0x4e, 0x41, 0x4c, 0x5f, 0x46, 0x4c, 0x55, 0x53, 0x48, 0x10, 
0x02, 0x12, 0x19, 0x0a, 0x15, + 0x46, 0x4c, 0x55, 0x53, 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x4d, 0x41, 0x58, + 0x5f, 0x53, 0x49, 0x5a, 0x45, 0x10, 0x03, 0x32, 0x99, 0x02, 0x0a, 0x10, 0x57, 0x65, 0x62, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x51, 0x0a, 0x06, + 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x21, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, + 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x61, 0x72, + 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, - 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x65, - 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, - 0x12, 0x4b, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1f, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, - 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, - 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, - 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x3d, 0x5a, - 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x67, 0x72, 0x61, 0x70, 0x68, 0x2f, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2f, 0x67, 0x72, - 0x70, 0x63, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2f, - 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, - 0x6f, 
0x74, 0x6f, 0x33, + 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x65, 0x0a, 0x0c, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, + 0x27, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x65, 0x61, 0x72, 0x63, + 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, + 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x4b, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1f, + 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x20, 0x2e, 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2e, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x67, 0x72, 0x61, 0x70, 0x68, 0x2f, 0x7a, 0x6f, + 0x65, 0x6b, 0x74, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, + 0x7a, 0x6f, 0x65, 0x6b, 0x74, 0x2f, 0x77, 0x65, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, + 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2792,7 +2923,7 @@ func file_zoekt_webserver_v1_webserver_proto_rawDescGZIP() []byte { } var file_zoekt_webserver_v1_webserver_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_zoekt_webserver_v1_webserver_proto_msgTypes = make([]protoimpl.MessageInfo, 28) +var file_zoekt_webserver_v1_webserver_proto_msgTypes = 
make([]protoimpl.MessageInfo, 29) var file_zoekt_webserver_v1_webserver_proto_goTypes = []interface{}{ (FlushReason)(0), // 0: zoekt.webserver.v1.FlushReason (ListOptions_RepoListField)(0), // 1: zoekt.webserver.v1.ListOptions.RepoListField @@ -2806,82 +2937,84 @@ var file_zoekt_webserver_v1_webserver_proto_goTypes = []interface{}{ (*ListResponse)(nil), // 9: zoekt.webserver.v1.ListResponse (*RepoListEntry)(nil), // 10: zoekt.webserver.v1.RepoListEntry (*Repository)(nil), // 11: zoekt.webserver.v1.Repository - (*IndexMetadata)(nil), // 12: zoekt.webserver.v1.IndexMetadata - (*MinimalRepoListEntry)(nil), // 13: zoekt.webserver.v1.MinimalRepoListEntry - (*RepositoryBranch)(nil), // 14: zoekt.webserver.v1.RepositoryBranch - (*RepoStats)(nil), // 15: zoekt.webserver.v1.RepoStats - (*Stats)(nil), // 16: zoekt.webserver.v1.Stats - (*Progress)(nil), // 17: zoekt.webserver.v1.Progress - (*FileMatch)(nil), // 18: zoekt.webserver.v1.FileMatch - (*LineMatch)(nil), // 19: zoekt.webserver.v1.LineMatch - (*LineFragmentMatch)(nil), // 20: zoekt.webserver.v1.LineFragmentMatch - (*SymbolInfo)(nil), // 21: zoekt.webserver.v1.SymbolInfo - (*ChunkMatch)(nil), // 22: zoekt.webserver.v1.ChunkMatch - (*Range)(nil), // 23: zoekt.webserver.v1.Range - (*Location)(nil), // 24: zoekt.webserver.v1.Location - nil, // 25: zoekt.webserver.v1.ListResponse.ReposMapEntry - nil, // 26: zoekt.webserver.v1.Repository.SubRepoMapEntry - nil, // 27: zoekt.webserver.v1.Repository.RawConfigEntry - nil, // 28: zoekt.webserver.v1.Repository.MetadataEntry - nil, // 29: zoekt.webserver.v1.IndexMetadata.LanguageMapEntry - (*Q)(nil), // 30: zoekt.webserver.v1.Q - (*durationpb.Duration)(nil), // 31: google.protobuf.Duration - (*timestamppb.Timestamp)(nil), // 32: google.protobuf.Timestamp + (*RepositoryDeltaStats)(nil), // 12: zoekt.webserver.v1.RepositoryDeltaStats + (*IndexMetadata)(nil), // 13: zoekt.webserver.v1.IndexMetadata + (*MinimalRepoListEntry)(nil), // 14: zoekt.webserver.v1.MinimalRepoListEntry + 
(*RepositoryBranch)(nil), // 15: zoekt.webserver.v1.RepositoryBranch + (*RepoStats)(nil), // 16: zoekt.webserver.v1.RepoStats + (*Stats)(nil), // 17: zoekt.webserver.v1.Stats + (*Progress)(nil), // 18: zoekt.webserver.v1.Progress + (*FileMatch)(nil), // 19: zoekt.webserver.v1.FileMatch + (*LineMatch)(nil), // 20: zoekt.webserver.v1.LineMatch + (*LineFragmentMatch)(nil), // 21: zoekt.webserver.v1.LineFragmentMatch + (*SymbolInfo)(nil), // 22: zoekt.webserver.v1.SymbolInfo + (*ChunkMatch)(nil), // 23: zoekt.webserver.v1.ChunkMatch + (*Range)(nil), // 24: zoekt.webserver.v1.Range + (*Location)(nil), // 25: zoekt.webserver.v1.Location + nil, // 26: zoekt.webserver.v1.ListResponse.ReposMapEntry + nil, // 27: zoekt.webserver.v1.Repository.SubRepoMapEntry + nil, // 28: zoekt.webserver.v1.Repository.RawConfigEntry + nil, // 29: zoekt.webserver.v1.Repository.MetadataEntry + nil, // 30: zoekt.webserver.v1.IndexMetadata.LanguageMapEntry + (*Q)(nil), // 31: zoekt.webserver.v1.Q + (*durationpb.Duration)(nil), // 32: google.protobuf.Duration + (*timestamppb.Timestamp)(nil), // 33: google.protobuf.Timestamp } var file_zoekt_webserver_v1_webserver_proto_depIdxs = []int32{ - 30, // 0: zoekt.webserver.v1.SearchRequest.query:type_name -> zoekt.webserver.v1.Q + 31, // 0: zoekt.webserver.v1.SearchRequest.query:type_name -> zoekt.webserver.v1.Q 6, // 1: zoekt.webserver.v1.SearchRequest.opts:type_name -> zoekt.webserver.v1.SearchOptions - 16, // 2: zoekt.webserver.v1.SearchResponse.stats:type_name -> zoekt.webserver.v1.Stats - 17, // 3: zoekt.webserver.v1.SearchResponse.progress:type_name -> zoekt.webserver.v1.Progress - 18, // 4: zoekt.webserver.v1.SearchResponse.files:type_name -> zoekt.webserver.v1.FileMatch + 17, // 2: zoekt.webserver.v1.SearchResponse.stats:type_name -> zoekt.webserver.v1.Stats + 18, // 3: zoekt.webserver.v1.SearchResponse.progress:type_name -> zoekt.webserver.v1.Progress + 19, // 4: zoekt.webserver.v1.SearchResponse.files:type_name -> zoekt.webserver.v1.FileMatch 
2, // 5: zoekt.webserver.v1.StreamSearchRequest.request:type_name -> zoekt.webserver.v1.SearchRequest 3, // 6: zoekt.webserver.v1.StreamSearchResponse.response_chunk:type_name -> zoekt.webserver.v1.SearchResponse - 31, // 7: zoekt.webserver.v1.SearchOptions.max_wall_time:type_name -> google.protobuf.Duration - 31, // 8: zoekt.webserver.v1.SearchOptions.flush_wall_time:type_name -> google.protobuf.Duration - 30, // 9: zoekt.webserver.v1.ListRequest.query:type_name -> zoekt.webserver.v1.Q + 32, // 7: zoekt.webserver.v1.SearchOptions.max_wall_time:type_name -> google.protobuf.Duration + 32, // 8: zoekt.webserver.v1.SearchOptions.flush_wall_time:type_name -> google.protobuf.Duration + 31, // 9: zoekt.webserver.v1.ListRequest.query:type_name -> zoekt.webserver.v1.Q 8, // 10: zoekt.webserver.v1.ListRequest.opts:type_name -> zoekt.webserver.v1.ListOptions 1, // 11: zoekt.webserver.v1.ListOptions.field:type_name -> zoekt.webserver.v1.ListOptions.RepoListField 10, // 12: zoekt.webserver.v1.ListResponse.repos:type_name -> zoekt.webserver.v1.RepoListEntry - 25, // 13: zoekt.webserver.v1.ListResponse.repos_map:type_name -> zoekt.webserver.v1.ListResponse.ReposMapEntry - 15, // 14: zoekt.webserver.v1.ListResponse.stats:type_name -> zoekt.webserver.v1.RepoStats + 26, // 13: zoekt.webserver.v1.ListResponse.repos_map:type_name -> zoekt.webserver.v1.ListResponse.ReposMapEntry + 16, // 14: zoekt.webserver.v1.ListResponse.stats:type_name -> zoekt.webserver.v1.RepoStats 11, // 15: zoekt.webserver.v1.RepoListEntry.repository:type_name -> zoekt.webserver.v1.Repository - 12, // 16: zoekt.webserver.v1.RepoListEntry.index_metadata:type_name -> zoekt.webserver.v1.IndexMetadata - 15, // 17: zoekt.webserver.v1.RepoListEntry.stats:type_name -> zoekt.webserver.v1.RepoStats - 14, // 18: zoekt.webserver.v1.Repository.branches:type_name -> zoekt.webserver.v1.RepositoryBranch - 26, // 19: zoekt.webserver.v1.Repository.sub_repo_map:type_name -> zoekt.webserver.v1.Repository.SubRepoMapEntry - 27, // 
20: zoekt.webserver.v1.Repository.raw_config:type_name -> zoekt.webserver.v1.Repository.RawConfigEntry - 32, // 21: zoekt.webserver.v1.Repository.latest_commit_date:type_name -> google.protobuf.Timestamp - 28, // 22: zoekt.webserver.v1.Repository.metadata:type_name -> zoekt.webserver.v1.Repository.MetadataEntry - 32, // 23: zoekt.webserver.v1.IndexMetadata.index_time:type_name -> google.protobuf.Timestamp - 29, // 24: zoekt.webserver.v1.IndexMetadata.language_map:type_name -> zoekt.webserver.v1.IndexMetadata.LanguageMapEntry - 14, // 25: zoekt.webserver.v1.MinimalRepoListEntry.branches:type_name -> zoekt.webserver.v1.RepositoryBranch - 31, // 26: zoekt.webserver.v1.Stats.duration:type_name -> google.protobuf.Duration - 31, // 27: zoekt.webserver.v1.Stats.wait:type_name -> google.protobuf.Duration - 31, // 28: zoekt.webserver.v1.Stats.match_tree_construction:type_name -> google.protobuf.Duration - 31, // 29: zoekt.webserver.v1.Stats.match_tree_search:type_name -> google.protobuf.Duration - 0, // 30: zoekt.webserver.v1.Stats.flush_reason:type_name -> zoekt.webserver.v1.FlushReason - 19, // 31: zoekt.webserver.v1.FileMatch.line_matches:type_name -> zoekt.webserver.v1.LineMatch - 22, // 32: zoekt.webserver.v1.FileMatch.chunk_matches:type_name -> zoekt.webserver.v1.ChunkMatch - 20, // 33: zoekt.webserver.v1.LineMatch.line_fragments:type_name -> zoekt.webserver.v1.LineFragmentMatch - 21, // 34: zoekt.webserver.v1.LineFragmentMatch.symbol_info:type_name -> zoekt.webserver.v1.SymbolInfo - 24, // 35: zoekt.webserver.v1.ChunkMatch.content_start:type_name -> zoekt.webserver.v1.Location - 23, // 36: zoekt.webserver.v1.ChunkMatch.ranges:type_name -> zoekt.webserver.v1.Range - 21, // 37: zoekt.webserver.v1.ChunkMatch.symbol_info:type_name -> zoekt.webserver.v1.SymbolInfo - 24, // 38: zoekt.webserver.v1.Range.start:type_name -> zoekt.webserver.v1.Location - 24, // 39: zoekt.webserver.v1.Range.end:type_name -> zoekt.webserver.v1.Location - 13, // 40: 
zoekt.webserver.v1.ListResponse.ReposMapEntry.value:type_name -> zoekt.webserver.v1.MinimalRepoListEntry - 11, // 41: zoekt.webserver.v1.Repository.SubRepoMapEntry.value:type_name -> zoekt.webserver.v1.Repository - 2, // 42: zoekt.webserver.v1.WebserverService.Search:input_type -> zoekt.webserver.v1.SearchRequest - 4, // 43: zoekt.webserver.v1.WebserverService.StreamSearch:input_type -> zoekt.webserver.v1.StreamSearchRequest - 7, // 44: zoekt.webserver.v1.WebserverService.List:input_type -> zoekt.webserver.v1.ListRequest - 3, // 45: zoekt.webserver.v1.WebserverService.Search:output_type -> zoekt.webserver.v1.SearchResponse - 5, // 46: zoekt.webserver.v1.WebserverService.StreamSearch:output_type -> zoekt.webserver.v1.StreamSearchResponse - 9, // 47: zoekt.webserver.v1.WebserverService.List:output_type -> zoekt.webserver.v1.ListResponse - 45, // [45:48] is the sub-list for method output_type - 42, // [42:45] is the sub-list for method input_type - 42, // [42:42] is the sub-list for extension type_name - 42, // [42:42] is the sub-list for extension extendee - 0, // [0:42] is the sub-list for field type_name + 13, // 16: zoekt.webserver.v1.RepoListEntry.index_metadata:type_name -> zoekt.webserver.v1.IndexMetadata + 16, // 17: zoekt.webserver.v1.RepoListEntry.stats:type_name -> zoekt.webserver.v1.RepoStats + 15, // 18: zoekt.webserver.v1.Repository.branches:type_name -> zoekt.webserver.v1.RepositoryBranch + 27, // 19: zoekt.webserver.v1.Repository.sub_repo_map:type_name -> zoekt.webserver.v1.Repository.SubRepoMapEntry + 28, // 20: zoekt.webserver.v1.Repository.raw_config:type_name -> zoekt.webserver.v1.Repository.RawConfigEntry + 33, // 21: zoekt.webserver.v1.Repository.latest_commit_date:type_name -> google.protobuf.Timestamp + 29, // 22: zoekt.webserver.v1.Repository.metadata:type_name -> zoekt.webserver.v1.Repository.MetadataEntry + 12, // 23: zoekt.webserver.v1.Repository.delta_stats:type_name -> zoekt.webserver.v1.RepositoryDeltaStats + 33, // 24: 
zoekt.webserver.v1.IndexMetadata.index_time:type_name -> google.protobuf.Timestamp + 30, // 25: zoekt.webserver.v1.IndexMetadata.language_map:type_name -> zoekt.webserver.v1.IndexMetadata.LanguageMapEntry + 15, // 26: zoekt.webserver.v1.MinimalRepoListEntry.branches:type_name -> zoekt.webserver.v1.RepositoryBranch + 32, // 27: zoekt.webserver.v1.Stats.duration:type_name -> google.protobuf.Duration + 32, // 28: zoekt.webserver.v1.Stats.wait:type_name -> google.protobuf.Duration + 32, // 29: zoekt.webserver.v1.Stats.match_tree_construction:type_name -> google.protobuf.Duration + 32, // 30: zoekt.webserver.v1.Stats.match_tree_search:type_name -> google.protobuf.Duration + 0, // 31: zoekt.webserver.v1.Stats.flush_reason:type_name -> zoekt.webserver.v1.FlushReason + 20, // 32: zoekt.webserver.v1.FileMatch.line_matches:type_name -> zoekt.webserver.v1.LineMatch + 23, // 33: zoekt.webserver.v1.FileMatch.chunk_matches:type_name -> zoekt.webserver.v1.ChunkMatch + 21, // 34: zoekt.webserver.v1.LineMatch.line_fragments:type_name -> zoekt.webserver.v1.LineFragmentMatch + 22, // 35: zoekt.webserver.v1.LineFragmentMatch.symbol_info:type_name -> zoekt.webserver.v1.SymbolInfo + 25, // 36: zoekt.webserver.v1.ChunkMatch.content_start:type_name -> zoekt.webserver.v1.Location + 24, // 37: zoekt.webserver.v1.ChunkMatch.ranges:type_name -> zoekt.webserver.v1.Range + 22, // 38: zoekt.webserver.v1.ChunkMatch.symbol_info:type_name -> zoekt.webserver.v1.SymbolInfo + 25, // 39: zoekt.webserver.v1.Range.start:type_name -> zoekt.webserver.v1.Location + 25, // 40: zoekt.webserver.v1.Range.end:type_name -> zoekt.webserver.v1.Location + 14, // 41: zoekt.webserver.v1.ListResponse.ReposMapEntry.value:type_name -> zoekt.webserver.v1.MinimalRepoListEntry + 11, // 42: zoekt.webserver.v1.Repository.SubRepoMapEntry.value:type_name -> zoekt.webserver.v1.Repository + 2, // 43: zoekt.webserver.v1.WebserverService.Search:input_type -> zoekt.webserver.v1.SearchRequest + 4, // 44: 
zoekt.webserver.v1.WebserverService.StreamSearch:input_type -> zoekt.webserver.v1.StreamSearchRequest + 7, // 45: zoekt.webserver.v1.WebserverService.List:input_type -> zoekt.webserver.v1.ListRequest + 3, // 46: zoekt.webserver.v1.WebserverService.Search:output_type -> zoekt.webserver.v1.SearchResponse + 5, // 47: zoekt.webserver.v1.WebserverService.StreamSearch:output_type -> zoekt.webserver.v1.StreamSearchResponse + 9, // 48: zoekt.webserver.v1.WebserverService.List:output_type -> zoekt.webserver.v1.ListResponse + 46, // [46:49] is the sub-list for method output_type + 43, // [43:46] is the sub-list for method input_type + 43, // [43:43] is the sub-list for extension type_name + 43, // [43:43] is the sub-list for extension extendee + 0, // [0:43] is the sub-list for field type_name } func init() { file_zoekt_webserver_v1_webserver_proto_init() } @@ -3012,7 +3145,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IndexMetadata); i { + switch v := v.(*RepositoryDeltaStats); i { case 0: return &v.state case 1: @@ -3024,7 +3157,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MinimalRepoListEntry); i { + switch v := v.(*IndexMetadata); i { case 0: return &v.state case 1: @@ -3036,7 +3169,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RepositoryBranch); i { + switch v := v.(*MinimalRepoListEntry); i { case 0: return &v.state case 1: @@ -3048,7 +3181,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RepoStats); i { + switch v := 
v.(*RepositoryBranch); i { case 0: return &v.state case 1: @@ -3060,7 +3193,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Stats); i { + switch v := v.(*RepoStats); i { case 0: return &v.state case 1: @@ -3072,7 +3205,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Progress); i { + switch v := v.(*Stats); i { case 0: return &v.state case 1: @@ -3084,7 +3217,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FileMatch); i { + switch v := v.(*Progress); i { case 0: return &v.state case 1: @@ -3096,7 +3229,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LineMatch); i { + switch v := v.(*FileMatch); i { case 0: return &v.state case 1: @@ -3108,7 +3241,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LineFragmentMatch); i { + switch v := v.(*LineMatch); i { case 0: return &v.state case 1: @@ -3120,7 +3253,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SymbolInfo); i { + switch v := v.(*LineFragmentMatch); i { case 0: return &v.state case 1: @@ -3132,7 +3265,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ChunkMatch); i { + 
switch v := v.(*SymbolInfo); i { case 0: return &v.state case 1: @@ -3144,7 +3277,7 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Range); i { + switch v := v.(*ChunkMatch); i { case 0: return &v.state case 1: @@ -3156,6 +3289,18 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } file_zoekt_webserver_v1_webserver_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Range); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_zoekt_webserver_v1_webserver_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Location); i { case 0: return &v.state @@ -3168,14 +3313,14 @@ func file_zoekt_webserver_v1_webserver_proto_init() { } } } - file_zoekt_webserver_v1_webserver_proto_msgTypes[18].OneofWrappers = []interface{}{} + file_zoekt_webserver_v1_webserver_proto_msgTypes[19].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_zoekt_webserver_v1_webserver_proto_rawDesc, NumEnums: 2, - NumMessages: 28, + NumMessages: 29, NumExtensions: 0, NumServices: 1, }, diff --git a/grpc/protos/zoekt/webserver/v1/webserver.proto b/grpc/protos/zoekt/webserver/v1/webserver.proto index 8fb4fca71..c8a2ec375 100644 --- a/grpc/protos/zoekt/webserver/v1/webserver.proto +++ b/grpc/protos/zoekt/webserver/v1/webserver.proto @@ -219,6 +219,19 @@ message Repository { // Additional metadata about the repository. map metadata = 19; + + // Advisory statistics used by experimental delta-index admission rules. 
+ RepositoryDeltaStats delta_stats = 20; +} + +message RepositoryDeltaStats { + uint64 live_indexed_bytes = 1; + uint64 live_document_count = 2; + uint64 live_path_count = 3; + uint64 physical_indexed_bytes = 4; + uint64 physical_document_count = 5; + uint64 tombstone_path_count = 6; + uint64 delta_layer_count = 7; } message IndexMetadata { From 204efaa9d33a9c78085319615872d9076f970f27 Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 12:44:56 -0700 Subject: [PATCH 08/10] Add smart stats-v1 branch-set deltas Use positional branch mapping for same-length branch-set changes so near-identical HEAD branch switches and branch renames only rewrite affected paths. Expose the required experimental git-index flags through the CLI and indexserver command builder. Co-authored-by: Codex --- cmd/zoekt-git-index/main.go | 4 + cmd/zoekt-sourcegraph-indexserver/index.go | 14 + .../index_integration_test.go | 17 + .../index_test.go | 12 +- gitindex/delta_smart_branchset_stats_test.go | 517 ++++++++++++++++++ gitindex/index.go | 139 +++++ 6 files changed, 698 insertions(+), 5 deletions(-) create mode 100644 gitindex/delta_smart_branchset_stats_test.go diff --git a/cmd/zoekt-git-index/main.go b/cmd/zoekt-git-index/main.go index c8f0d0216..b74874522 100644 --- a/cmd/zoekt-git-index/main.go +++ b/cmd/zoekt-git-index/main.go @@ -47,6 +47,8 @@ func run() int { deltaShardNumberFallbackThreshold := flag.Uint64("delta_threshold", 0, "upper limit on the number of preexisting shards that can exist before attempting a delta build (0 to disable fallback behavior)") deltaAdmissionMode := flag.String("delta_admission_mode", "", "experimental delta admission mode (empty or stats-v1)") deltaAdmissionLogJSON := flag.String("delta_admission_log_json", "", "append experimental delta admission decisions as JSON lines to this file") + resolveHEADToBranch := flag.Bool("resolve_head_to_branch", false, "resolve attached HEAD to its short branch name in indexed metadata") + 
allowDeltaBranchSetChange := flag.Bool("allow_delta_branch_set_change", false, "allow delta builds to update branch sets by rewriting old shard metadata and tombstoning old live paths") languageMap := flag.String("language_map", "", "a mapping between a language and its ctags processor (a:0,b:3).") cpuProfile := flag.String("cpu_profile", "", "write cpu profile to `file`") @@ -136,6 +138,8 @@ func run() int { DeltaShardNumberFallbackThreshold: *deltaShardNumberFallbackThreshold, DeltaAdmissionMode: *deltaAdmissionMode, DeltaAdmissionLogPath: *deltaAdmissionLogJSON, + ResolveHEADToBranch: *resolveHEADToBranch, + AllowDeltaBranchSetChange: *allowDeltaBranchSetChange, } if _, err := gitindex.IndexGitRepo(gitOpts); err != nil { diff --git a/cmd/zoekt-sourcegraph-indexserver/index.go b/cmd/zoekt-sourcegraph-indexserver/index.go index 69989c643..01e04bdf5 100644 --- a/cmd/zoekt-sourcegraph-indexserver/index.go +++ b/cmd/zoekt-sourcegraph-indexserver/index.go @@ -97,6 +97,14 @@ type indexArgs struct { // zoekt-git-index. Empty preserves current behavior. DeltaAdmissionMode string + // ResolveHEADToBranch asks zoekt-git-index to store an attached HEAD as its + // short branch name instead of the literal HEAD alias. + ResolveHEADToBranch bool + + // AllowDeltaBranchSetChange asks zoekt-git-index to allow delta builds when + // the requested branch names differ from existing shard metadata. + AllowDeltaBranchSetChange bool + // ShardMerging is true if we want zoekt-git-index to respect compound shards. 
ShardMerging bool } @@ -410,6 +418,12 @@ func indexRepo(ctx context.Context, gitDir string, sourcegraph Sourcegraph, o *i args = append(args, "-delta_admission_mode", o.DeltaAdmissionMode) } } + if o.ResolveHEADToBranch { + args = append(args, "-resolve_head_to_branch") + } + if o.AllowDeltaBranchSetChange { + args = append(args, "-allow_delta_branch_set_change") + } if len(o.LanguageMap) > 0 { var languageMap []string diff --git a/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go b/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go index c546dc3a5..246b9967d 100644 --- a/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go +++ b/cmd/zoekt-sourcegraph-indexserver/index_integration_test.go @@ -327,6 +327,8 @@ func gitIndexOptionsForTest(args *indexArgs, repoDir string) gitindex.Options { Branches: branches, DeltaShardNumberFallbackThreshold: args.DeltaShardNumberFallbackThreshold, DeltaAdmissionMode: args.DeltaAdmissionMode, + ResolveHEADToBranch: args.ResolveHEADToBranch, + AllowDeltaBranchSetChange: args.AllowDeltaBranchSetChange, } } @@ -346,6 +348,21 @@ func TestGitIndexOptionsForTestDoesNotResolveHEADAlias(t *testing.T) { } } +func TestGitIndexOptionsForTestPassesExperimentalDeltaFlags(t *testing.T) { + args := &indexArgs{ + ResolveHEADToBranch: true, + AllowDeltaBranchSetChange: true, + } + + opts := gitIndexOptionsForTest(args, "/tmp/repo.git") + if !opts.ResolveHEADToBranch { + t.Fatal("expected ResolveHEADToBranch to pass through") + } + if !opts.AllowDeltaBranchSetChange { + t.Fatal("expected AllowDeltaBranchSetChange to pass through") + } +} + func assertPartialBareFetch(t *testing.T, gitDir string, fixture *gitFetchFixture) { t.Helper() require := require.New(t) diff --git a/cmd/zoekt-sourcegraph-indexserver/index_test.go b/cmd/zoekt-sourcegraph-indexserver/index_test.go index 8832a2aa2..e2ed8bd1a 100644 --- a/cmd/zoekt-sourcegraph-indexserver/index_test.go +++ b/cmd/zoekt-sourcegraph-indexserver/index_test.go @@ -620,10 +620,12 
@@ func TestIndex(t *testing.T) { }, { name: "delta stats-v1 admission", args: indexArgs{ - Incremental: true, - IndexDir: "/data/index", - UseDelta: true, - DeltaAdmissionMode: gitindex.DeltaAdmissionModeStatsV1, + Incremental: true, + IndexDir: "/data/index", + UseDelta: true, + DeltaAdmissionMode: gitindex.DeltaAdmissionModeStatsV1, + ResolveHEADToBranch: true, + AllowDeltaBranchSetChange: true, IndexOptions: IndexOptions{ Name: "test/repo", CloneURL: "http://api.test/.internal/git/test/repo", @@ -654,7 +656,7 @@ func TestIndex(t *testing.T) { "git -C $TMPDIR/test%2Frepo.git config zoekt.public 0", "git -C $TMPDIR/test%2Frepo.git config zoekt.repoid 0", "git -C $TMPDIR/test%2Frepo.git config zoekt.tenantID 1", - "zoekt-git-index -submodules=false -incremental -branches HEAD -delta -delta_threshold 22 -delta_admission_mode stats-v1 -file_limit 1048576 -index /data/index -disable_ctags $TMPDIR/test%2Frepo.git", + "zoekt-git-index -submodules=false -incremental -branches HEAD -delta -delta_threshold 22 -delta_admission_mode stats-v1 -resolve_head_to_branch -allow_delta_branch_set_change -file_limit 1048576 -index /data/index -disable_ctags $TMPDIR/test%2Frepo.git", }, }} diff --git a/gitindex/delta_smart_branchset_stats_test.go b/gitindex/delta_smart_branchset_stats_test.go new file mode 100644 index 000000000..e910efc7c --- /dev/null +++ b/gitindex/delta_smart_branchset_stats_test.go @@ -0,0 +1,517 @@ +package gitindex + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/google/go-cmp/cmp" + + "github.com/sourcegraph/zoekt" + "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" +) + +const ( + smartStatsRepoName = "smart-branchset-repository" + smartStatsRepoID = 9851 +) + +func TestIndexGitRepo_StatsV1SmartHEADBranchSwitchTinyDiffAccepted(t *testing.T) { + t.Parallel() + + repoDir := smartStatsCreateNearIdenticalBranchRepo(t) + indexDir 
:= t.TempDir() + + initialOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + runGit(t, repoDir, "checkout", "feature-b") + + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + deltaOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + deltaBuildCalled, normalBuildCalled := smartStatsIndexWithSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + entry := smartStatsLastAdmissionLog(t, logPath) + t.Fatalf("expected near-identical HEAD branch switch to be accepted by stats-v1, got fallback with log entry %#v", entry) + } + + entry := smartStatsLastAdmissionLog(t, logPath) + smartStatsAssertAcceptedSmallCandidate(t, entry, 0.20) + smartStatsAssertJSONLessOrEqual(t, entry, "changed_or_deleted_paths", 2) + + cleanIndexDir := smartStatsCleanFullRebuild(t, deltaOpts) + smartStatsAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, []string{"feature-b", "release"}) + smartStatsAssertQuerySurfacesMatchClean(t, indexDir, cleanIndexDir, "feature-b", "feature-b-tiny-needle") + smartStatsAssertQuerySurfacesMatchClean(t, indexDir, cleanIndexDir, "release", "release-needle") + smartStatsAssertQuerySurfacesMatchClean(t, indexDir, cleanIndexDir, "feature-b", "large-shared-needle") + smartStatsAssertNoBranchHits(t, indexDir, "feature-a", "feature-a-tiny-needle") +} + +func TestIndexGitRepo_StatsV1SmartLinkedWorktreeBranchSwitchTinyDiffAccepted(t *testing.T) { + t.Parallel() + + _, worktreeA, worktreeB := smartStatsCreateNearIdenticalLinkedWorktrees(t) + indexDir := t.TempDir() + + initialOpts := smartStatsOptions(worktreeA, indexDir, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial 
IndexGitRepo(worktreeA): %v", err) + } + + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + deltaOpts := smartStatsOptions(worktreeB, indexDir, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + deltaBuildCalled, normalBuildCalled := smartStatsIndexWithSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if normalBuildCalled { + entry := smartStatsLastAdmissionLog(t, logPath) + t.Fatalf("expected linked worktree branch switch to be accepted by stats-v1, got fallback with log entry %#v", entry) + } + + entry := smartStatsLastAdmissionLog(t, logPath) + smartStatsAssertAcceptedSmallCandidate(t, entry, 0.20) + smartStatsAssertJSONLessOrEqual(t, entry, "changed_or_deleted_paths", 2) + + cleanIndexDir := smartStatsCleanFullRebuild(t, deltaOpts) + smartStatsAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, []string{"feature-b", "release"}) + smartStatsAssertQuerySurfacesMatchClean(t, indexDir, cleanIndexDir, "feature-b", "feature-b-tiny-needle") + smartStatsAssertQuerySurfacesMatchClean(t, indexDir, cleanIndexDir, "release", "release-needle") +} + +func TestIndexGitRepo_StatsV1SmartBranchRenameIdenticalTreeAccepted(t *testing.T) { + t.Parallel() + + repoDir := smartStatsCreateNearIdenticalBranchRepo(t) + indexDir := t.TempDir() + + initialOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + runGit(t, repoDir, "branch", "-m", "feature-renamed") + + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + deltaOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + deltaBuildCalled, normalBuildCalled := smartStatsIndexWithSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to 
be attempted") + } + if normalBuildCalled { + entry := smartStatsLastAdmissionLog(t, logPath) + t.Fatalf("expected identical-tree branch rename to be accepted by stats-v1, got fallback with log entry %#v", entry) + } + + entry := smartStatsLastAdmissionLog(t, logPath) + smartStatsAssertAcceptedSmallCandidate(t, entry, 0.01) + smartStatsAssertJSONLessOrEqual(t, entry, "changed_or_deleted_paths", 1) + + cleanIndexDir := smartStatsCleanFullRebuild(t, deltaOpts) + smartStatsAssertRepositoryBranchesMatchClean(t, indexDir, cleanIndexDir, []string{"feature-renamed", "release"}) + smartStatsAssertQuerySurfacesMatchClean(t, indexDir, cleanIndexDir, "feature-renamed", "feature-a-tiny-needle") + smartStatsAssertNoBranchHits(t, indexDir, "feature-a", "feature-a-tiny-needle") +} + +func TestIndexGitRepo_StatsV1SmartDoesNotRewriteUnchangedLargeFile(t *testing.T) { + t.Parallel() + + repoDir := smartStatsCreateNearIdenticalBranchRepo(t) + indexDir := t.TempDir() + + initialOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + runGit(t, repoDir, "checkout", "feature-b") + + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + deltaOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + _, normalBuildCalled := smartStatsIndexWithSpies(t, deltaOpts) + if normalBuildCalled { + entry := smartStatsLastAdmissionLog(t, logPath) + t.Fatalf("expected smart branch switch not to rewrite the unchanged large file, got fallback with log entry %#v", entry) + } + + entry := smartStatsLastAdmissionLog(t, logPath) + candidateBytes := smartStatsJSONNumber(t, entry, "candidate_indexed_bytes") + if candidateBytes >= float64(len(smartStatsLargeContent())/2) { + t.Fatalf("candidate_indexed_bytes = %.0f, want well below unchanged large file size %d; log entry %#v", 
candidateBytes, len(smartStatsLargeContent()), entry) + } + smartStatsAssertJSONLessOrEqual(t, entry, "changed_or_deleted_paths", 2) +} + +func TestIndexGitRepo_StatsV1IdenticalBranchAddKeepsFullRewriteFallback(t *testing.T) { + t.Parallel() + + repoDir := smartStatsCreateSingleBranchRepo(t) + indexDir := t.TempDir() + + initialOpts := smartStatsOptions(repoDir, indexDir, []string{"main"}) + initialOpts.ResolveHEADToBranch = false + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + runGit(t, repoDir, "branch", "copy", "main") + + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + deltaOpts := smartStatsOptions(repoDir, indexDir, []string{"main", "copy"}) + deltaOpts.ResolveHEADToBranch = false + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + deltaBuildCalled, normalBuildCalled := smartStatsIndexWithSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if !normalBuildCalled { + t.Fatal("expected adding an identical branch to keep falling back unless branch membership can be represented without rewriting documents") + } + + entry := smartStatsLastAdmissionLog(t, logPath) + statsBranchAssertJSONBool(t, entry, "accepted", false) + statsBranchAssertJSONStringContains(t, entry, "reason", "write indexed bytes ratio") +} + +func TestIndexGitRepo_StatsV1BranchReorderKeepsFullRewriteFallback(t *testing.T) { + t.Parallel() + + repoDir := smartStatsCreateNearIdenticalBranchRepo(t) + indexDir := t.TempDir() + + initialOpts := smartStatsOptions(repoDir, indexDir, []string{"HEAD", "release"}) + if _, err := IndexGitRepo(initialOpts); err != nil { + t.Fatalf("initial IndexGitRepo: %v", err) + } + + logPath := filepath.Join(t.TempDir(), "delta-admission.jsonl") + deltaOpts := smartStatsOptions(repoDir, indexDir, []string{"release", "HEAD"}) + deltaOpts.BuildOptions.IsDelta = true + deltaOpts.DeltaAdmissionLogPath = logPath + 
deltaBuildCalled, normalBuildCalled := smartStatsIndexWithSpies(t, deltaOpts) + if !deltaBuildCalled { + t.Fatal("expected delta build to be attempted") + } + if !normalBuildCalled { + t.Fatal("expected branch reordering to avoid the smart branch-set delta path and fall back") + } + + entry := smartStatsLastAdmissionLog(t, logPath) + statsBranchAssertJSONBool(t, entry, "accepted", false) + statsBranchAssertJSONStringContains(t, entry, "reason", "write indexed bytes ratio") +} + +func smartStatsOptions(repoDir, indexDir string, branches []string) Options { + return Options{ + RepoDir: repoDir, + Branches: append([]string(nil), branches...), + ResolveHEADToBranch: true, + AllowDeltaBranchSetChange: true, + DeltaAdmissionMode: DeltaAdmissionModeStatsV1, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{ + ID: smartStatsRepoID, + Name: smartStatsRepoName, + }, + IndexDir: indexDir, + DisableCTags: true, + }, + } +} + +func smartStatsIndexWithSpies(t *testing.T, opts Options) (deltaBuildCalled, normalBuildCalled bool) { + t.Helper() + + prepareDeltaSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, err error) { + deltaBuildCalled = true + return prepareDeltaBuild(options, repository) + } + prepareNormalSpy := func(options Options, repository *git.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, err error) { + normalBuildCalled = true + return prepareNormalBuild(options, repository) + } + + if _, err := indexGitRepo(opts, gitIndexConfig{ + prepareDeltaBuild: prepareDeltaSpy, + prepareNormalBuild: prepareNormalSpy, + }); err != nil { + t.Fatalf("IndexGitRepo(delta=%t, branches=%v): %v", opts.BuildOptions.IsDelta, opts.Branches, err) + } + return deltaBuildCalled, normalBuildCalled +} + +func smartStatsCleanFullRebuild(t *testing.T, opts Options) string { + t.Helper() + + 
cleanIndexDir := t.TempDir() + cleanOpts := opts + cleanOpts.BuildOptions.IndexDir = cleanIndexDir + cleanOpts.BuildOptions.IsDelta = false + cleanOpts.DeltaAdmissionLogPath = "" + if _, err := IndexGitRepo(cleanOpts); err != nil { + t.Fatalf("clean IndexGitRepo: %v", err) + } + return cleanIndexDir +} + +func smartStatsCreateNearIdenticalBranchRepo(t *testing.T) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "feature-a") + smartStatsConfigureRepo(t, repoDir) + smartStatsWriteFiles(t, repoDir, map[string]string{ + "large/shared.txt": smartStatsLargeContent(), + "tiny.txt": "feature-a-tiny-needle\n", + "common.txt": "common-needle\n", + }) + smartStatsWriteStableFiles(t, repoDir, 24) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "feature-a initial") + + runGit(t, repoDir, "branch", "release", "feature-a") + runGit(t, repoDir, "checkout", "release") + smartStatsWriteFiles(t, repoDir, map[string]string{ + "release.txt": "release-needle\n", + }) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "release") + + runGit(t, repoDir, "checkout", "-B", "feature-b", "feature-a") + smartStatsWriteFiles(t, repoDir, map[string]string{ + "tiny.txt": "feature-b-tiny-needle\n", + }) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "feature-b tiny change") + + runGit(t, repoDir, "checkout", "feature-a") + return repoDir +} + +func smartStatsCreateNearIdenticalLinkedWorktrees(t *testing.T) (repoDir, worktreeA, worktreeB string) { + t.Helper() + + root := t.TempDir() + repoDir = filepath.Join(root, "repo") + runGit(t, root, "init", "-b", "main", "repo") + smartStatsConfigureRepo(t, repoDir) + smartStatsWriteFiles(t, repoDir, map[string]string{ + "large/shared.txt": smartStatsLargeContent(), + "tiny.txt": "base-tiny-needle\n", + }) + smartStatsWriteStableFiles(t, repoDir, 24) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "base") + + runGit(t, repoDir, 
"checkout", "-B", "release", "main") + smartStatsWriteFiles(t, repoDir, map[string]string{ + "release.txt": "release-needle\n", + }) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "release") + + runGit(t, repoDir, "checkout", "-B", "feature-a", "main") + smartStatsWriteFiles(t, repoDir, map[string]string{ + "tiny.txt": "feature-a-tiny-needle\n", + }) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "feature-a") + + runGit(t, repoDir, "checkout", "-B", "feature-b", "feature-a") + smartStatsWriteFiles(t, repoDir, map[string]string{ + "tiny.txt": "feature-b-tiny-needle\n", + }) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "feature-b tiny change") + + runGit(t, repoDir, "checkout", "main") + worktreeA = filepath.Join(root, "worktree-a") + worktreeB = filepath.Join(root, "worktree-b") + runGit(t, repoDir, "worktree", "add", worktreeA, "feature-a") + runGit(t, repoDir, "worktree", "add", worktreeB, "feature-b") + return repoDir, worktreeA, worktreeB +} + +func smartStatsCreateSingleBranchRepo(t *testing.T) string { + t.Helper() + + repoDir := t.TempDir() + runGit(t, repoDir, "init", "-b", "main") + smartStatsConfigureRepo(t, repoDir) + smartStatsWriteFiles(t, repoDir, map[string]string{ + "large/shared.txt": smartStatsLargeContent(), + "tiny.txt": "main-tiny-needle\n", + }) + smartStatsWriteStableFiles(t, repoDir, 24) + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "main") + return repoDir +} + +func smartStatsConfigureRepo(t *testing.T, repoDir string) { + t.Helper() + + runGit(t, repoDir, "config", "zoekt.name", smartStatsRepoName) + runGit(t, repoDir, "config", "zoekt.repoid", fmt.Sprintf("%d", smartStatsRepoID)) +} + +func smartStatsWriteFiles(t *testing.T, repoDir string, files map[string]string) { + t.Helper() + + for name, content := range files { + path := filepath.Join(repoDir, name) + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + 
t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } + } +} + +func smartStatsWriteStableFiles(t *testing.T, repoDir string, n int) { + t.Helper() + + files := make(map[string]string, n) + for i := range n { + files[fmt.Sprintf("stable/file-%02d.txt", i)] = fmt.Sprintf("stable-needle-%02d\n", i) + } + smartStatsWriteFiles(t, repoDir, files) +} + +func smartStatsLargeContent() string { + return "large-shared-needle\n" + strings.Repeat("large unchanged payload\n", 8192) +} + +func smartStatsLastAdmissionLog(t *testing.T, path string) map[string]any { + t.Helper() + + entries := statsBranchReadAdmissionLogObjects(t, path) + if len(entries) == 0 { + t.Fatalf("expected at least one admission log entry in %q", path) + } + return entries[len(entries)-1] +} + +func smartStatsAssertAcceptedSmallCandidate(t *testing.T, entry map[string]any, maxWriteRatio float64) { + t.Helper() + + statsBranchAssertJSONBool(t, entry, "accepted", true) + statsBranchAssertJSONString(t, entry, "reason", "accepted") + ratio := smartStatsJSONNumber(t, entry, "write_bytes_ratio") + if ratio > maxWriteRatio { + t.Fatalf("write_bytes_ratio = %.4f, want <= %.4f in log entry %#v", ratio, maxWriteRatio, entry) + } +} + +func smartStatsAssertJSONLessOrEqual(t *testing.T, entry map[string]any, key string, want float64) { + t.Helper() + + got := smartStatsJSONNumber(t, entry, key) + if got > want { + t.Fatalf("admission log key %q = %.4f, want <= %.4f in %#v", key, got, want, entry) + } +} + +func smartStatsJSONNumber(t *testing.T, entry map[string]any, key string) float64 { + t.Helper() + + got, ok := entry[key].(float64) + if !ok { + t.Fatalf("admission log key %q = %#v, want number in %#v", key, entry[key], entry) + } + return got +} + +func smartStatsAssertRepositoryBranchesMatchClean(t *testing.T, indexDir, cleanIndexDir string, wantBranches []string) { + t.Helper() + + gotRepo := 
smartStatsIndexedRepository(t, indexDir) + cleanRepo := smartStatsIndexedRepository(t, cleanIndexDir) + if diff := cmp.Diff(wantBranches, smartStatsRepositoryBranchNames(gotRepo.Branches)); diff != "" { + t.Fatalf("delta branch names mismatch (-want +got):\n%s", diff) + } + if diff := cmp.Diff(cleanRepo.Branches, gotRepo.Branches); diff != "" { + t.Fatalf("delta branch metadata differs from clean rebuild (-clean +delta):\n%s", diff) + } +} + +func smartStatsAssertQuerySurfacesMatchClean(t *testing.T, indexDir, cleanIndexDir, branch, pattern string) { + t.Helper() + + contentQuery := &query.Substring{Pattern: pattern, Content: true} + smartStatsAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "unfiltered "+pattern, contentQuery) + smartStatsAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "branch:"+branch+" "+pattern, query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, contentQuery)) + smartStatsAssertQueryMatchesClean(t, indexDir, cleanIndexDir, "BranchesRepos:"+branch+" "+pattern, query.NewAnd(query.NewSingleBranchesRepos(branch, smartStatsRepoID), contentQuery)) +} + +func smartStatsAssertNoBranchHits(t *testing.T, indexDir, branch, pattern string) { + t.Helper() + + contentQuery := &query.Substring{Pattern: pattern, Content: true} + for _, q := range []query.Q{ + query.NewAnd(&query.Branch{Pattern: branch, Exact: true}, contentQuery), + query.NewAnd(query.NewSingleBranchesRepos(branch, smartStatsRepoID), contentQuery), + } { + if hits := statsBranchSearch(t, indexDir, q); len(hits) != 0 { + t.Fatalf("expected no hits for %s, got %+v", q.String(), hits) + } + } +} + +func smartStatsAssertQueryMatchesClean(t *testing.T, indexDir, cleanIndexDir, label string, q query.Q) { + t.Helper() + + got := statsBranchSearch(t, indexDir, q) + want := statsBranchSearch(t, cleanIndexDir, q) + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("%s differs from clean rebuild (-clean +delta):\n%s", label, diff) + } +} + +func smartStatsIndexedRepository(t 
*testing.T, indexDir string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + ID: smartStatsRepoID, + Name: smartStatsRepoName, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository %q not found", smartStatsRepoName) + } + return repo +} + +func smartStatsRepositoryBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} diff --git a/gitindex/index.go b/gitindex/index.go index 39a023621..ae5a47f64 100644 --- a/gitindex/index.go +++ b/gitindex/index.go @@ -1523,6 +1523,14 @@ func prepareBranchSetDeltaBuild(options Options, repository *git.Repository, exi return nil, nil, nil, err } + repos, branchVersions, changedOrDeletedPaths, ok, err := prepareSmartBranchSetDeltaBuild(options, repository, existingRepository) + if err != nil { + return nil, nil, nil, err + } + if ok { + return repos, branchVersions, changedOrDeletedPaths, nil + } + repos, branchVersions, err = prepareNormalBuildRecurse(options, repository, nil, false) if err != nil { return nil, nil, nil, fmt.Errorf("preparing branch-set delta live files: %w", err) @@ -1535,6 +1543,137 @@ func prepareBranchSetDeltaBuild(options Options, repository *git.Repository, exi return repos, branchVersions, changedOrDeletedPaths, nil } +func prepareSmartBranchSetDeltaBuild(options Options, repository *git.Repository, existingRepository *zoekt.Repository) (repos map[fileKey]BlobLocation, branchVersions map[string]map[string]plumbing.Hash, changedOrDeletedPaths []string, ok bool, err error) { + oldBranches := existingRepository.Branches + newBranches := options.BuildOptions.RepositoryDescription.Branches + if len(oldBranches) != len(newBranches) { + return nil, nil, nil, false, nil 
+ } + if branchNamesMoved(oldBranches, newBranches) { + return nil, nil, nil, false, nil + } + + rawURL := options.BuildOptions.RepositoryDescription.URL + u, err := url.Parse(rawURL) + if err != nil { + return nil, nil, nil, false, fmt.Errorf("parsing repository URL %q: %w", rawURL, err) + } + + type branchTree struct { + name string + tree *object.Tree + ig *ignore.Matcher + } + currentTrees := make([]branchTree, 0, len(newBranches)) + affectedPaths := map[string]struct{}{} + + for i := range oldBranches { + oldCommit, err := getCommit(repository, "", oldBranches[i].Version) + if err != nil { + return nil, nil, nil, false, fmt.Errorf("getting last indexed commit for branch %q: %w", oldBranches[i].Name, err) + } + oldTree, err := oldCommit.Tree() + if err != nil { + return nil, nil, nil, false, fmt.Errorf("getting last indexed git tree for branch %q: %w", oldBranches[i].Name, err) + } + + newCommit, err := getCommit(repository, "", newBranches[i].Version) + if err != nil { + return nil, nil, nil, false, fmt.Errorf("getting current commit for branch %q: %w", newBranches[i].Name, err) + } + newTree, err := newCommit.Tree() + if err != nil { + return nil, nil, nil, false, fmt.Errorf("getting current git tree for branch %q: %w", newBranches[i].Name, err) + } + ig, err := newIgnoreMatcher(newTree) + if err != nil { + return nil, nil, nil, false, fmt.Errorf("newIgnoreMatcher for branch %q: %w", newBranches[i].Name, err) + } + currentTrees = append(currentTrees, branchTree{ + name: newBranches[i].Name, + tree: newTree, + ig: ig, + }) + + changes, err := object.DiffTreeWithOptions(context.Background(), oldTree, newTree, &object.DiffTreeOptions{DetectRenames: false}) + if err != nil { + return nil, nil, nil, false, fmt.Errorf("generating changeset for branch %q -> %q: %w", oldBranches[i].Name, newBranches[i].Name, err) + } + for j, c := range changes { + oldFile, newFile, err := c.Files() + if err != nil { + return nil, nil, nil, false, fmt.Errorf("change #%d: getting 
files before and after change: %w", j, err) + } + if oldFile != nil { + if c.From.Name == ignore.IgnoreFile { + return nil, nil, nil, false, fmt.Errorf("%q file is not yet supported in delta builds", ignore.IgnoreFile) + } + affectedPaths[c.From.Name] = struct{}{} + } + if newFile != nil { + if c.To.Name == ignore.IgnoreFile { + return nil, nil, nil, false, fmt.Errorf("%q file is not yet supported in delta builds", ignore.IgnoreFile) + } + affectedPaths[c.To.Name] = struct{}{} + } + } + } + + changedOrDeletedPaths = make([]string, 0, len(affectedPaths)) + for path := range affectedPaths { + changedOrDeletedPaths = append(changedOrDeletedPaths, path) + } + sort.Strings(changedOrDeletedPaths) + + repos = make(map[fileKey]BlobLocation) + for _, path := range changedOrDeletedPaths { + for _, current := range currentTrees { + if current.ig.Match(path) { + continue + } + f, err := current.tree.File(path) + if err != nil { + if errors.Is(err, object.ErrFileNotFound) { + continue + } + return nil, nil, nil, false, fmt.Errorf("getting current file %q in branch %q: %w", path, current.name, err) + } + key := fileKey{Path: path, ID: f.ID()} + if existing, ok := repos[key]; ok { + existing.Branches = append(existing.Branches, current.name) + repos[key] = existing + } else { + repos[key] = BlobLocation{ + GitRepo: repository, + URL: u, + Branches: []string{current.name}, + } + } + } + } + + for key, info := range repos { + sort.Strings(info.Branches) + info.Branches = uniq(info.Branches) + repos[key] = info + } + + return repos, nil, changedOrDeletedPaths, true, nil +} + +func branchNamesMoved(oldBranches, newBranches []zoekt.RepositoryBranch) bool { + oldIndex := make(map[string]int, len(oldBranches)) + for i, branch := range oldBranches { + oldIndex[branch.Name] = i + } + for i, branch := range newBranches { + if old, ok := oldIndex[branch.Name]; ok && old != i { + return true + } + } + return false +} + func validateBranchSetDelta(options Options, existingRepository 
*zoekt.Repository) error { newBranches := repositoryBranchNamesForDelta(options.BuildOptions.RepositoryDescription.Branches) if len(newBranches) != len(uniqPreserveOrder(append([]string(nil), newBranches...))) { From 2366390ce00402b949f2eaa689766c144182142d Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 17 Apr 2026 13:24:50 -0700 Subject: [PATCH 09/10] Add worktree set git indexing mode Allow zoekt-git-index to accept multiple linked worktree paths and index their attached HEAD branches together as one multi-branch repository. Co-authored-by: Codex --- cmd/zoekt-git-index/main.go | 165 ++++++++++++++++++++++++---- cmd/zoekt-git-index/main_test.go | 178 +++++++++++++++++++++++++++++++ 2 files changed, 321 insertions(+), 22 deletions(-) create mode 100644 cmd/zoekt-git-index/main_test.go diff --git a/cmd/zoekt-git-index/main.go b/cmd/zoekt-git-index/main.go index b74874522..ac2265387 100644 --- a/cmd/zoekt-git-index/main.go +++ b/cmd/zoekt-git-index/main.go @@ -18,8 +18,10 @@ package main import ( "flag" + "fmt" "log" "os" + "os/exec" "path/filepath" "runtime/pprof" "strings" @@ -37,6 +39,7 @@ func run() int { allowMissing := flag.Bool("allow_missing_branches", false, "allow missing branches.") submodules := flag.Bool("submodules", true, "if set to false, do not recurse into submodules") branchesStr := flag.String("branches", "HEAD", "git branches to index.") + worktrees := flag.Bool("worktrees", false, "treat arguments as worktrees of one repository and index their attached HEAD branches together") branchPrefix := flag.String("prefix", "refs/heads/", "prefix for branch names") incremental := flag.Bool("incremental", true, "only index changed repositories") @@ -54,6 +57,12 @@ func run() int { cpuProfile := flag.String("cpu_profile", "", "write cpu profile to `file`") flag.Parse() + branchesFlagSet := false + flag.Visit(func(f *flag.Flag) { + if f.Name == "branches" { + branchesFlagSet = true + } + }) // Tune GOMAXPROCS to match Linux container CPU quota. 
_, _ = maxprocs.Set() @@ -86,22 +95,9 @@ func run() int { branches = strings.Split(*branchesStr, ",") } - gitRepos := map[string]string{} - for _, repoDir := range flag.Args() { - repoDir, err := filepath.Abs(repoDir) - if err != nil { - log.Fatal(err) - } - repoDir = filepath.Clean(repoDir) - - name := strings.TrimSuffix(repoDir, "/.git") - if *repoCacheDir != "" && strings.HasPrefix(name, *repoCacheDir) { - name = strings.TrimPrefix(name, *repoCacheDir+"/") - name = strings.TrimSuffix(name, ".git") - } else { - name = strings.TrimSuffix(filepath.Base(name), ".git") - } - gitRepos[repoDir] = name + gitRepos, err := gitRepoSpecs(flag.Args(), *repoCacheDir, branches, *resolveHEADToBranch, *worktrees, branchesFlagSet) + if err != nil { + log.Fatal(err) } opts.LanguageMap = make(ctags.LanguageMap) @@ -124,8 +120,8 @@ func run() int { profiler.Init("zoekt-git-index") exitStatus := 0 - for dir, name := range gitRepos { - opts.RepositoryDescription.Name = name + for _, repoSpec := range gitRepos { + opts.RepositoryDescription.Name = repoSpec.name gitOpts := gitindex.Options{ BranchPrefix: *branchPrefix, Incremental: *incremental, @@ -133,17 +129,17 @@ func run() int { RepoCacheDir: *repoCacheDir, AllowMissingBranch: *allowMissing, BuildOptions: *opts, - Branches: branches, - RepoDir: dir, + Branches: repoSpec.branches, + RepoDir: repoSpec.dir, DeltaShardNumberFallbackThreshold: *deltaShardNumberFallbackThreshold, DeltaAdmissionMode: *deltaAdmissionMode, DeltaAdmissionLogPath: *deltaAdmissionLogJSON, - ResolveHEADToBranch: *resolveHEADToBranch, + ResolveHEADToBranch: repoSpec.resolveHEADToBranch, AllowDeltaBranchSetChange: *allowDeltaBranchSetChange, } if _, err := gitindex.IndexGitRepo(gitOpts); err != nil { - log.Printf("indexGitRepo(%s, delta=%t): %v", dir, gitOpts.BuildOptions.IsDelta, err) + log.Printf("indexGitRepo(%s, delta=%t): %v", repoSpec.dir, gitOpts.BuildOptions.IsDelta, err) exitStatus = 1 } } @@ -151,6 +147,131 @@ func run() int { return exitStatus } +type 
gitRepoSpec struct { + dir string + name string + branches []string + resolveHEADToBranch bool +} + +func gitRepoSpecs(args []string, repoCacheDir string, branches []string, resolveHEADToBranch, worktrees, branchesFlagSet bool) ([]gitRepoSpec, error) { + if worktrees { + return gitRepoSpecsForWorktrees(args, repoCacheDir, branchesFlagSet) + } + + specs := make([]gitRepoSpec, 0, len(args)) + for _, repoDir := range args { + repoDir, err := filepath.Abs(repoDir) + if err != nil { + return nil, err + } + repoDir = filepath.Clean(repoDir) + specs = append(specs, gitRepoSpec{ + dir: repoDir, + name: inferRepoName(repoDir, repoCacheDir), + branches: append([]string(nil), branches...), + resolveHEADToBranch: resolveHEADToBranch, + }) + } + return specs, nil +} + +func gitRepoSpecsForWorktrees(args []string, repoCacheDir string, branchesFlagSet bool) ([]gitRepoSpec, error) { + if len(args) == 0 { + return nil, fmt.Errorf("-worktrees requires at least one worktree path") + } + if branchesFlagSet { + return nil, fmt.Errorf("-worktrees cannot be combined with -branches; branches are inferred from each worktree HEAD") + } + + var firstDir string + var commonDir string + seenBranches := map[string]struct{}{} + branches := make([]string, 0, len(args)) + for _, repoDir := range args { + repoDir, err := filepath.Abs(repoDir) + if err != nil { + return nil, err + } + repoDir = filepath.Clean(repoDir) + if firstDir == "" { + firstDir = repoDir + } + + dir, branch, err := worktreeCommonDirAndBranch(repoDir) + if err != nil { + return nil, err + } + if commonDir == "" { + commonDir = dir + } else if commonDir != dir { + return nil, fmt.Errorf("-worktrees arguments must share one git common dir: %q and %q differ", commonDir, dir) + } + if _, ok := seenBranches[branch]; ok { + continue + } + seenBranches[branch] = struct{}{} + branches = append(branches, branch) + } + + nameSource := firstDir + if commonDir != "" { + nameSource = commonDir + } + return []gitRepoSpec{{ + dir: firstDir, + 
name: inferRepoName(nameSource, repoCacheDir), + branches: branches, + resolveHEADToBranch: true, + }}, nil +} + +func worktreeCommonDirAndBranch(repoDir string) (commonDir, branch string, err error) { + commonDir, err = gitOutput(repoDir, "rev-parse", "--git-common-dir") + if err != nil { + return "", "", err + } + commonDir = strings.TrimSpace(commonDir) + if !filepath.IsAbs(commonDir) { + commonDir = filepath.Join(repoDir, commonDir) + } + commonDir, err = filepath.Abs(commonDir) + if err != nil { + return "", "", err + } + commonDir = filepath.Clean(commonDir) + + branch, err = gitOutput(repoDir, "symbolic-ref", "--short", "HEAD") + if err != nil { + return "", "", fmt.Errorf("worktree %q must have an attached HEAD: %w", repoDir, err) + } + branch = strings.TrimSpace(branch) + if branch == "" { + return "", "", fmt.Errorf("worktree %q must have an attached HEAD", repoDir) + } + return commonDir, branch, nil +} + +func gitOutput(repoDir string, args ...string) (string, error) { + cmd := exec.Command("git", append([]string{"-C", repoDir}, args...)...) 
+ out, err := cmd.CombinedOutput() + if err != nil { + return "", fmt.Errorf("git -C %q %s: %w: %s", repoDir, strings.Join(args, " "), err, strings.TrimSpace(string(out))) + } + return string(out), nil +} + +func inferRepoName(repoDir, repoCacheDir string) string { + name := strings.TrimSuffix(repoDir, "/.git") + if repoCacheDir != "" && strings.HasPrefix(name, repoCacheDir) { + name = strings.TrimPrefix(name, repoCacheDir+"/") + name = strings.TrimSuffix(name, ".git") + } else { + name = strings.TrimSuffix(filepath.Base(name), ".git") + } + return name +} + func main() { exitStatus := run() os.Exit(exitStatus) diff --git a/cmd/zoekt-git-index/main_test.go b/cmd/zoekt-git-index/main_test.go new file mode 100644 index 000000000..95191d4d0 --- /dev/null +++ b/cmd/zoekt-git-index/main_test.go @@ -0,0 +1,178 @@ +package main + +import ( + "context" + "os" + "os/exec" + "path/filepath" + "sort" + "testing" + + "github.com/google/go-cmp/cmp" + + "github.com/sourcegraph/zoekt" + "github.com/sourcegraph/zoekt/gitindex" + "github.com/sourcegraph/zoekt/index" + "github.com/sourcegraph/zoekt/query" + "github.com/sourcegraph/zoekt/search" +) + +func TestGitRepoSpecsForWorktreesIndexesAttachedHeadsTogether(t *testing.T) { + root := t.TempDir() + repoDir := filepath.Join(root, "repo") + runGit(t, root, "init", "-b", "main", "repo") + runGit(t, repoDir, "config", "zoekt.name", "multi-worktree-repo") + + writeFile(t, filepath.Join(repoDir, "seed.txt"), "seed\n") + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "seed") + + runGit(t, repoDir, "checkout", "-B", "feature-a") + writeFile(t, filepath.Join(repoDir, "a.txt"), "feature-a-needle\n") + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", "feature-a") + + runGit(t, repoDir, "checkout", "main") + runGit(t, repoDir, "checkout", "-B", "feature-b") + writeFile(t, filepath.Join(repoDir, "b.txt"), "feature-b-needle\n") + runGit(t, repoDir, "add", "-A") + runGit(t, repoDir, "commit", "-m", 
"feature-b") + + runGit(t, repoDir, "checkout", "main") + worktreeA := filepath.Join(root, "worktree-a") + worktreeB := filepath.Join(root, "worktree-b") + runGit(t, repoDir, "worktree", "add", worktreeA, "feature-a") + runGit(t, repoDir, "worktree", "add", worktreeB, "feature-b") + + specs, err := gitRepoSpecs([]string{worktreeA, worktreeB}, "", nil, false, true, false) + if err != nil { + t.Fatalf("gitRepoSpecs: %v", err) + } + if len(specs) != 1 { + t.Fatalf("got %d specs, want 1", len(specs)) + } + spec := specs[0] + if diff := cmp.Diff([]string{"feature-a", "feature-b"}, spec.branches); diff != "" { + t.Fatalf("branches mismatch (-want +got):\n%s", diff) + } + if !spec.resolveHEADToBranch { + t.Fatal("worktree mode should resolve HEAD to branch") + } + + indexDir := t.TempDir() + _, err = gitindex.IndexGitRepo(gitindex.Options{ + RepoDir: spec.dir, + Branches: spec.branches, + ResolveHEADToBranch: spec.resolveHEADToBranch, + Submodules: false, + Incremental: false, + BuildOptions: index.Options{ + RepositoryDescription: zoekt.Repository{Name: spec.name}, + IndexDir: indexDir, + DisableCTags: true, + }, + }) + if err != nil { + t.Fatalf("IndexGitRepo: %v", err) + } + + repo := indexedRepository(t, indexDir, "multi-worktree-repo") + if diff := cmp.Diff([]string{"feature-a", "feature-b"}, repositoryBranchNames(repo.Branches)); diff != "" { + t.Fatalf("indexed branch names mismatch (-want +got):\n%s", diff) + } + + assertSearchFiles(t, indexDir, query.NewAnd(&query.Branch{Pattern: "feature-a", Exact: true}, &query.Substring{Pattern: "feature-a-needle", Content: true}), []string{"a.txt"}) + assertSearchFiles(t, indexDir, query.NewAnd(&query.Branch{Pattern: "feature-b", Exact: true}, &query.Substring{Pattern: "feature-b-needle", Content: true}), []string{"b.txt"}) + assertSearchFiles(t, indexDir, query.NewAnd(&query.Branch{Pattern: "feature-a", Exact: true}, &query.Substring{Pattern: "feature-b-needle", Content: true}), nil) +} + +func 
TestGitRepoSpecsForWorktreesRejectsBranchesFlag(t *testing.T) { + if _, err := gitRepoSpecs([]string{"."}, "", nil, false, true, true); err == nil { + t.Fatal("expected -worktrees with explicit -branches to fail") + } +} + +func runGit(t *testing.T, cwd string, args ...string) string { + t.Helper() + + cmd := exec.Command("git", args...) + cmd.Dir = cwd + cmd.Env = append(os.Environ(), + "GIT_CONFIG_GLOBAL=", + "GIT_CONFIG_SYSTEM=", + "GIT_COMMITTER_NAME=Zoekt Test", + "GIT_COMMITTER_EMAIL=zoekt@example.com", + "GIT_AUTHOR_NAME=Zoekt Test", + "GIT_AUTHOR_EMAIL=zoekt@example.com", + ) + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("git %v: %v\n%s", args, err, out) + } + return string(out) +} + +func writeFile(t *testing.T, path, content string) { + t.Helper() + + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("MkdirAll(%q): %v", filepath.Dir(path), err) + } + if err := os.WriteFile(path, []byte(content), 0o644); err != nil { + t.Fatalf("WriteFile(%q): %v", path, err) + } +} + +func indexedRepository(t *testing.T, indexDir, name string) *zoekt.Repository { + t.Helper() + + opts := index.Options{ + IndexDir: indexDir, + RepositoryDescription: zoekt.Repository{ + Name: name, + }, + } + repo, _, ok, err := opts.FindRepositoryMetadata() + if err != nil { + t.Fatalf("FindRepositoryMetadata: %v", err) + } + if !ok { + t.Fatalf("FindRepositoryMetadata: repository %q not found", name) + } + return repo +} + +func repositoryBranchNames(branches []zoekt.RepositoryBranch) []string { + names := make([]string, 0, len(branches)) + for _, branch := range branches { + names = append(names, branch.Name) + } + return names +} + +func assertSearchFiles(t *testing.T, indexDir string, q query.Q, want []string) { + t.Helper() + + searcher, err := search.NewDirectorySearcher(indexDir) + if err != nil { + t.Fatalf("NewDirectorySearcher: %v", err) + } + defer searcher.Close() + + result, err := searcher.Search(context.Background(), q, 
&zoekt.SearchOptions{}) + if err != nil { + t.Fatalf("Search(%s): %v", q.String(), err) + } + + got := make([]string, 0, len(result.Files)) + for _, file := range result.Files { + got = append(got, file.FileName) + } + sort.Strings(got) + if want == nil { + want = []string{} + } + if diff := cmp.Diff(want, got); diff != "" { + t.Fatalf("search files mismatch for %s (-want +got):\n%s", q.String(), diff) + } +} From 08b3a46203a7c3f8de75c02083589c7de0c517e2 Mon Sep 17 00:00:00 2001 From: Eugene Brevdo Date: Fri, 24 Apr 2026 16:52:09 -0700 Subject: [PATCH 10/10] cleanup --- TEST_LIST.md | 282 --------------------------------------------------- 1 file changed, 282 deletions(-) delete mode 100644 TEST_LIST.md diff --git a/TEST_LIST.md b/TEST_LIST.md deleted file mode 100644 index db06974fe..000000000 --- a/TEST_LIST.md +++ /dev/null @@ -1,282 +0,0 @@ -# Multi-Branch Delta Test List - -Goal: define failing tests that should pass once delta indexing can update multi-branch indexes across branch-set changes. These tests should verify that the delta path is actually used and that branch-filtered lookups return the same results as a clean full rebuild. - -Branch-set-changing deltas are explicitly opt-in via `gitindex.Options.AllowDeltaBranchSetChange`. With that option enabled, unmatched old/new branch slots are allowed to use the conservative full-live delta path; the option itself is the caller's provenance that branch-set changes are intentional. Without that option, branch-set mismatches should preserve the old behavior and fall back to a normal rebuild. - -## Test Harness Expectations - -- Each test should build an initial full index, mutate the Git repo, request a delta build, and assert the delta build path was used. -- Each test should compare search results against a clean full rebuild of the final branch set. -- Search assertions should include unfiltered search and branch-filtered search for every old, new, unchanged, added, and removed branch involved. 
-- Search assertions should verify both positive hits and absence of stale hits. -- Tests should inspect repository metadata branch names and versions after the delta. -- Tests should inspect that old shards carry the expected file tombstones when paths changed or disappeared. -- Tests should cover both default branch filters and `query.BranchesRepos` because Sourcegraph branch filtering uses `BranchesRepos`. -- Where `ResolveHEADToBranch` is involved, tests should verify stored short branch names, not `HEAD`, unless the worktree is detached. - -## Baseline Existing-Behavior Controls - -1. Exact same multi-branch set, one branch content changes. - - Initial: `[main, release]` - - Final: `[main, release]` - - Change: `main: shared.txt` changes, `release: shared.txt` unchanged. - - Expect: delta used; `branch:main` sees new main content; `branch:release` sees old release content; old main content is absent. - -2. Exact same multi-branch set, one branch deletes a path that still exists on another branch. - - Initial: `[main, release]` - - Final: `[main, release]` - - Change: `main` deletes `shared.txt`, `release` keeps it. - - Expect: delta used; `branch:main` misses `shared.txt`; `branch:release` still finds `shared.txt`. - -3. Exact same multi-branch set, a file becomes identical across two branches. - - Initial: `main: a.txt = A`, `release: a.txt = B` - - Final: `main: a.txt = B`, `release: a.txt = B` - - Expect: delta used; one logical final document may serve both branches, but branch-filtered searches for both branches must find it. - -4. Exact same multi-branch set, a file diverges from shared content. - - Initial: `main` and `release` both have `a.txt = same`. - - Final: `main: a.txt = new-main`, `release: a.txt = same`. - - Expect: delta used; branch masks are split correctly. - -## Single Branch Rename - -5. Rename one branch in a two-branch index, no file changes on renamed branch. 
- - Initial: `[feature-a, release]` - - Final: `[feature-b, release]` - - `feature-b` points to the same commit as old `feature-a`. - - Expect: delta used; metadata says `[feature-b, release]`; `branch:feature-b` finds old feature content; `branch:feature-a` finds nothing. - -6. Rename one branch and modify a path on renamed branch. - - Initial: `[feature-a, release]` - - Final: `[feature-b, release]` - - Change: `feature-b: branch.txt` differs from old `feature-a`. - - Expect: delta used; new feature content found only on `feature-b`; old feature content absent; release content preserved. - -7. Rename one branch and delete a path on renamed branch. - - Initial: `[feature-a, release]` - - Final: `[feature-b, release]` - - Change: `feature-b` deletes `branch.txt`; `release` keeps unrelated content. - - Expect: delta used; `branch:feature-b` misses deleted path; stale `feature-a` path absent; release queries unchanged. - -8. Rename one branch and add a path on renamed branch. - - Initial: `[feature-a, release]` - - Final: `[feature-b, release]` - - Change: `feature-b` adds `new.txt`. - - Expect: delta used; `branch:feature-b` finds `new.txt`; `branch:feature-a` finds nothing. - -## Multiple Branch Renames - -9. Rename two branches at the same time with no file changes. - - Initial: `[feature-a, qa-a, release]` - - Final: `[feature-b, qa-b, release]` - - Expect: delta used; both renamed branch filters work; old branch filters return no results; release unchanged. - -10. Rename two branches with independent file changes. - - Initial: `[feature-a, qa-a, release]` - - Final: `[feature-b, qa-b, release]` - - Change: `feature-b` modifies `feature.txt`; `qa-b` modifies `qa.txt`. - - Expect: delta used; each branch sees only its new content; old content absent. - -11. Rename two branches where both end up sharing the same blob for a path. 
- - Initial: `feature-a: shared.txt = A`, `qa-a: shared.txt = B` - - Final: `feature-b: shared.txt = C`, `qa-b: shared.txt = C` - - Expect: delta used; final branch masks include both renamed branches. - -12. Rename branch order changes at the same time. - - Initial: `[feature-a, release, qa-a]` - - Final: `[qa-b, feature-b, release]` - - Expect: delta used when `AllowDeltaBranchSetChange` is set; branch-filtered search must match clean rebuild exactly. - -## Add Branches - -13. Add one new branch that is identical to an existing branch. - - Initial: `[main]` - - Final: `[main, release]` - - `release` points to same commit as `main`. - - Expect: delta used; existing docs gain `release` membership; `branch:release` finds the same files as `branch:main`. - -14. Add one new branch with one changed path. - - Initial: `[main]` - - Final: `[main, release]` - - `release` branches from `main` and modifies `release.txt`. - - Expect: delta used; `branch:release` sees release-only file/content; `branch:main` does not. - -15. Add one new branch with deletions relative to existing branch. - - Initial: `[main]` - - Final: `[main, slim]` - - `slim` deletes `large.txt`. - - Expect: delta used; `branch:slim` misses `large.txt`; `branch:main` still finds it. - -16. Add multiple new branches at once. - - Initial: `[main]` - - Final: `[main, release, dev]` - - `release` and `dev` each have distinct path changes. - - Expect: delta used; all three branch filters match clean rebuild. - -17. Add a new branch that shares some docs and diverges on others. - - Initial: `[main]` - - Final: `[main, feature]` - - `feature` changes `a.txt`, keeps `b.txt`, adds `c.txt`, deletes `d.txt`. - - Expect: delta used; branch masks correct for all four categories. - -## Remove Branches - -18. Remove one branch, no file content changes. 
- - Initial: `[main, release]` - - Final: `[main]` - - Expect: delta used; `branch:release` returns no results; unfiltered search does not duplicate stale release-only docs. - -19. Remove one branch that had branch-only files. - - Initial: `[main, release]` - - Final: `[main]` - - `release` had `release-only.txt`. - - Expect: delta used; release-only file absent from unfiltered and branch-filtered searches. - -20. Remove one branch while another remaining branch still has the same path with different content. - - Initial: `main: shared.txt = main`, `release: shared.txt = release` - - Final: `[main]` - - Expect: delta used; final unfiltered and `branch:main` searches show only main content. - -21. Remove multiple branches at once. - - Initial: `[main, release, dev, qa]` - - Final: `[main]` - - Expect: delta used; removed branch filters return no results; main results match clean rebuild. - -22. Remove a branch and modify a retained branch in the same update. - - Initial: `[main, release]` - - Final: `[main]` - - Change: `main` modifies `a.txt`; `release` removed. - - Expect: delta used; new main content present; old release-only content absent. - -## Combined Rename/Add/Remove - -23. Rename one branch and add one branch. - - Initial: `[feature-a, release]` - - Final: `[feature-b, release, dev]` - - Expect: delta used; renamed branch, unchanged branch, and added branch all match clean rebuild. - -24. Rename one branch and remove one branch. - - Initial: `[feature-a, release, dev]` - - Final: `[feature-b, release]` - - Expect: delta used; `feature-b` correct, `dev` absent, release unchanged. - -25. Add one branch and remove one branch. - - Initial: `[main, old-release]` - - Final: `[main, new-release]` - - Expect: delta used; old-release absent; new-release present. - -26. Rename multiple branches, add one branch, remove one branch. 
- - Initial: `[feature-a, qa-a, old-release, main]` - - Final: `[feature-b, qa-b, new-release, main]` - - Expect: delta used; branch-filtered searches for every final branch match clean rebuild; every removed branch returns no results. - -27. Branch count changes and branch order changes at the same time. - - Initial: `[main, release, dev]` - - Final: `[qa, main, release-renamed]` - - Expect: delta used only with explicit old-to-new mapping; results must be order-independent. - -## HEAD Resolution And Worktrees - -28. Single requested `HEAD` resolves from `feature-a` to `feature-b`. - - Initial indexed branch: `[feature-a]` - - Final indexed branch: `[feature-b]` - - Expect: delta used; metadata stores `feature-b`; old branch filter returns no results. - -29. Multi-branch `HEAD, release`, where `HEAD` resolves from `feature-a` to `feature-b`. - - Initial: `[feature-a, release]` - - Final: `[feature-b, release]` - - Expect: delta used; same assertions as single branch rename plus release preservation. - -30. Multi-branch `HEAD, release`, where `HEAD` resolves to `release`. - - Initial or final branch list would contain duplicate logical branches. - - Expect: either deterministic dedupe with correct branch masks or conservative full rebuild; define chosen behavior before implementation. - -31. Detached `HEAD` in a multi-branch index. - - Initial: `[feature-a, release]` - - Final: `[HEAD, release]` - - Expect: define whether detached HEAD can delta; if allowed, branch-filtered lookup for `HEAD` must match clean rebuild. - -32. Two linked worktrees of the same repo in the same index dir. - - Worktree A: `HEAD -> feature-a` - - Worktree B: `HEAD -> feature-b` - - Expect: indexes either have distinct repo identities or explicit behavior; branch metadata must not conflate both as `HEAD`. - -## Ambiguity And Safety Tests - -33. Unmatched old/new branch names with branch-set delta opt-in. 
- - Initial: `[foo, bar]` - - Final: `[baz, bar]` - - With `AllowDeltaBranchSetChange`, expect delta used; final results match clean rebuild and branch mapping is logged. - - Without `AllowDeltaBranchSetChange`, expect old behavior: full rebuild fallback. - -34. Many-to-one branch update with branch-set delta opt-in. - - Initial: `[a, b]` - - Final: `[c]` - - With `AllowDeltaBranchSetChange`, expect delta used via the conservative full-live path; final results match clean rebuild. - - Without the flag, expect fallback. - -35. One-to-many branch split with branch-set delta opt-in. - - Initial: `[a]` - - Final: `[b, c]` - - With `AllowDeltaBranchSetChange`, expect delta used via the conservative full-live path; final results match clean rebuild. - - Without the flag, expect fallback. - -36. Duplicate final branch names after wildcard expansion. - - Initial: `[main]` - - Final request expands to `[main, main]` - - Expect deterministic dedupe when branch-set changes are allowed, or full rebuild otherwise; never duplicate branch masks. - -37. Branch removed and re-added with the same name but unrelated history. - - Initial: `[release]` at old lineage - - Final: `[release]` at unrelated commit - - Expect: delta can still be correct if old and new commits are diffable or fallback if commit ancestry/object availability is insufficient. - -38. Old commit missing locally for a removed/renamed branch. - - Expect: full rebuild; no partial stale results. - -39. New branch commit missing locally after fetch/filter. - - Expect: full rebuild or hard error according to existing fetch semantics; no partial index. - -40. Compound shards present before multi-branch branch-set change. - - Expect: current unsupported compound-shard delta behavior remains conservative until separately implemented. - -## Stats And Observability - -41. DeltaStats after branch rename. 
- - Expect: live document/path counts match clean rebuild; physical counts include stacked debt; tombstones reflect affected paths. - -42. DeltaStats after adding branch identical to existing branch. - - Expect: live document count may not grow for shared docs, but branch membership changes must be reflected; counters match chosen semantics. - -43. DeltaStats after removing branch with branch-only files. - - Expect: live counts decrease; physical counts retain old docs until rebuild; tombstone/path debt increases appropriately. - -44. Admission log for successful multi-branch branch-set delta. - - Expect: JSONL contains mapping summary, accepted=true, old/new branch counts, and cost metrics. - -45. Admission log for fallback due ambiguous mapping. - - Expect `accepted=true` for unmatched branch mappings when `AllowDeltaBranchSetChange` is set and the conservative delta path is safe. - - Keep `accepted=false` fallback logging for structurally unsafe cases such as missing commits or unsupported compound shards. - -## Query Surfaces To Exercise - -46. Plain substring query across all branches. -47. `branch:` parsed query. -48. `query.BranchesRepos` query for one branch and repo ID. -49. `BranchesRepos` query with multiple branch/repo pairs. -50. File-name query after branch rename/add/remove. -51. Regex content query after branch rename/add/remove. -52. Case-sensitive content query after branch rename/add/remove. - -## Comparison Strategy - -For each successful delta test: - -1. Build initial index. -2. Mutate branch set and branch contents. -3. Run delta indexing with spies and require the delta path, not fallback. -4. Build a clean full index in a separate temp dir for the final branch set. -5. Compare search results for all relevant queries between delta and clean indexes. -6. Compare repository branch metadata. -7. Compare advisory live `DeltaStats` fields against the clean full index when `stats-v1` is enabled.