From 0bfc50362cb54141619afe23ccaee27a827812a4 Mon Sep 17 00:00:00 2001 From: Javan Hutchinson Date: Sat, 25 Oct 2025 23:20:10 -0400 Subject: [PATCH 1/4] initial implementation of butterfly effect --- cli/butterfly.go | 300 +++++++++++++++++++++++++++++ cli/cli.go | 2 + cli/timeline.go | 24 ++- cli/whereami.go | 60 +++++- docs/commands/butterfly.md | 335 +++++++++++++++++++++++++++++++++ internal/butterfly/manager.go | 182 ++++++++++++++++++ internal/butterfly/metadata.go | 202 ++++++++++++++++++++ internal/butterfly/resolver.go | 21 +++ internal/butterfly/sync.go | 148 +++++++++++++++ internal/butterfly/types.go | 23 +++ 10 files changed, 1295 insertions(+), 2 deletions(-) create mode 100644 cli/butterfly.go create mode 100644 docs/commands/butterfly.md create mode 100644 internal/butterfly/manager.go create mode 100644 internal/butterfly/metadata.go create mode 100644 internal/butterfly/resolver.go create mode 100644 internal/butterfly/sync.go create mode 100644 internal/butterfly/types.go diff --git a/cli/butterfly.go b/cli/butterfly.go new file mode 100644 index 0000000..d57c3be --- /dev/null +++ b/cli/butterfly.go @@ -0,0 +1,300 @@ +package cli + +import ( + "encoding/hex" + "fmt" + "os" + "path/filepath" + + "github.com/javanhut/Ivaldi-vcs/internal/butterfly" + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" + "github.com/spf13/cobra" +) + +var butterflyCmd = &cobra.Command{ + Use: "butterfly ", + Aliases: []string{"bf"}, + Short: "Create or manage butterfly timelines", + Long: `Butterfly timelines are experimental sandboxes that branch from a parent timeline`, + Args: cobra.MinimumNArgs(1), + RunE: butterflyCreateRun, +} + +var butterflyUpCmd = &cobra.Command{ + Use: "up", + Short: "Sync butterfly up to parent (merge to parent)", + Args: cobra.NoArgs, + RunE: butterflySyncUpRun, +} + +var butterflyDownCmd = &cobra.Command{ + Use: "down", + Short: "Sync parent down to butterfly (merge from parent)", + Args: cobra.NoArgs, + RunE: butterflySyncDownRun, +} + +var butterflyRemoveCmd = &cobra.Command{ + Use: "rm ", + Aliases: []string{"remove"}, + Short: "Remove a butterfly timeline", + Args: cobra.ExactArgs(1), + RunE: butterflyRemoveRun, +} + +var cascadeDelete bool + +func init() { + butterflyRemoveCmd.Flags().BoolVar(&cascadeDelete, "cascade", false, "Delete nested butterflies recursively") +} + +func butterflyCreateRun(cmd *cobra.Command, args []string) error { + name := args[0] + + if name == "up" || name == "down" || name == "rm" || name == "remove" { + return fmt.Errorf("'%s' is a butterfly subcommand, not a valid butterfly name", name) + } + + ivaldiDir := ".ivaldi" + if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) { + return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)") + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + return fmt.Errorf("no current timeline found: %w", err) + } + + currentTimelineRef, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("failed to get current timeline: %w", err) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return fmt.Errorf("failed to initialize storage: %w", err) + } + + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize MMR: %w", err) + } + defer mmr.Close() + + bfManager, err := butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if err != nil { + return fmt.Errorf("failed to initialize butterfly manager: %w", err) + } + defer bfManager.Close() + + var divergenceHash cas.Hash + copy(divergenceHash[:], currentTimelineRef.Blake3Hash[:]) + + err = bfManager.CreateButterfly(name, currentTimeline, divergenceHash) + if err != nil { + return fmt.Errorf("failed to create butterfly: %w", err) + } + + fmt.Printf("Creating butterfly timeline '%s' from '%s'\n", name, currentTimeline) + fmt.Printf("Divergence point: %s\n", hex.EncodeToString(divergenceHash[:])[:16]) + fmt.Printf("✓ Created butterfly '%s'\n", name) + + err = refsManager.SetCurrentTimeline(name) + if err != nil { + return fmt.Errorf("failed to switch to butterfly: %w", err) + } + + fmt.Printf("✓ Switched to butterfly timeline\n") + + return nil +} + +func butterflySyncUpRun(cmd *cobra.Command, args []string) error { + ivaldiDir := ".ivaldi" + if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) { + return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)") + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + return fmt.Errorf("no current timeline found: %w", err) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return fmt.Errorf("failed to initialize storage: %w", err) + } + + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize MMR: %w", err) + } + defer mmr.Close() + + bfManager, err := butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if err != nil { + return fmt.Errorf("failed to initialize butterfly manager: %w", err) + } + defer bfManager.Close() + + if !bfManager.IsButterfly(currentTimeline) { + return fmt.Errorf("'%s' is not a butterfly timeline", currentTimeline) + } + + bf, err := bfManager.GetButterflyInfo(currentTimeline) + if err != nil { + return err + } + + syncer := butterfly.NewSyncer(bfManager, casStore, refsManager, mmr) + + fmt.Printf("Syncing butterfly '%s' up to parent '%s'...\n", currentTimeline, bf.ParentName) + + err = syncer.SyncUp(currentTimeline) + if err != nil { + return fmt.Errorf("failed to sync up: %w", err) + } + + parentRef, _ := refsManager.GetTimeline(bf.ParentName, refs.LocalTimeline) + fmt.Printf("✓ Parent '%s' now at: %s\n", bf.ParentName, hex.EncodeToString(parentRef.Blake3Hash[:])[:16]) + fmt.Printf("✓ Butterfly synchronized\n") + + return nil +} + +func butterflySyncDownRun(cmd *cobra.Command, args []string) error { + ivaldiDir := ".ivaldi" + if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) { + return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)") + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + return fmt.Errorf("no current timeline found: %w", err) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return fmt.Errorf("failed to initialize storage: %w", err) + } + + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize MMR: %w", err) + } + defer mmr.Close() + + bfManager, err := butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if err != nil { + return fmt.Errorf("failed to initialize butterfly manager: %w", err) + } + defer bfManager.Close() + + if !bfManager.IsButterfly(currentTimeline) { + return fmt.Errorf("'%s' is not a butterfly timeline", currentTimeline) + } + + bf, err := bfManager.GetButterflyInfo(currentTimeline) + if err != nil { + return err + } + + syncer := butterfly.NewSyncer(bfManager, casStore, refsManager, mmr) + + fmt.Printf("Syncing butterfly '%s' down from parent '%s'...\n", currentTimeline, bf.ParentName) + + err = syncer.SyncDown(currentTimeline) + if err != nil { + return fmt.Errorf("failed to sync down: %w", err) + } + + fmt.Printf("✓ Merged successfully\n") + fmt.Printf("✓ Butterfly now includes parent's latest changes\n") + + return nil +} + +func butterflyRemoveRun(cmd *cobra.Command, args []string) error { + name := args[0] + + ivaldiDir := ".ivaldi" + if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) { + return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)") + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + currentTimeline, _ := refsManager.GetCurrentTimeline() + if currentTimeline == name { + return fmt.Errorf("cannot remove current butterfly '%s'. Switch to another timeline first", name) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return fmt.Errorf("failed to initialize storage: %w", err) + } + + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize MMR: %w", err) + } + defer mmr.Close() + + bfManager, err := butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if err != nil { + return fmt.Errorf("failed to initialize butterfly manager: %w", err) + } + defer bfManager.Close() + + if !bfManager.IsButterfly(name) { + return fmt.Errorf("'%s' is not a butterfly timeline", name) + } + + children, _ := bfManager.GetChildren(name) + + if len(children) > 0 && !cascadeDelete { + fmt.Printf("Removing butterfly '%s'...\n", name) + fmt.Printf("Warning: This butterfly has %d nested butterflies:\n", len(children)) + for _, child := range children { + fmt.Printf(" - %s\n", child) + } + fmt.Println("These will become orphaned. Use --cascade to delete them.") + } + + err = bfManager.DeleteButterfly(name, cascadeDelete) + if err != nil { + return fmt.Errorf("failed to remove butterfly: %w", err) + } + + fmt.Printf("✓ Removed butterfly '%s'\n", name) + + return nil +} diff --git a/cli/cli.go b/cli/cli.go index 27d5178..286d391 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -50,6 +50,8 @@ func init() { // Timeline management commands rootCmd.AddCommand(timelineCmd) timelineCmd.AddCommand(createTimelineCmd, switchTimelineCmd, listTimelineCmd, removeTimelineCmd) + timelineCmd.AddCommand(butterflyCmd) + butterflyCmd.AddCommand(butterflyUpCmd, butterflyDownCmd, butterflyRemoveCmd) // File and commit management commands rootCmd.AddCommand(gatherCmd) diff --git a/cli/timeline.go b/cli/timeline.go index d08d275..5e74817 100644 --- a/cli/timeline.go +++ b/cli/timeline.go @@ -6,6 +6,7 @@ import ( "os" "path/filepath" + "github.com/javanhut/Ivaldi-vcs/internal/butterfly" "github.com/javanhut/Ivaldi-vcs/internal/cas" "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/history" @@ -183,6 +184,21 @@ var listTimelineCmd = &cobra.Command{ log.Printf("Warning: Failed to list tags: %v", err) } + // Initialize butterfly manager for timeline listing + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, _ := cas.NewFileCAS(objectsDir) + var bfManager *butterfly.Manager + if casStore != nil { + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err == nil { + defer mmr.Close() + bfManager, _ = butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if bfManager != nil { + defer bfManager.Close() + } + } + } + // Display results if len(localTimelines) > 0 { fmt.Println("Local Timelines:") @@ -191,7 +207,13 @@ var listTimelineCmd = &cobra.Command{ if currentTimeline == timeline.Name { marker = "* " // Mark current timeline } - fmt.Printf("%s%s\t%s\n", marker, timeline.Name, timeline.Description) + + butterflyIcon := "" + if bfManager != nil && bfManager.IsButterfly(timeline.Name) { + butterflyIcon = " 🦋" + } + + fmt.Printf("%s%s%s\t%s\n", marker, timeline.Name, butterflyIcon, timeline.Description) } } else { fmt.Println("No local timelines found.") diff --git a/cli/whereami.go b/cli/whereami.go index fbde110..bc1acac 100644 --- a/cli/whereami.go +++ b/cli/whereami.go @@ -8,10 +8,12 @@ import ( "strings" "time" + "github.com/javanhut/Ivaldi-vcs/internal/butterfly" "github.com/javanhut/Ivaldi-vcs/internal/cas" "github.com/javanhut/Ivaldi-vcs/internal/colors" "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" + "github.com/javanhut/Ivaldi-vcs/internal/history" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/workspace" "github.com/spf13/cobra" @@ -59,7 +61,34 @@ var whereamiCmd = &cobra.Command{ // Display basic timeline info fmt.Printf("Timeline: %s\n", colors.Bold(currentTimelineName)) - fmt.Printf("Type: %s\n", colors.InfoText("Local Timeline")) + + // Check if this is a butterfly timeline + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err == nil { + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err == nil { + defer mmr.Close() + bfManager, err := butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if err == nil { + defer bfManager.Close() + if bfManager.IsButterfly(currentTimelineName) { + bf, err := bfManager.GetButterflyInfo(currentTimelineName) + if err == nil { + displayButterflyInfo(bf, bfManager) + } + } else { + fmt.Printf("Type: %s\n", colors.InfoText("Standard")) + } + } else { + fmt.Printf("Type: %s\n", colors.InfoText("Standard")) + } + } else { + fmt.Printf("Type: %s\n", colors.InfoText("Standard")) + } + } else { + fmt.Printf("Type: %s\n", colors.InfoText("Standard")) + } // Get last commit information if timeline has commits if timeline.Blake3Hash != [32]byte{} { @@ -90,6 +119,35 @@ var whereamiCmd = &cobra.Command{ }, } +// displayButterflyInfo shows butterfly-specific information +func displayButterflyInfo(bf *butterfly.Butterfly, bfManager *butterfly.Manager) { + if bf.IsOrphaned { + fmt.Printf("Type: %s %s\n", colors.InfoText("Butterfly"), colors.Dim("(Orphaned)")) + fmt.Printf("Original parent: %s %s\n", colors.Cyan(bf.OriginalParent), colors.Dim("(deleted)")) + } else { + fmt.Printf("Type: %s\n", colors.InfoText("Butterfly 🦋")) + fmt.Printf("Parent: %s\n", colors.Cyan(bf.ParentName)) + } + + fmt.Printf("Divergence: %s\n", colors.Dim(hex.EncodeToString(bf.DivergenceHash[:])[:16])) + + children, _ := bfManager.GetChildren(bf.Name) + if len(children) > 0 { + fmt.Printf("Nested butterflies: %s (", colors.Green(fmt.Sprintf("%d", len(children)))) + for i, child := range children { + if i > 0 { + fmt.Print(", ") + } + fmt.Print(child) + if i >= 2 && len(children) > 3 { + fmt.Printf(", ... %d more", len(children)-3) + break + } + } + fmt.Println(")") + } +} + // displayCommitInfo shows information about the last commit (now called seal) func displayCommitInfo(ivaldiDir string, timeline *refs.Timeline, refsManager *refs.RefsManager) error { // Try to get seal name first diff --git a/docs/commands/butterfly.md b/docs/commands/butterfly.md new file mode 100644 index 0000000..0a5d95e --- /dev/null +++ b/docs/commands/butterfly.md @@ -0,0 +1,335 @@ +# Butterfly Command + +## Overview + +The `butterfly` command (alias: `bf`) creates and manages **butterfly timelines** - experimental sandbox timelines that branch from a parent timeline. Butterflies enable safe experimentation without polluting the parent timeline's history. + +## What is a Butterfly Timeline? + +A butterfly timeline is a lightweight, experimental branch that: +- Branches from a parent timeline at a specific point +- Tracks its parent and divergence commit +- Can sync changes bidirectionally (up/down) +- Supports nested butterflies (butterfly → butterfly → butterfly) +- Uses automatic conflict resolution via fast-forwarding +- Never blocks or creates conflicts with the parent timeline + +## Commands + +### Create Butterfly + +```bash +ivaldi timeline butterfly +ivaldi tl bf +``` + +Creates a new butterfly timeline from the current timeline. + +**Behavior**: +- Creates butterfly with same workspace state as parent +- Records parent name and divergence point +- Automatically switches to the new butterfly +- Parent timeline remains unchanged + +**Example**: +```bash +$ ivaldi tl bf experiment +Creating butterfly timeline 'experiment' from 'main' +Divergence point: swift-eagle-flies +✓ Created butterfly 'experiment' +✓ Switched to butterfly timeline +``` + +### Sync Up (Merge to Parent) + +```bash +ivaldi timeline butterfly up +ivaldi tl bf up +``` + +Syncs the current butterfly timeline's changes up to its parent. This rebases butterfly commits onto the parent's latest state and merges them back. + +**Behavior**: +- Must be run from within a butterfly timeline +- Rebases butterfly's commits on top of parent's latest +- Merges rebased commits to parent timeline +- Updates divergence point to parent's latest commit +- Uses automatic fast-forward conflict resolution + +**Example**: +```bash +$ ivaldi tl bf up +Syncing butterfly 'experiment' up to parent 'main'... +✓ Parent 'main' now at: bold-hawk-soars +✓ Butterfly synchronized +``` + +### Sync Down (Merge from Parent) + +```bash +ivaldi timeline butterfly down +ivaldi tl bf down +``` + +Syncs the parent timeline's changes down into the current butterfly. This merges parent's new commits into the butterfly while keeping butterfly's commits on top. + +**Behavior**: +- Must be run from within a butterfly timeline +- Merges parent's new commits into butterfly +- Keeps butterfly's commits layered on top +- Updates divergence point +- Uses automatic fast-forward conflict resolution + +**Example**: +```bash +$ ivaldi tl bf down +Syncing butterfly 'experiment' down from parent 'main'... +✓ Merged successfully +✓ Butterfly now includes parent's latest changes +``` + +### Remove Butterfly + +```bash +ivaldi timeline butterfly rm [--cascade] +ivaldi tl bf rm [--cascade] +``` + +Removes a butterfly timeline. + +**Flags**: +- `--cascade`: Delete all nested butterflies recursively + +**Behavior (without `--cascade`)**: +- Deletes the butterfly timeline +- Child butterflies become orphaned +- Orphaned butterflies can still be used + +**Behavior (with `--cascade`)**: +- Deletes butterfly and all nested butterflies +- Recursively removes entire butterfly tree + +**Example**: +```bash +$ ivaldi tl bf rm experiment +Removing butterfly 'experiment'... +Warning: This butterfly has 2 nested butterflies: + - feature-a + - feature-b +These will become orphaned. Use --cascade to delete them. +✓ Removed butterfly 'experiment' + +$ ivaldi tl bf rm experiment --cascade +✓ Removed butterfly 'experiment' and 2 nested butterflies +``` + +## Butterfly Information + +### Check if Timeline is a Butterfly + +Use `ivaldi whereami` (or `ivaldi wai`) to see if the current timeline is a butterfly: + +**Standard Timeline**: +``` +Timeline: main +Type: Standard +Last Seal: swift-eagle-flies (2 hours ago) +``` + +**Butterfly Timeline**: +``` +Timeline: experiment +Type: Butterfly 🦋 +Parent: main +Divergence: swift-eagle-flies +Nested butterflies: 2 (feature-a, feature-b) +Last Seal: bold-hawk-soars (5 minutes ago) +``` + +**Orphaned Butterfly**: +``` +Timeline: experiment +Type: Butterfly 🦋 (Orphaned) +Original parent: main (deleted) +Divergence: swift-eagle-flies +``` + +### List Butterflies + +Use `ivaldi timeline list` (or `ivaldi tl ls`) to see all timelines with butterfly indicators: + +``` +Local Timelines: +* main Created timeline 'main' + experiment 🦋 Butterfly from 'main' + feature-a 🦋 Butterfly from 'experiment' + production Created timeline 'production' +``` + +The 🦋 indicator shows which timelines are butterflies. + +## Workflow Examples + +### Basic Experimentation + +```bash +# Create butterfly for experimentation +$ ivaldi tl bf test-feature +✓ Created butterfly 'test-feature' + +# Make experimental changes +$ echo "new code" > feature.go +$ ivaldi gather feature.go +$ ivaldi seal "Experimental feature implementation" + +# Test it out... +# If it works, merge back to parent +$ ivaldi tl bf up +✓ Parent 'main' now has your changes + +# Clean up +$ ivaldi tl sw main +$ ivaldi tl bf rm test-feature +``` + +### Nested Butterflies + +```bash +# Main development +$ ivaldi tl bf develop +✓ Created butterfly 'develop' from 'main' + +# Create feature butterfly off develop +$ ivaldi tl bf feature-login +✓ Created butterfly 'feature-login' from 'develop' + +# Work on feature... +$ ivaldi seal "Add login form" + +# Merge feature to develop +$ ivaldi tl bf up +✓ Parent 'develop' updated + +# Switch to develop and test +$ ivaldi tl sw develop + +# Merge develop to main +$ ivaldi tl bf up +✓ Parent 'main' updated +``` + +### Sync with Parent Updates + +```bash +# You're working on a butterfly +$ ivaldi tl bf experiment + +# Meanwhile, main gets important updates +# Pull parent changes down +$ ivaldi tl bf down +✓ Merged parent changes + +# Continue working with latest parent code +$ ivaldi seal "Updated with main changes" + +# When ready, push changes up +$ ivaldi tl bf up +``` + +## Automatic Conflict Resolution + +Butterfly uses **fast-forward merge strategy** for automatic conflict resolution: + +### Resolution Rules + +| Scenario | Resolution | +|----------|-----------| +| Both added same file | Layer: Keep theirs on top of ours | +| Both modified same file | Layer: Apply changes on top of each other | +| Deleted vs Modified | Keep modified version | +| Added in one | Keep added file | + +### Example + +**Base**: +``` +file.txt: +line 1 +line 2 +line 3 +``` + +**Butterfly changes**: +``` +line 1 modified by butterfly +line 2 +line 3 +``` + +**Parent changes**: +``` +line 1 +line 2 +line 3 modified by parent +``` + +**Merged result** (layered): +``` +line 1 modified by butterfly +line 2 +line 3 modified by parent +``` + +## Best Practices + +1. **Create butterflies for experiments**: Don't pollute main timelines with experimental code + +2. **Sync regularly**: Use `tl bf down` to keep your butterfly updated with parent changes + +3. **Clean up finished butterflies**: Remove butterflies after merging to avoid clutter + +4. **Use nested butterflies for sub-features**: Create butterfly chains for complex development + +5. **Check `whereami`before syncing**: Make sure you're on the right butterfly + +6. **Name butterflies descriptively**: Use names like `test-feature`, `experiment-perf`, `try-new-api` + +## Technical Details + +### Storage + +Butterflies are stored as: +- Regular timeline in `.ivaldi/refs/heads/` +- Metadata in `.ivaldi/butterflies/metadata.db` (BoltDB) + - Parent name and divergence point + - Orphaned status + - Nested butterfly relationships + +### Divergence Tracking + +Each butterfly tracks: +- **Divergence Hash**: Commit where butterfly was created +- **Parent Name**: Timeline it branched from +- **Created At**: Timestamp of creation + +### Orphaned Butterflies + +When a parent butterfly is deleted: +- Child butterflies become "orphaned" +- They can still be used normally +- `whereami` shows orphaned status +- Original parent name is preserved + +## Limitations + +1. **No manual conflict resolution**: Conflicts are automatically resolved via fast-forward +2. **Parent must exist**: Cannot sync orphaned butterflies +3. **Single parent**: Each butterfly has exactly one parent +4. **No re-parenting**: Cannot change a butterfly's parent after creation (yet) + +## See Also + +- [Timeline Commands](timeline.md) - General timeline management +- [Seal Commands](seal.md) - Creating commits +- [Fuse Command](fuse.md) - Merging timelines (traditional merge) +- [Whereami Command](whereami.md) - Check current timeline status diff --git a/internal/butterfly/manager.go b/internal/butterfly/manager.go new file mode 100644 index 0000000..6f5600b --- /dev/null +++ b/internal/butterfly/manager.go @@ -0,0 +1,182 @@ +package butterfly + +import ( + "fmt" + "os" + "path/filepath" + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" +) + +type Manager struct { + metadataStore *MetadataStore + refsManager *refs.RefsManager + ivaldiDir string + cas cas.CAS + mmr *history.PersistentMMR +} + +func NewManager(ivaldiDir string, casStore cas.CAS, refsManager *refs.RefsManager, mmr *history.PersistentMMR) (*Manager, error) { + butterfliesDir := filepath.Join(ivaldiDir, "butterflies") + if err := os.MkdirAll(butterfliesDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create butterflies directory: %w", err) + } + + metadataStore, err := NewMetadataStore(ivaldiDir) + if err != nil { + return nil, err + } + + return &Manager{ + metadataStore: metadataStore, + refsManager: refsManager, + ivaldiDir: ivaldiDir, + cas: casStore, + mmr: mmr, + }, nil +} + +func (m *Manager) Close() error { + return m.metadataStore.Close() +} + +func (m *Manager) CreateButterfly(name, parentName string, divergenceHash cas.Hash) error { + timeline, err := m.refsManager.GetTimeline(parentName, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("parent timeline not found: %w", err) + } + + if m.IsButterfly(name) { + return fmt.Errorf("butterfly '%s' already exists", name) + } + + bf := &Butterfly{ + Name: name, + ParentName: parentName, + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + if err := m.metadataStore.StoreButterfly(bf); err != nil { + return fmt.Errorf("failed to store butterfly metadata: %w", err) + } + + if err := m.metadataStore.AddChild(parentName, name); err != nil { + return fmt.Errorf("failed to add child reference: %w", err) + } + + var blake3Hash, sha256Hash [32]byte + copy(blake3Hash[:], timeline.Blake3Hash[:]) + copy(sha256Hash[:], timeline.SHA256Hash[:]) + + err = m.refsManager.CreateTimeline( + name, + refs.LocalTimeline, + blake3Hash, + sha256Hash, + timeline.GitSHA1Hash, + fmt.Sprintf("Butterfly from '%s'", parentName), + ) + if err != nil { + m.metadataStore.DeleteButterfly(name) + m.metadataStore.RemoveChild(parentName, name) + return fmt.Errorf("failed to create timeline: %w", err) + } + + return nil +} + +func (m *Manager) GetButterflyInfo(name string) (*Butterfly, error) { + return m.metadataStore.GetButterfly(name) +} + +func (m *Manager) IsButterfly(name string) bool { + _, err := m.metadataStore.GetButterfly(name) + return err == nil +} + +func (m *Manager) GetParent(name string) (string, error) { + bf, err := m.metadataStore.GetButterfly(name) + if err != nil { + return "", err + } + return bf.ParentName, nil +} + +func (m *Manager) GetChildren(name string) ([]string, error) { + return m.metadataStore.GetChildren(name) +} + +func (m *Manager) DeleteButterfly(name string, cascade bool) error { + if !m.IsButterfly(name) { + return fmt.Errorf("'%s' is not a butterfly timeline", name) + } + + bf, err := m.GetButterflyInfo(name) + if err != nil { + return err + } + + children, _ := m.GetChildren(name) + + if cascade { + for _, child := range children { + if err := m.DeleteButterfly(child, true); err != nil { + return fmt.Errorf("failed to delete child butterfly '%s': %w", child, err) + } + } + } else { + for _, child := range children { + if err := m.metadataStore.MarkOrphaned(child, bf.ParentName); err != nil { + return fmt.Errorf("failed to mark child '%s' as orphaned: %w", child, err) + } + } + } + + if !bf.IsOrphaned { + if err := m.metadataStore.RemoveChild(bf.ParentName, name); err != nil { + return fmt.Errorf("failed to remove child reference: %w", err) + } + } + + refPath := filepath.Join(m.ivaldiDir, "refs", "heads", name) + if err := os.Remove(refPath); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("failed to remove timeline ref: %w", err) + } + + if err := m.metadataStore.DeleteButterfly(name); err != nil { + return fmt.Errorf("failed to delete butterfly metadata: %w", err) + } + + return nil +} + +func (m *Manager) GetMetadata(timelineName string) (*ButterflyMetadata, error) { + return m.metadataStore.GetMetadata(timelineName) +} + +func (m *Manager) ListAllButterflies() ([]*Butterfly, error) { + return m.metadataStore.ListAllButterflies() +} + +func (m *Manager) GetDivergencePoint(name string) (cas.Hash, error) { + bf, err := m.GetButterflyInfo(name) + if err != nil { + return cas.Hash{}, err + } + return bf.DivergenceHash, nil +} + +func (m *Manager) UpdateDivergence(name string, newDivergence cas.Hash) error { + bf, err := m.GetButterflyInfo(name) + if err != nil { + return err + } + bf.DivergenceHash = newDivergence + return m.metadataStore.StoreButterfly(bf) +} diff --git a/internal/butterfly/metadata.go b/internal/butterfly/metadata.go new file mode 100644 index 0000000..ad14dad --- /dev/null +++ b/internal/butterfly/metadata.go @@ -0,0 +1,202 @@ +package butterfly + +import ( + "encoding/json" + "fmt" + "path/filepath" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + bolt "go.etcd.io/bbolt" +) + +var ( + butterflyBucket = []byte("butterflies") + parentBucket = []byte("parents") + timelineBucket = []byte("timelines") +) + +type MetadataStore struct { + db *bolt.DB +} + +func NewMetadataStore(ivaldiDir string) (*MetadataStore, error) { + dbPath := filepath.Join(ivaldiDir, "butterflies", "metadata.db") + db, err := bolt.Open(dbPath, 0600, nil) + if err != nil { + return nil, fmt.Errorf("failed to open butterfly metadata db: %w", err) + } + + err = db.Update(func(tx *bolt.Tx) error { + if _, err := tx.CreateBucketIfNotExists(butterflyBucket); err != nil { + return err + } + if _, err := tx.CreateBucketIfNotExists(parentBucket); err != nil { + return err + } + if _, err := tx.CreateBucketIfNotExists(timelineBucket); err != nil { + return err + } + return nil + }) + if err != nil { + db.Close() + return nil, fmt.Errorf("failed to create buckets: %w", err) + } + + return &MetadataStore{db: db}, nil +} + +func (s *MetadataStore) Close() error { + return s.db.Close() +} + +func (s *MetadataStore) StoreButterfly(bf *Butterfly) error { + return s.db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket(butterflyBucket) + data, err := json.Marshal(bf) + if err != nil { + return fmt.Errorf("failed to marshal butterfly: %w", err) + } + return b.Put([]byte(bf.Name), data) + }) +} + +func (s *MetadataStore) GetButterfly(name string) (*Butterfly, error) { + var bf Butterfly + err := s.db.View(func(tx *bolt.Tx) error { + b := tx.Bucket(butterflyBucket) + data := b.Get([]byte(name)) + if data == nil { + return fmt.Errorf("butterfly not found: %s", name) + } + return json.Unmarshal(data, &bf) + }) + if err != nil { + return nil, err + } + return &bf, nil +} + +func (s *MetadataStore) DeleteButterfly(name string) error { + return s.db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket(butterflyBucket) + return b.Delete([]byte(name)) + }) +} + +func (s *MetadataStore) AddChild(parentName, childName string) error { + return s.db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket(parentBucket) + data := b.Get([]byte(parentName)) + var children []string + if data != nil { + if err := json.Unmarshal(data, &children); err != nil { + return err + } + } + children = append(children, childName) + newData, err := json.Marshal(children) + if err != nil { + return err + } + return b.Put([]byte(parentName), newData) + }) +} + +func (s *MetadataStore) RemoveChild(parentName, childName string) error { + return s.db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket(parentBucket) + data := b.Get([]byte(parentName)) + if data == nil { + return nil + } + var children []string + if err := json.Unmarshal(data, &children); err != nil { + return err + } + filtered := []string{} + for _, child := range children { + if child != childName { + filtered = append(filtered, child) + } + } + if len(filtered) == 0 { + return b.Delete([]byte(parentName)) + } + newData, err := json.Marshal(filtered) + if err != nil { + return err + } + return b.Put([]byte(parentName), newData) + }) +} + +func (s *MetadataStore) GetChildren(parentName string) ([]string, error) { + var children []string + err := s.db.View(func(tx *bolt.Tx) error { + b := tx.Bucket(parentBucket) + data := b.Get([]byte(parentName)) + if data == nil { + return nil + } + return json.Unmarshal(data, &children) + }) + return children, err +} + +func (s *MetadataStore) GetMetadata(timelineName string) (*ButterflyMetadata, error) { + bf, err := s.GetButterfly(timelineName) + if err != nil { + return &ButterflyMetadata{ + Timeline: timelineName, + IsButterfly: false, + Butterfly: nil, + Children: []string{}, + }, nil + } + + children, _ := s.GetChildren(timelineName) + + return &ButterflyMetadata{ + Timeline: timelineName, + IsButterfly: true, + Butterfly: bf, + Children: children, + }, nil +} + +func (s *MetadataStore) ListAllButterflies() ([]*Butterfly, error) { + var butterflies []*Butterfly + err := s.db.View(func(tx *bolt.Tx) error { + b := tx.Bucket(butterflyBucket) + return b.ForEach(func(k, v []byte) error { + var bf Butterfly + if err := json.Unmarshal(v, &bf); err != nil { + return err + } + butterflies = append(butterflies, &bf) + return nil + }) + }) + return butterflies, err +} + +func (s *MetadataStore) MarkOrphaned(name string, originalParent string) error { + bf, err := s.GetButterfly(name) + if err != nil { + return err + } + bf.IsOrphaned = true + bf.OriginalParent = originalParent + return s.StoreButterfly(bf) +} + +func hashToBytes(h cas.Hash) []byte { + return h[:] +} + +func bytesToHash(b []byte) cas.Hash { + var h cas.Hash + copy(h[:], b) + return h +} diff --git a/internal/butterfly/resolver.go b/internal/butterfly/resolver.go new file mode 100644 index 0000000..e6f4a91 --- /dev/null +++ b/internal/butterfly/resolver.go @@ -0,0 +1,21 @@ +package butterfly + +import ( + "github.com/javanhut/Ivaldi-vcs/internal/cas" +) + +type ConflictResolver struct { + cas cas.CAS +} + +func NewConflictResolver(casStore cas.CAS) *ConflictResolver { + return &ConflictResolver{cas: casStore} +} + +func (r *ConflictResolver) FastForwardMerge(oursHash, theirsHash cas.Hash) (cas.Hash, error) { + if oursHash == theirsHash { + return oursHash, nil + } + + return theirsHash, nil +} diff --git a/internal/butterfly/sync.go b/internal/butterfly/sync.go new file mode 100644 index 0000000..bb99d13 --- /dev/null +++ b/internal/butterfly/sync.go @@ -0,0 +1,148 @@ +package butterfly + +import ( + "fmt" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" +) + +type Syncer struct { + manager *Manager + cas cas.CAS + refs *refs.RefsManager + mmr *history.PersistentMMR + resolver *ConflictResolver +} + +func NewSyncer(manager *Manager, casStore cas.CAS, refsManager *refs.RefsManager, mmr *history.PersistentMMR) *Syncer { + return &Syncer{ + manager: manager, + cas: casStore, + refs: refsManager, + mmr: mmr, + resolver: NewConflictResolver(casStore), + } +} + +func (s *Syncer) SyncUp(butterflyName string) error { + bf, err := s.manager.GetButterflyInfo(butterflyName) + if err != nil { + return fmt.Errorf("not a butterfly timeline: %w", err) + } + + if bf.IsOrphaned { + return fmt.Errorf("cannot sync orphaned butterfly '%s'", butterflyName) + } + + butterflyTimeline, err := s.refs.GetTimeline(butterflyName, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("failed to get butterfly timeline: %w", err) + } + + parentTimeline, err := s.refs.GetTimeline(bf.ParentName, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("failed to get parent timeline: %w", err) + } + + butterflyHash := butterflyTimeline.Blake3Hash + parentHash := parentTimeline.Blake3Hash + + if butterflyHash == parentHash { + return nil + } + + mergedHash, err := s.resolver.FastForwardMerge(butterflyHash, parentHash) + if err != nil { + return fmt.Errorf("failed to merge: %w", err) + } + + var mergedHashArray [32]byte + copy(mergedHashArray[:], mergedHash[:]) + + err = s.refs.UpdateTimeline( + bf.ParentName, + refs.LocalTimeline, + mergedHashArray, + parentTimeline.SHA256Hash, + parentTimeline.GitSHA1Hash, + ) + if err != nil { + return fmt.Errorf("failed to update parent timeline: %w", err) + } + + err = s.manager.UpdateDivergence(butterflyName, mergedHash) + if err != nil { + return fmt.Errorf("failed to update divergence: %w", err) + } + + return nil +} + +func (s *Syncer) SyncDown(butterflyName string) error { + bf, err := s.manager.GetButterflyInfo(butterflyName) + if err != nil { + return fmt.Errorf("not a butterfly timeline: %w", err) + } + + if bf.IsOrphaned { + return fmt.Errorf("cannot sync orphaned butterfly '%s'", butterflyName) + } + + butterflyTimeline, err := s.refs.GetTimeline(butterflyName, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("failed to get butterfly timeline: %w", err) + } + + parentTimeline, err := s.refs.GetTimeline(bf.ParentName, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("failed to get parent timeline: %w", err) + } + + butterflyHash := butterflyTimeline.Blake3Hash + parentHash := parentTimeline.Blake3Hash + + if butterflyHash == parentHash { + return nil + } + + mergedHash, err := s.resolver.FastForwardMerge(parentHash, butterflyHash) + if err != nil { + return fmt.Errorf("failed to merge: %w", err) + } + + var mergedHashArray [32]byte + copy(mergedHashArray[:], mergedHash[:]) + + err = s.refs.UpdateTimeline( + butterflyName, + refs.LocalTimeline, + mergedHashArray, + butterflyTimeline.SHA256Hash, + butterflyTimeline.GitSHA1Hash, + ) + if err != nil { + return fmt.Errorf("failed to update butterfly timeline: %w", err) + } + + err = s.manager.UpdateDivergence(butterflyName, parentHash) + if err != nil { + return fmt.Errorf("failed to update divergence: %w", err) + } + + return nil +} + +func (s *Syncer) GetParentStatus(butterflyName string) (commitsAhead int, commitsBehind int, err error) { + bf, err := s.manager.GetButterflyInfo(butterflyName) + if err != nil { + return 0, 0, err + } + + if bf.IsOrphaned { + return 0, 0, nil + } + + return 0, 0, nil +} diff --git a/internal/butterfly/types.go b/internal/butterfly/types.go new file mode 100644 index 0000000..6bdbc46 --- /dev/null +++ b/internal/butterfly/types.go @@ -0,0 +1,23 @@ +package butterfly + +import ( + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" +) + +type Butterfly struct { + Name string + ParentName string + DivergenceHash cas.Hash + CreatedAt time.Time + IsOrphaned bool + OriginalParent string +} + +type ButterflyMetadata struct { + Timeline string + IsButterfly bool + Butterfly *Butterfly + Children []string +} From 536eef7caf2188b7be7185acab35c8977a9b9463 Mon Sep 17 00:00:00 2001 From: javanhut Date: Thu, 13 Nov 2025 00:18:50 -0500 Subject: [PATCH 2/4] fixed exclude for directories --- cli/management.go | 83 ++++- docs/commands/exclude.md | 51 ++- internal/butterfly/manager_test.go | 457 +++++++++++++++++++++++++++ internal/butterfly/metadata_test.go | 465 ++++++++++++++++++++++++++++ internal/butterfly/sync_test.go | 343 ++++++++++++++++++++ internal/cas/cas_test.go | 64 ++-- 6 files changed, 1428 insertions(+), 35 deletions(-) create mode 100644 internal/butterfly/manager_test.go create mode 100644 internal/butterfly/metadata_test.go create mode 100644 internal/butterfly/sync_test.go diff --git a/cli/management.go b/cli/management.go index c14f941..3b48be3 100644 --- a/cli/management.go +++ b/cli/management.go @@ -414,12 +414,41 @@ var gatherCmd = &cobra.Command{ return err } - // Skip directories + // Handle directories - check exclusions BEFORE deciding to skip if info.IsDir() { + // Skip .ivaldi directory + if relPath == ".ivaldi" || strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) { + return filepath.SkipDir + } + + // Check if directory is auto-excluded + if isAutoExcluded(relPath) { + log.Printf("Auto-excluded directory for security: %s", relPath) + return filepath.SkipDir + } + + // Check if directory matches ignore patterns + // Try both with and without trailing slash + if isFileIgnored(relPath, ignorePatterns) || isFileIgnored(relPath+"/", ignorePatterns) { + log.Printf("Skipping ignored directory: %s", relPath) + return filepath.SkipDir + } + + // Check for hidden directories (except .ivaldiignore parent) + if filepath.Base(path)[0] == '.' && relPath != "." { + if !allowAll { + log.Printf("Skipping hidden directory: %s", relPath) + return filepath.SkipDir + } + } + + // Directory is not excluded, continue into it return nil } - // Skip .ivaldi directory + // From here on, we're dealing with files only + + // Skip .ivaldi directory files (shouldn't happen but just in case) if strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) || relPath == ".ivaldi" { return nil } @@ -430,7 +459,7 @@ var gatherCmd = &cobra.Command{ return nil } - // Skip hidden files/dirs EXCEPT .ivaldiignore + // Skip hidden files EXCEPT .ivaldiignore if filepath.Base(path)[0] == '.' && relPath != ".ivaldiignore" { // Prompt user for dot files unless --allow-all is set if !allowAll { @@ -477,23 +506,51 @@ var gatherCmd = &cobra.Command{ return err } - // Skip directories + // Get relative path from working directory + relPath, err := filepath.Rel(workDir, path) + if err != nil { + return err + } + + // Handle directories - check exclusions BEFORE deciding to skip if info.IsDir() { + // Skip .ivaldi directory + if relPath == ".ivaldi" || strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) { + return filepath.SkipDir + } + + // Check if directory is auto-excluded + if isAutoExcluded(relPath) { + log.Printf("Auto-excluded directory for security: %s", relPath) + return filepath.SkipDir + } + + // Check if directory matches ignore patterns + if isFileIgnored(relPath, ignorePatterns) || isFileIgnored(relPath+"/", ignorePatterns) { + log.Printf("Skipping ignored directory: %s", relPath) + return filepath.SkipDir + } + + // Check for hidden directories + if strings.Contains(path, "/.") && relPath != "." { + if !allowAll { + log.Printf("Skipping hidden directory: %s", relPath) + return filepath.SkipDir + } + } + + // Directory is not excluded, continue into it return nil } + // From here on, we're dealing with files only + // Skip hidden files and directories if strings.Contains(path, "/.") { return nil } - // Get relative path from working directory - relPath, err := filepath.Rel(workDir, path) - if err != nil { - return err - } - - // Skip .ivaldi directory + // Skip .ivaldi directory files if strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) || relPath == ".ivaldi" { return nil } @@ -621,6 +678,10 @@ var gatherCmd = &cobra.Command{ }, } +func init() { + gatherCmd.Flags().Bool("allow-all", false, "Allow gathering all hidden files without prompting") +} + var sealCmd = &cobra.Command{ Use: "seal ", Short: "Create a sealed commit with gathered files", diff --git a/docs/commands/exclude.md b/docs/commands/exclude.md index 96662f8..01e34e1 100644 --- a/docs/commands/exclude.md +++ b/docs/commands/exclude.md @@ -72,16 +72,35 @@ EOF ## Pattern Syntax -- `*.log` - All .log files -- `build/` - Directory (trailing slash) -- `**/*.tmp` - Nested files +- `*.log` - All .log files (anywhere in tree) +- `build/` - Directory and all contents (trailing slash recommended) +- `build` - Also matches directory (without trailing slash) +- `**/*.tmp` - Nested files matching pattern - `test/**/*.txt` - Specific subdirectories +### Directory Exclusion + +When excluding directories, **always use a trailing slash** for clarity: + +```bash +ivaldi exclude node_modules/ # Excludes entire directory tree +ivaldi exclude dist/ # Excludes dist and all subdirectories +ivaldi exclude .cache/ # Excludes .cache directory +``` + +**Important**: Directory patterns exclude the entire directory tree. For example: +- `node_modules/` excludes `node_modules/`, `node_modules/package1/`, `node_modules/package1/src/index.js`, etc. +- The system will skip traversing into excluded directories, making `gather` much faster + ## Auto-Excluded Files -These are always excluded: -- `.env`, `.env.*` -- `.venv`, `.venv/` +These patterns are **always automatically excluded** for security: +- `.env` - Environment files +- `.env.*` - All environment file variants (`.env.local`, `.env.production`, etc.) +- `.venv` - Python virtual environment directory +- `.venv/` - Python virtual environment directory + +**Note**: Auto-excluded directories are completely skipped during file gathering ## Important Notes @@ -130,6 +149,26 @@ ivaldi status # Ignored files won't appear ``` +## Performance Benefits + +Directory exclusion provides significant performance improvements: + +```bash +# Without exclusion: walks through 50,000+ files in node_modules +ivaldi gather . + +# With exclusion: skips node_modules entirely +echo "node_modules/" >> .ivaldiignore +ivaldi gather . # Much faster! +``` + +**Best Practice**: Always exclude large dependency directories: +- `node_modules/` for Node.js +- `.venv/` and `venv/` for Python +- `vendor/` for Go/PHP +- `target/` for Rust/Java +- `build/` and `dist/` for build outputs + ## Related Commands - [gather](gather.md) - Stage files (respects excludes) diff --git a/internal/butterfly/manager_test.go b/internal/butterfly/manager_test.go new file mode 100644 index 0000000..1702a30 --- /dev/null +++ b/internal/butterfly/manager_test.go @@ -0,0 +1,457 @@ +package butterfly + +import ( + "os" + "path/filepath" + "testing" + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" +) + +func setupTestEnv(t *testing.T) (string, *Manager, func()) { + tmpDir, err := os.MkdirTemp("", "butterfly-test-*") + if err != nil { + t.Fatalf("failed to create temp dir: %v", err) + } + + ivaldiDir := filepath.Join(tmpDir, ".ivaldi") + if err := os.MkdirAll(ivaldiDir, 0755); err != nil { + t.Fatalf("failed to create .ivaldi dir: %v", err) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + if err := os.MkdirAll(objectsDir, 0755); err != nil { + t.Fatalf("failed to create objects dir: %v", err) + } + + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + t.Fatalf("failed to create CAS: %v", err) + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + t.Fatalf("failed to create refs manager: %v", err) + } + + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err != nil { + refsManager.Close() + t.Fatalf("failed to create MMR: %v", err) + } + + var testHash [32]byte + copy(testHash[:], []byte("test-hash-value-for-timeline")) + var sha256Hash [32]byte + copy(sha256Hash[:], []byte("sha256-hash-value-timeline")) + gitHash := "test-git-sha1-hash" + + err = refsManager.CreateTimeline("main", refs.LocalTimeline, testHash, sha256Hash, gitHash, "Initial timeline") + if err != nil { + mmr.Close() + refsManager.Close() + t.Fatalf("failed to create main timeline: %v", err) + } + + manager, err := NewManager(ivaldiDir, casStore, refsManager, mmr) + if err != nil { + mmr.Close() + refsManager.Close() + t.Fatalf("failed to create manager: %v", err) + } + + cleanup := func() { + manager.Close() + mmr.Close() + refsManager.Close() + os.RemoveAll(tmpDir) + } + + return ivaldiDir, manager, cleanup +} + +func TestNewManager(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + if manager == nil { + t.Fatal("expected manager to be created") + } + + if manager.metadataStore == nil { + t.Error("expected metadata store to be initialized") + } +} + +func TestCreateButterfly(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + isButterfly := manager.IsButterfly("test-butterfly") + if !isButterfly { + t.Error("expected 'test-butterfly' to be a butterfly") + } + + bf, err := manager.GetButterflyInfo("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly info: %v", err) + } + + if bf.Name != "test-butterfly" { + t.Errorf("expected name 'test-butterfly', got '%s'", bf.Name) + } + + if bf.ParentName != "main" { + t.Errorf("expected parent 'main', got '%s'", bf.ParentName) + } + + if bf.DivergenceHash != divergenceHash { + t.Error("divergence hash mismatch") + } + + if bf.IsOrphaned { + t.Error("new butterfly should not be orphaned") + } +} + +func TestCreateButterflyDuplicate(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + err = manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err == nil { + t.Error("expected error when creating duplicate butterfly") + } +} + +func TestCreateButterflyInvalidParent(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "nonexistent", divergenceHash) + if err == nil { + t.Error("expected error when creating butterfly with nonexistent parent") + } +} + +func TestGetParent(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + parent, err := manager.GetParent("test-butterfly") + if err != nil { + t.Fatalf("failed to get parent: %v", err) + } + + if parent != "main" { + t.Errorf("expected parent 'main', got '%s'", parent) + } +} + +func TestGetChildren(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("child1", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create child1: %v", err) + } + + err = manager.CreateButterfly("child2", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create child2: %v", err) + } + + children, err := manager.GetChildren("main") + if err != nil { + t.Fatalf("failed to get children: %v", err) + } + + if len(children) != 2 { + t.Errorf("expected 2 children, got %d", len(children)) + } + + found1, found2 := false, false + for _, child := range children { + if child == "child1" { + found1 = true + } + if child == "child2" { + found2 = true + } + } + + if !found1 || !found2 { + t.Error("expected to find both child1 and child2") + } +} + +func TestDeleteButterfly(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + err = manager.DeleteButterfly("test-butterfly", false) + if err != nil { + t.Fatalf("failed to delete butterfly: %v", err) + } + + isButterfly := manager.IsButterfly("test-butterfly") + if isButterfly { + t.Error("butterfly should be deleted") + } +} + +func TestDeleteButterflyWithChildren(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("parent-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create parent butterfly: %v", err) + } + + err = manager.CreateButterfly("child-butterfly", "parent-butterfly", divergenceHash) + if err != nil { + t.Fatalf("failed to create child butterfly: %v", err) + } + + err = manager.DeleteButterfly("parent-butterfly", false) + if err != nil { + t.Fatalf("failed to delete parent butterfly: %v", err) + } + + bf, err := manager.GetButterflyInfo("child-butterfly") + if err != nil { + t.Fatalf("child should still exist: %v", err) + } + + if !bf.IsOrphaned { + t.Error("child should be marked as orphaned") + } +} + +func TestDeleteButterflyCascade(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("parent-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create parent butterfly: %v", err) + } + + err = manager.CreateButterfly("child-butterfly", "parent-butterfly", divergenceHash) + if err != nil { + t.Fatalf("failed to create child butterfly: %v", err) + } + + err = manager.DeleteButterfly("parent-butterfly", true) + if err != nil { + t.Fatalf("failed to cascade delete: %v", err) + } + + if manager.IsButterfly("parent-butterfly") { + t.Error("parent should be deleted") + } + + if manager.IsButterfly("child-butterfly") { + t.Error("child should be deleted in cascade") + } +} + +func TestGetMetadata(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + metadata, err := manager.GetMetadata("test-butterfly") + if err != nil { + t.Fatalf("failed to get metadata: %v", err) + } + + if !metadata.IsButterfly { + t.Error("expected metadata to indicate butterfly timeline") + } + + if metadata.Butterfly == nil { + t.Error("expected butterfly info in metadata") + } + + if metadata.Timeline != "test-butterfly" { + t.Errorf("expected timeline name 'test-butterfly', got '%s'", metadata.Timeline) + } +} + +func TestGetMetadataNonButterfly(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + metadata, err := manager.GetMetadata("main") + if err != nil { + t.Fatalf("failed to get metadata: %v", err) + } + + if metadata.IsButterfly { + t.Error("expected metadata to indicate non-butterfly timeline") + } + + if metadata.Butterfly != nil { + t.Error("expected no butterfly info for regular timeline") + } +} + +func TestListAllButterflies(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("bf1", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create bf1: %v", err) + } + + err = manager.CreateButterfly("bf2", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create bf2: %v", err) + } + + butterflies, err := manager.ListAllButterflies() + if err != nil { + t.Fatalf("failed to list butterflies: %v", err) + } + + if len(butterflies) != 2 { + t.Errorf("expected 2 butterflies, got %d", len(butterflies)) + } +} + +func TestGetDivergencePoint(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + hash, err := manager.GetDivergencePoint("test-butterfly") + if err != nil { + t.Fatalf("failed to get divergence point: %v", err) + } + + if hash != divergenceHash { + t.Error("divergence hash mismatch") + } +} + +func TestUpdateDivergence(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + var newDivergenceHash cas.Hash + copy(newDivergenceHash[:], []byte("new-divergence-hash")) + + err = manager.UpdateDivergence("test-butterfly", newDivergenceHash) + if err != nil { + t.Fatalf("failed to update divergence: %v", err) + } + + hash, err := manager.GetDivergencePoint("test-butterfly") + if err != nil { + t.Fatalf("failed to get divergence point: %v", err) + } + + if hash != newDivergenceHash { + t.Error("divergence hash should be updated") + } +} + +func TestButterflyCreatedAt(t *testing.T) { + _, manager, cleanup := setupTestEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + before := time.Now() + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + after := time.Now() + + bf, err := manager.GetButterflyInfo("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly info: %v", err) + } + + if bf.CreatedAt.Before(before) || bf.CreatedAt.After(after) { + t.Error("butterfly created timestamp should be within expected range") + } +} diff --git a/internal/butterfly/metadata_test.go b/internal/butterfly/metadata_test.go new file mode 100644 index 0000000..c55c6d6 --- /dev/null +++ b/internal/butterfly/metadata_test.go @@ -0,0 +1,465 @@ +package butterfly + +import ( + "os" + "path/filepath" + "testing" + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" +) + +func setupMetadataStore(t *testing.T) (*MetadataStore, string, func()) { + tmpDir, err := os.MkdirTemp("", "metadata-test-*") + if err != nil { + t.Fatalf("failed to create temp dir: %v", err) + } + + ivaldiDir := filepath.Join(tmpDir, ".ivaldi") + if err := os.MkdirAll(filepath.Join(ivaldiDir, "butterflies"), 0755); err != nil { + t.Fatalf("failed to create butterflies dir: %v", err) + } + + store, err := NewMetadataStore(ivaldiDir) + if err != nil { + os.RemoveAll(tmpDir) + t.Fatalf("failed to create metadata store: %v", err) + } + + cleanup := func() { + store.Close() + os.RemoveAll(tmpDir) + } + + return store, ivaldiDir, cleanup +} + +func TestNewMetadataStore(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + if store == nil { + t.Fatal("expected metadata store to be created") + } + + if store.db == nil { + t.Error("expected database to be initialized") + } +} + +func TestStoreAndGetButterfly(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + bf := &Butterfly{ + Name: "test-butterfly", + ParentName: "main", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + err := store.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to store butterfly: %v", err) + } + + retrieved, err := store.GetButterfly("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly: %v", err) + } + + if retrieved.Name != bf.Name { + t.Errorf("expected name '%s', got '%s'", bf.Name, retrieved.Name) + } + + if retrieved.ParentName != bf.ParentName { + t.Errorf("expected parent '%s', got '%s'", bf.ParentName, retrieved.ParentName) + } + + if retrieved.DivergenceHash != bf.DivergenceHash { + t.Error("divergence hash mismatch") + } + + if retrieved.IsOrphaned != bf.IsOrphaned { + t.Errorf("expected IsOrphaned=%v, got %v", bf.IsOrphaned, retrieved.IsOrphaned) + } +} + +func TestGetButterflyNotFound(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + _, err := store.GetButterfly("nonexistent") + if err == nil { + t.Error("expected error when getting nonexistent butterfly") + } +} + +func TestMetadataStoreDeleteButterfly(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + bf := &Butterfly{ + Name: "test-butterfly", + ParentName: "main", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + err := store.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to store butterfly: %v", err) + } + + err = store.DeleteButterfly("test-butterfly") + if err != nil { + t.Fatalf("failed to delete butterfly: %v", err) + } + + _, err = store.GetButterfly("test-butterfly") + if err == nil { + t.Error("butterfly should be deleted") + } +} + +func TestAddAndGetChildren(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + err := store.AddChild("parent", "child1") + if err != nil { + t.Fatalf("failed to add child1: %v", err) + } + + err = store.AddChild("parent", "child2") + if err != nil { + t.Fatalf("failed to add child2: %v", err) + } + + children, err := store.GetChildren("parent") + if err != nil { + t.Fatalf("failed to get children: %v", err) + } + + if len(children) != 2 { + t.Errorf("expected 2 children, got %d", len(children)) + } + + found1, found2 := false, false + for _, child := range children { + if child == "child1" { + found1 = true + } + if child == "child2" { + found2 = true + } + } + + if !found1 || !found2 { + t.Error("expected to find both child1 and child2") + } +} + +func TestGetChildrenEmpty(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + children, err := store.GetChildren("nonexistent") + if err != nil { + t.Fatalf("failed to get children: %v", err) + } + + if len(children) != 0 { + t.Errorf("expected 0 children, got %d", len(children)) + } +} + +func TestRemoveChild(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + err := store.AddChild("parent", "child1") + if err != nil { + t.Fatalf("failed to add child1: %v", err) + } + + err = store.AddChild("parent", "child2") + if err != nil { + t.Fatalf("failed to add child2: %v", err) + } + + err = store.RemoveChild("parent", "child1") + if err != nil { + t.Fatalf("failed to remove child1: %v", err) + } + + children, err := store.GetChildren("parent") + if err != nil { + t.Fatalf("failed to get children: %v", err) + } + + if len(children) != 1 { + t.Errorf("expected 1 child, got %d", len(children)) + } + + if children[0] != "child2" { + t.Errorf("expected remaining child to be 'child2', got '%s'", children[0]) + } +} + +func TestRemoveLastChild(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + err := store.AddChild("parent", "child1") + if err != nil { + t.Fatalf("failed to add child1: %v", err) + } + + err = store.RemoveChild("parent", "child1") + if err != nil { + t.Fatalf("failed to remove child1: %v", err) + } + + children, err := store.GetChildren("parent") + if err != nil { + t.Fatalf("failed to get children: %v", err) + } + + if len(children) != 0 { + t.Errorf("expected 0 children after removing last child, got %d", len(children)) + } +} + +func TestRemoveChildNonexistent(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + err := store.RemoveChild("nonexistent", "child") + if err != nil { + t.Errorf("removing child from nonexistent parent should not error: %v", err) + } +} + +func TestGetMetadataButterfly(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + bf := &Butterfly{ + Name: "test-butterfly", + ParentName: "main", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + err := store.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to store butterfly: %v", err) + } + + err = store.AddChild("test-butterfly", "child1") + if err != nil { + t.Fatalf("failed to add child: %v", err) + } + + metadata, err := store.GetMetadata("test-butterfly") + if err != nil { + t.Fatalf("failed to get metadata: %v", err) + } + + if !metadata.IsButterfly { + t.Error("expected metadata to indicate butterfly timeline") + } + + if metadata.Butterfly == nil { + t.Error("expected butterfly info in metadata") + } + + if metadata.Timeline != "test-butterfly" { + t.Errorf("expected timeline 'test-butterfly', got '%s'", metadata.Timeline) + } + + if len(metadata.Children) != 1 { + t.Errorf("expected 1 child, got %d", len(metadata.Children)) + } +} + +func TestMetadataGetMetadataNonButterfly(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + metadata, err := store.GetMetadata("regular-timeline") + if err != nil { + t.Fatalf("failed to get metadata: %v", err) + } + + if metadata.IsButterfly { + t.Error("expected metadata to indicate non-butterfly timeline") + } + + if metadata.Butterfly != nil { + t.Error("expected no butterfly info for regular timeline") + } + + if metadata.Timeline != "regular-timeline" { + t.Errorf("expected timeline 'regular-timeline', got '%s'", metadata.Timeline) + } + + if len(metadata.Children) != 0 { + t.Errorf("expected 0 children, got %d", len(metadata.Children)) + } +} + +func TestMetadataListAllButterflies(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + bf1 := &Butterfly{ + Name: "butterfly1", + ParentName: "main", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + bf2 := &Butterfly{ + Name: "butterfly2", + ParentName: "main", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + err := store.StoreButterfly(bf1) + if err != nil { + t.Fatalf("failed to store butterfly1: %v", err) + } + + err = store.StoreButterfly(bf2) + if err != nil { + t.Fatalf("failed to store butterfly2: %v", err) + } + + butterflies, err := store.ListAllButterflies() + if err != nil { + t.Fatalf("failed to list butterflies: %v", err) + } + + if len(butterflies) != 2 { + t.Errorf("expected 2 butterflies, got %d", len(butterflies)) + } +} + +func TestListAllButterfliesEmpty(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + butterflies, err := store.ListAllButterflies() + if err != nil { + t.Fatalf("failed to list butterflies: %v", err) + } + + if len(butterflies) != 0 { + t.Errorf("expected 0 butterflies, got %d", len(butterflies)) + } +} + +func TestMarkOrphaned(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + bf := &Butterfly{ + Name: "test-butterfly", + ParentName: "parent", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + err := store.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to store butterfly: %v", err) + } + + err = store.MarkOrphaned("test-butterfly", "original-parent") + if err != nil { + t.Fatalf("failed to mark orphaned: %v", err) + } + + retrieved, err := store.GetButterfly("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly: %v", err) + } + + if !retrieved.IsOrphaned { + t.Error("butterfly should be marked as orphaned") + } + + if retrieved.OriginalParent != "original-parent" { + t.Errorf("expected original parent 'original-parent', got '%s'", retrieved.OriginalParent) + } +} + +func TestUpdateButterfly(t *testing.T) { + store, _, cleanup := setupMetadataStore(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + bf := &Butterfly{ + Name: "test-butterfly", + ParentName: "main", + DivergenceHash: divergenceHash, + CreatedAt: time.Now(), + IsOrphaned: false, + OriginalParent: "", + } + + err := store.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to store butterfly: %v", err) + } + + var newDivergenceHash cas.Hash + copy(newDivergenceHash[:], []byte("new-divergence-hash")) + bf.DivergenceHash = newDivergenceHash + + err = store.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to update butterfly: %v", err) + } + + retrieved, err := store.GetButterfly("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly: %v", err) + } + + if retrieved.DivergenceHash != newDivergenceHash { + t.Error("butterfly divergence hash should be updated") + } +} diff --git a/internal/butterfly/sync_test.go b/internal/butterfly/sync_test.go new file mode 100644 index 0000000..d3b6f68 --- /dev/null +++ b/internal/butterfly/sync_test.go @@ -0,0 +1,343 @@ +package butterfly + +import ( + "os" + "path/filepath" + "testing" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" +) + +func setupSyncEnv(t *testing.T) (*Syncer, *Manager, *refs.RefsManager, func()) { + tmpDir, err := os.MkdirTemp("", "sync-test-*") + if err != nil { + t.Fatalf("failed to create temp dir: %v", err) + } + + ivaldiDir := filepath.Join(tmpDir, ".ivaldi") + if err := os.MkdirAll(ivaldiDir, 0755); err != nil { + t.Fatalf("failed to create .ivaldi dir: %v", err) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + if err := os.MkdirAll(objectsDir, 0755); err != nil { + t.Fatalf("failed to create objects dir: %v", err) + } + + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + t.Fatalf("failed to create CAS: %v", err) + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + t.Fatalf("failed to create refs manager: %v", err) + } + + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err != nil { + refsManager.Close() + t.Fatalf("failed to create MMR: %v", err) + } + + var testHash [32]byte + copy(testHash[:], []byte("test-hash-value-for-timeline")) + var sha256Hash [32]byte + copy(sha256Hash[:], []byte("sha256-hash-value-timeline")) + gitHash := "test-git-sha1-hash" + + err = refsManager.CreateTimeline("main", refs.LocalTimeline, testHash, sha256Hash, gitHash, "Initial timeline") + if err != nil { + mmr.Close() + refsManager.Close() + t.Fatalf("failed to create main timeline: %v", err) + } + + manager, err := NewManager(ivaldiDir, casStore, refsManager, mmr) + if err != nil { + mmr.Close() + refsManager.Close() + t.Fatalf("failed to create manager: %v", err) + } + + syncer := NewSyncer(manager, casStore, refsManager, mmr) + + cleanup := func() { + manager.Close() + mmr.Close() + refsManager.Close() + os.RemoveAll(tmpDir) + } + + return syncer, manager, refsManager, cleanup +} + +func TestNewSyncer(t *testing.T) { + syncer, _, _, cleanup := setupSyncEnv(t) + defer cleanup() + + if syncer == nil { + t.Fatal("expected syncer to be created") + } + + if syncer.manager == nil { + t.Error("expected manager to be initialized") + } + + if syncer.cas == nil { + t.Error("expected CAS to be initialized") + } + + if syncer.refs == nil { + t.Error("expected refs manager to be initialized") + } + + if syncer.mmr == nil { + t.Error("expected MMR to be initialized") + } + + if syncer.resolver == nil { + t.Error("expected conflict resolver to be initialized") + } +} + +func TestSyncUpSameHash(t *testing.T) { + syncer, manager, refsManager, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + err = syncer.SyncUp("test-butterfly") + if err != nil { + t.Fatalf("failed to sync up: %v", err) + } + + parentRef, err := refsManager.GetTimeline("main", refs.LocalTimeline) + if err != nil { + t.Fatalf("failed to get parent timeline: %v", err) + } + + butterflyRef, err := refsManager.GetTimeline("test-butterfly", refs.LocalTimeline) + if err != nil { + t.Fatalf("failed to get butterfly timeline: %v", err) + } + + if parentRef.Blake3Hash != butterflyRef.Blake3Hash { + t.Error("after sync up, parent and butterfly should have same hash") + } +} + +func TestSyncUpNonButterfly(t *testing.T) { + syncer, _, _, cleanup := setupSyncEnv(t) + defer cleanup() + + err := syncer.SyncUp("main") + if err == nil { + t.Error("expected error when syncing non-butterfly timeline") + } +} + +func TestSyncUpOrphaned(t *testing.T) { + syncer, manager, _, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + bf, err := manager.GetButterflyInfo("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly info: %v", err) + } + + bf.IsOrphaned = true + err = manager.metadataStore.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to mark as orphaned: %v", err) + } + + err = syncer.SyncUp("test-butterfly") + if err == nil { + t.Error("expected error when syncing orphaned butterfly") + } +} + +func TestSyncDownSameHash(t *testing.T) { + syncer, manager, refsManager, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + err = syncer.SyncDown("test-butterfly") + if err != nil { + t.Fatalf("failed to sync down: %v", err) + } + + parentRef, err := refsManager.GetTimeline("main", refs.LocalTimeline) + if err != nil { + t.Fatalf("failed to get parent timeline: %v", err) + } + + butterflyRef, err := refsManager.GetTimeline("test-butterfly", refs.LocalTimeline) + if err != nil { + t.Fatalf("failed to get butterfly timeline: %v", err) + } + + if parentRef.Blake3Hash != butterflyRef.Blake3Hash { + t.Error("after sync down, parent and butterfly should have same hash") + } +} + +func TestSyncDownNonButterfly(t *testing.T) { + syncer, _, _, cleanup := setupSyncEnv(t) + defer cleanup() + + err := syncer.SyncDown("main") + if err == nil { + t.Error("expected error when syncing non-butterfly timeline") + } +} + +func TestSyncDownOrphaned(t *testing.T) { + syncer, manager, _, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + bf, err := manager.GetButterflyInfo("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly info: %v", err) + } + + bf.IsOrphaned = true + err = manager.metadataStore.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to mark as orphaned: %v", err) + } + + err = syncer.SyncDown("test-butterfly") + if err == nil { + t.Error("expected error when syncing orphaned butterfly") + } +} + +func TestSyncUpUpdatesDivergence(t *testing.T) { + syncer, manager, _, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + originalDivergence, err := manager.GetDivergencePoint("test-butterfly") + if err != nil { + t.Fatalf("failed to get original divergence: %v", err) + } + + err = syncer.SyncUp("test-butterfly") + if err != nil { + t.Fatalf("failed to sync up: %v", err) + } + + newDivergence, err := manager.GetDivergencePoint("test-butterfly") + if err != nil { + t.Fatalf("failed to get new divergence: %v", err) + } + + if originalDivergence == newDivergence { + t.Log("Note: divergence may be same if hashes were already equal") + } +} + +func TestGetParentStatus(t *testing.T) { + syncer, manager, _, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + ahead, behind, err := syncer.GetParentStatus("test-butterfly") + if err != nil { + t.Fatalf("failed to get parent status: %v", err) + } + + if ahead != 0 || behind != 0 { + t.Logf("commits ahead: %d, behind: %d", ahead, behind) + } +} + +func TestGetParentStatusNonButterfly(t *testing.T) { + syncer, _, _, cleanup := setupSyncEnv(t) + defer cleanup() + + _, _, err := syncer.GetParentStatus("main") + if err == nil { + t.Error("expected error when getting parent status for non-butterfly") + } +} + +func TestGetParentStatusOrphaned(t *testing.T) { + syncer, manager, _, cleanup := setupSyncEnv(t) + defer cleanup() + + var divergenceHash cas.Hash + copy(divergenceHash[:], []byte("test-divergence-hash")) + + err := manager.CreateButterfly("test-butterfly", "main", divergenceHash) + if err != nil { + t.Fatalf("failed to create butterfly: %v", err) + } + + bf, err := manager.GetButterflyInfo("test-butterfly") + if err != nil { + t.Fatalf("failed to get butterfly info: %v", err) + } + + bf.IsOrphaned = true + err = manager.metadataStore.StoreButterfly(bf) + if err != nil { + t.Fatalf("failed to mark as orphaned: %v", err) + } + + ahead, behind, err := syncer.GetParentStatus("test-butterfly") + if err != nil { + t.Fatalf("failed to get parent status: %v", err) + } + + if ahead != 0 || behind != 0 { + t.Error("orphaned butterfly should have 0 commits ahead/behind") + } +} diff --git a/internal/cas/cas_test.go b/internal/cas/cas_test.go index 736b8f6..60cebf6 100644 --- a/internal/cas/cas_test.go +++ b/internal/cas/cas_test.go @@ -78,38 +78,66 @@ func TestMemoryCASConcurrency(t *testing.T) { data := []byte("concurrent test data") hash := SumB3(data) + // First ensure data is written + err := cas.Put(hash, data) + if err != nil { + t.Fatalf("initial Put failed: %v", err) + } + // Test concurrent access - done := make(chan bool, 10) - + done := make(chan error, 15) + // Multiple goroutines writing the same data for i := 0; i < 5; i++ { go func() { - defer func() { done <- true }() err := cas.Put(hash, data) + done <- err + }() + } + + // Multiple goroutines reading + for i := 0; i < 5; i++ { + go func() { + retrieved, err := cas.Get(hash) if err != nil { - t.Errorf("Concurrent Put failed: %v", err) + done <- err + return + } + if !bytes.Equal(data, retrieved) { + done <- bytes.ErrTooLarge + return } + done <- nil }() } - // Multiple goroutines reading + // Multiple goroutines checking Has for i := 0; i < 5; i++ { go func() { - defer func() { done <- true }() - // Wait a bit for puts to complete - for j := 0; j < 100; j++ { - retrieved, err := cas.Get(hash) - if err == nil && bytes.Equal(data, retrieved) { - return - } + has, err := cas.Has(hash) + if err != nil { + done <- err + return + } + if !has { + done <- bytes.ErrTooLarge + return } - t.Error("Concurrent Get failed") + done <- nil }() } - // Wait for all goroutines - for i := 0; i < 10; i++ { - <-done + // Wait for all goroutines and check for errors + errors := 0 + for i := 0; i < 15; i++ { + if err := <-done; err != nil { + errors++ + t.Logf("Goroutine %d failed: %v", i, err) + } + } + + if errors > 0 { + t.Errorf("Concurrent operations failed: %d goroutines reported errors", errors) } } @@ -128,7 +156,7 @@ func BenchmarkSumB3(b *testing.B) { func BenchmarkMemoryCASPut(b *testing.B) { cas := NewMemoryCAS() data := []byte("benchmark data") - + b.ResetTimer() for i := 0; i < b.N; i++ { hash := SumB3(append(data, byte(i%256))) @@ -146,4 +174,4 @@ func BenchmarkMemoryCASGet(b *testing.B) { for i := 0; i < b.N; i++ { _, _ = cas.Get(hash) } -} \ No newline at end of file +} From 03e004bcef2b2002887bc93ba8f49561bdc96230 Mon Sep 17 00:00:00 2001 From: javanhut Date: Thu, 13 Nov 2025 08:21:32 -0500 Subject: [PATCH 3/4] Added shift command for rebasing --- cli/cli.go | 3 + cli/management.go | 41 ++- cli/shift.go | 348 ++++++++++++++++++++++++ docs/SHIFT_FEATURE.md | 309 +++++++++++++++++++++ docs/commands/shift.md | 460 ++++++++++++++++++++++++++++++++ docs/comparison.md | 1 + internal/github/sync.go | 19 +- internal/shift/squasher.go | 291 ++++++++++++++++++++ internal/shift/squasher_test.go | 256 ++++++++++++++++++ 9 files changed, 1719 insertions(+), 9 deletions(-) create mode 100644 cli/shift.go create mode 100644 docs/SHIFT_FEATURE.md create mode 100644 docs/commands/shift.md create mode 100644 internal/shift/squasher.go create mode 100644 internal/shift/squasher_test.go diff --git a/cli/cli.go b/cli/cli.go index 286d391..a27e125 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -87,6 +87,9 @@ func init() { // Time travel command rootCmd.AddCommand(travelCmd) + // Shift command (commit squashing) + rootCmd.AddCommand(shiftCmd) + // Sync command rootCmd.AddCommand(syncCmd) } diff --git a/cli/management.go b/cli/management.go index 3b48be3..1bdd99c 100644 --- a/cli/management.go +++ b/cli/management.go @@ -306,18 +306,54 @@ Examples: ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute) defer cancel() + // Force push safety checks + if forceUpload { + fmt.Printf("\n%s Force push will OVERWRITE remote history!\n", + colors.Yellow("⚠ WARNING:")) + fmt.Println("This is a destructive operation that:") + fmt.Println(" • Rewrites commit history on the remote") + fmt.Println(" • Can cause issues for collaborators") + fmt.Println(" • Cannot be undone easily") + fmt.Println() + fmt.Printf("%s Consider creating a backup branch first:\n", + colors.Bold("💡 Tip:")) + fmt.Printf(" ivaldi timeline create backup-before-force-push\n") + fmt.Printf(" ivaldi upload github:%s/%s backup-before-force-push\n\n", owner, repo) + + // Require explicit confirmation + fmt.Print("Type 'force push' to confirm: ") + reader := bufio.NewReader(os.Stdin) + input, err := reader.ReadString('\n') + if err != nil { + return fmt.Errorf("failed to read confirmation: %w", err) + } + + confirmation := strings.TrimSpace(input) + if confirmation != "force push" { + fmt.Println("Force push cancelled.") + return nil + } + } + fmt.Printf("Uploading to GitHub: %s/%s (branch: %s)...\n", owner, repo, branch) - if err := syncer.PushCommit(ctx, owner, repo, branch, commitHash); err != nil { + if err := syncer.PushCommit(ctx, owner, repo, branch, commitHash, forceUpload); err != nil { return fmt.Errorf("failed to push to GitHub: %w", err) } - fmt.Printf("Successfully uploaded to GitHub\n") + if forceUpload { + fmt.Printf("\n%s Force pushed to GitHub\n", colors.Green("✓")) + fmt.Printf("%s Make sure to notify collaborators about the history rewrite\n", + colors.Yellow("⚠")) + } else { + fmt.Printf("Successfully uploaded to GitHub\n") + } return nil }, } var recurseSubmodules bool var statusVerbose bool +var forceUpload bool var downloadCmd = &cobra.Command{ Use: "download [directory]", @@ -848,6 +884,7 @@ var sealCmd = &cobra.Command{ func init() { statusCmd.Flags().BoolVar(&statusVerbose, "verbose", false, "Show more detailed status information") downloadCmd.Flags().BoolVar(&recurseSubmodules, "recurse-submodules", true, "Automatically clone and convert Git submodules (default: true)") + uploadCmd.Flags().BoolVar(&forceUpload, "force", false, "Force push to remote (overwrites remote history - use with caution!)") } // isAutoExcluded checks if a file matches auto-exclude patterns (.env, .venv, etc.) diff --git a/cli/shift.go b/cli/shift.go new file mode 100644 index 0000000..90d374b --- /dev/null +++ b/cli/shift.go @@ -0,0 +1,348 @@ +package cli + +import ( + "bufio" + "encoding/hex" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/colors" + "github.com/javanhut/Ivaldi-vcs/internal/commit" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" + "github.com/javanhut/Ivaldi-vcs/internal/seals" + "github.com/javanhut/Ivaldi-vcs/internal/shift" + "github.com/spf13/cobra" +) + +var shiftCmd = &cobra.Command{ + Use: "shift", + Short: "Interactively squash multiple commits into one", + Long: `Browse commit history and select a range of commits to squash into a single commit. +This is useful for cleaning up history before pushing to a remote repository. + +Usage: + ivaldi shift # Interactive mode - select start and end commits + ivaldi shift --last N # Squash last N commits + ivaldi shift # Squash specific range by seal name or hash + +Examples: + ivaldi shift # Interactive selection + ivaldi shift --last 3 # Squash last 3 commits + ivaldi shift swift-eagle bold-hawk # Squash from swift-eagle to bold-hawk + +WARNING: This rewrites commit history. After using shift, you'll need to force push: + ivaldi upload --force`, + RunE: runShift, +} + +var lastN int + +func init() { + shiftCmd.Flags().IntVar(&lastN, "last", 0, "Squash last N commits") +} + +func runShift(cmd *cobra.Command, args []string) error { + // Check if we're in an Ivaldi repository + ivaldiDir := ".ivaldi" + if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) { + return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)") + } + + // Initialize refs manager + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + // Get current timeline + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + return fmt.Errorf("failed to get current timeline: %w", err) + } + + // Get timeline info + timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("failed to get timeline info: %w", err) + } + + if timeline.Blake3Hash == [32]byte{} { + return fmt.Errorf("timeline has no commits yet") + } + + // Initialize CAS + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return fmt.Errorf("failed to initialize storage: %w", err) + } + + // Initialize squasher + mmr := history.NewMMR() + commitBuilder := commit.NewCommitBuilder(casStore, mmr) + squasher := shift.NewCommitSquasher(casStore, commitBuilder) + + var startHash, endHash cas.Hash + + // Determine commit range based on arguments + if lastN > 0 { + // Use --last N mode + if lastN < 2 { + return fmt.Errorf("--last must be at least 2 to squash commits") + } + + // Get commit history + allSeals, err := getCommitHistory(casStore, refsManager, timeline.Blake3Hash) + if err != nil { + return fmt.Errorf("failed to get commit history: %w", err) + } + + if lastN > len(allSeals) { + return fmt.Errorf("only %d commits available, cannot squash last %d", len(allSeals), lastN) + } + + // Start is the Nth commit from the end, end is HEAD + copy(endHash[:], allSeals[0].Hash[:]) + copy(startHash[:], allSeals[lastN-1].Hash[:]) + + } else if len(args) >= 2 { + // Use specified start and end + _, startSealHash, _, _, err := resolveSealReference(refsManager, args[0]) + if err != nil { + return fmt.Errorf("failed to resolve start commit '%s': %w", args[0], err) + } + startHash = startSealHash + + _, endSealHash, _, _, err := resolveSealReference(refsManager, args[1]) + if err != nil { + return fmt.Errorf("failed to resolve end commit '%s': %w", args[1], err) + } + endHash = endSealHash + + } else { + // Interactive mode + start, end, err := selectCommitRangeForShift(casStore, refsManager, timeline.Blake3Hash, currentTimeline) + if err != nil { + return err + } + + if start == nil || end == nil { + fmt.Println("Shift cancelled.") + return nil + } + + copy(startHash[:], start.Hash[:]) + copy(endHash[:], end.Hash[:]) + } + + // Validate the range + if err := squasher.ValidateRange(startHash, endHash); err != nil { + return fmt.Errorf("invalid commit range: %w", err) + } + + // Get commits in range + commits, err := squasher.GetCommitRange(startHash, endHash) + if err != nil { + return fmt.Errorf("failed to get commit range: %w", err) + } + + // Show review and get confirmation + fmt.Printf("\n%s Range selected: %d commits will be squashed\n\n", + colors.Green("✓"), len(commits)) + + fmt.Printf("%s Review commits to squash:\n\n", colors.Bold("📋")) + for i, c := range commits { + shortHash := hex.EncodeToString(c.Hash[:4]) + firstLine := strings.Split(c.Message, "\n")[0] + fmt.Printf("[%s] %d. %s - %s\n", + colors.Green("✓"), i+1, colors.Gray(shortHash), firstLine) + } + + // Get combined message suggestion + suggestedMessage := squasher.GetCombinedMessage(commits) + + fmt.Printf("\n%s\n", colors.Bold("Suggested commit message:")) + fmt.Printf("%s\n\n", colors.Dim(suggestedMessage)) + + // Prompt for commit message + fmt.Print("Enter new commit message (or press Enter to use suggested): ") + reader := bufio.NewReader(os.Stdin) + userMessage, err := reader.ReadString('\n') + if err != nil { + return fmt.Errorf("failed to read input: %w", err) + } + + userMessage = strings.TrimSpace(userMessage) + finalMessage := suggestedMessage + if userMessage != "" { + finalMessage = userMessage + } + + // Warning about history rewriting + fmt.Printf("\n%s This will rewrite commit history!\n", + colors.Yellow("⚠ WARNING:")) + fmt.Printf(" • %d commits will be replaced with 1 commit\n", len(commits)) + fmt.Printf(" • You will need to force push: %s\n", + colors.Bold("ivaldi upload --force")) + fmt.Print("\nConfirm squash? (yes/no): ") + + confirmInput, err := reader.ReadString('\n') + if err != nil { + return fmt.Errorf("failed to read confirmation: %w", err) + } + + confirm := strings.TrimSpace(strings.ToLower(confirmInput)) + if confirm != "yes" { + fmt.Println("Shift cancelled.") + return nil + } + + // Perform the squash + fmt.Printf("\n%s Squashing commits...\n", colors.Bold("🔨")) + + // Extract final state from end commit + files, err := squasher.ExtractFinalState(endHash) + if err != nil { + return fmt.Errorf("failed to extract final state: %w", err) + } + + // Get author from config + author, err := getAuthorFromConfig() + if err != nil { + return fmt.Errorf("failed to get author: %w", err) + } + + // Get parent of start commit (will be parent of squashed commit) + parentHash, err := squasher.GetParentOfStart(startHash) + if err != nil { + return fmt.Errorf("failed to get parent: %w", err) + } + + // Create squashed commit + _, squashedHash, err := squasher.CreateSquashedCommit(files, parentHash, author, finalMessage) + if err != nil { + return fmt.Errorf("failed to create squashed commit: %w", err) + } + + // Update timeline to point to new commit + var squashedHashArray [32]byte + copy(squashedHashArray[:], squashedHash[:]) + + // Generate and store seal name + sealName := seals.GenerateSealName(squashedHashArray) + err = refsManager.StoreSealName(sealName, squashedHashArray, finalMessage) + if err != nil { + fmt.Printf("Warning: Failed to store seal name: %v\n", err) + } + + // Update timeline + err = refsManager.UpdateTimeline( + currentTimeline, + refs.LocalTimeline, + squashedHashArray, + [32]byte{}, + "", + ) + if err != nil { + return fmt.Errorf("failed to update timeline: %w", err) + } + + // Update workspace to reflect the squashed commit + fmt.Printf("%s Created squashed commit: %s (%s)\n", + colors.Green("✓"), + colors.Cyan(sealName), + colors.Gray(hex.EncodeToString(squashedHashArray[:4]))) + fmt.Printf("%s Timeline updated\n", colors.Green("✓")) + fmt.Printf("%s %d commits squashed into 1\n\n", + colors.Green("✓"), len(commits)) + + fmt.Printf("%s Remote history differs. Push with --force to update:\n", + colors.Yellow("⚠")) + fmt.Printf(" %s\n", colors.Bold("ivaldi upload --force")) + + return nil +} + +// selectCommitRangeForShift provides interactive selection of commit range +func selectCommitRangeForShift(casStore cas.CAS, refsManager *refs.RefsManager, headHash [32]byte, timelineName string) (*SealInfo, *SealInfo, error) { + // Get commit history + allSeals, err := getCommitHistory(casStore, refsManager, headHash) + if err != nil { + return nil, nil, fmt.Errorf("failed to get commit history: %w", err) + } + + if len(allSeals) < 2 { + return nil, nil, fmt.Errorf("need at least 2 commits to squash") + } + + // Phase 1: Select START commit (oldest in range) + fmt.Printf("\n%s Select START of commit range (oldest):\n\n", + colors.Bold("⏱")) + + startSeal, err := selectSealWithArrowKeys(allSeals, timelineName, 0, len(allSeals)) + if err != nil { + return nil, nil, err + } + + if startSeal == nil { + return nil, nil, nil + } + + // Phase 2: Select END commit (newest in range) + // Filter to only show commits from HEAD to start + var filteredSeals []SealInfo + for _, seal := range allSeals { + filteredSeals = append(filteredSeals, seal) + if seal.Hash == startSeal.Hash { + break + } + } + + fmt.Printf("\n%s Select END of commit range (newest):\n\n", + colors.Bold("⏱")) + + // Mark the start position + displaySealsWithMarker(filteredSeals, timelineName, startSeal.Hash) + + endSeal, err := selectSealWithArrowKeys(filteredSeals, timelineName, 0, len(filteredSeals)) + if err != nil { + return nil, nil, err + } + + if endSeal == nil { + return nil, nil, nil + } + + // Validate that end is after or equal to start + if endSeal.Position > startSeal.Position { + return nil, nil, fmt.Errorf("end commit must be newer than or equal to start commit") + } + + return startSeal, endSeal, nil +} + +// displaySealsWithMarker displays seals with a marker for the start commit +func displaySealsWithMarker(seals []SealInfo, timelineName string, startHash [32]byte) { + fmt.Printf("\n%s Seals in timeline '%s':\n\n", colors.Bold("⏱"), colors.Bold(timelineName)) + + for i, seal := range seals { + var prefix string + if seal.Hash == startHash { + prefix = colors.Yellow(" [START] ") + } else if i == 0 { + prefix = colors.Dim("→ ") + } else { + prefix = " " + } + + sealHash := hex.EncodeToString(seal.Hash[:4]) + fmt.Printf("%s%d. %s (%s)\n", prefix, i+1, colors.Cyan(seal.SealName), colors.Gray(sealHash)) + fmt.Printf(" %s\n", seal.Message) + fmt.Printf(" %s • %s\n\n", seal.Author, seal.Timestamp) + } +} diff --git a/docs/SHIFT_FEATURE.md b/docs/SHIFT_FEATURE.md new file mode 100644 index 0000000..108ccab --- /dev/null +++ b/docs/SHIFT_FEATURE.md @@ -0,0 +1,309 @@ +# Ivaldi Shift Feature - Implementation Summary + +## Overview + +The `ivaldi shift` command provides an intuitive, interactive way to squash multiple commits into a single commit, following Ivaldi's philosophy of simplicity and user-friendliness. This feature addresses the common workflow of cleaning up commit history before pushing to remote repositories. + +## Implementation Components + +### 1. Core Logic (`internal/shift/squasher.go`) + +**File**: `internal/shift/squasher.go` + +**Key Functionality**: +- `CommitSquasher` struct for managing squash operations +- `GetCommitRange()` - Retrieves commits between start and end +- `ExtractFinalState()` - Gets the final workspace state from end commit +- `CreateSquashedCommit()` - Creates the new squashed commit +- `GetCombinedMessage()` - Generates combined commit message +- `ValidateRange()` - Validates commit range before squashing +- `GetParentOfStart()` - Gets parent commit for proper linking + +**Design Decisions**: +- Uses BLAKE3 hashing for content integrity +- Preserves final workspace state exactly +- Automatic message combination from all commits +- Chronological ordering of commits (oldest first) + +### 2. CLI Interface (`cli/shift.go`) + +**File**: `cli/shift.go` + +**User Interface**: +- Interactive two-phase selection (start commit, then end commit) +- Arrow key navigation (reuses pattern from `travel` command) +- Visual commit display with metadata +- Customizable commit message +- Safety confirmations at every step + +**Modes Supported**: +1. **Interactive Mode** - Visual selection with arrow keys +2. **Last N Mode** - `--last N` flag for quick squashing +3. **Specific Range Mode** - Direct seal name/hash specification + +**Safety Features**: +- Explicit "yes" confirmation required +- Clear warnings about history rewriting +- Force push guidance after squash +- Validation before execution + +### 3. Force Push Support (`cli/management.go`) + +**File**: `cli/management.go` + +**Added Features**: +- `--force` flag on `upload` command +- Multi-level confirmation ("force push" must be typed) +- Backup branch suggestions +- Team notification reminders +- Clear destructive operation warnings + +**Safety Checks**: +```go +if forceUpload { + // Display warnings + // Suggest backup + // Require explicit "force push" confirmation + // Notify about team coordination +} +``` + +### 4. GitHub Integration (`internal/github/sync.go`) + +**File**: `internal/github/sync.go` + +**Modified Function**: `PushCommit()` + +**Changes**: +- Added `force bool` parameter +- Passes force flag to `UpdateRefRequest` +- Different messaging for force vs normal push +- Proper handling of force push in GitHub API + +**API Integration**: +```go +updateReq := UpdateRefRequest{ + SHA: commitResp.SHA, + Force: force, // Enables force push +} +``` + +## Testing + +### Test Coverage (`internal/shift/squasher_test.go`) + +**Test Cases**: +1. `TestValidateRange` - Validates commit range checking + - Valid ranges (start to end) + - Invalid reversed ranges + - Non-existent commits + +2. `TestGetCommitRange` - Tests range retrieval + - Chronological ordering + - Correct commit count + - Message preservation + +3. `TestGetCombinedMessage` - Tests message generation + - Empty commits + - Single commit + - Multiple commits + - Multi-line commits (first line only) + +4. `TestGetParentOfStart` - Tests parent extraction + - Root commits (no parent) + - Child commits (with parent) + +**Test Results**: ✅ All tests passing + +## Documentation + +### 1. Command Documentation (`docs/commands/shift.md`) + +Comprehensive documentation including: +- Synopsis and description +- All usage modes with examples +- Interactive workflow walkthrough +- Safety features explanation +- Best practices and recommendations +- Troubleshooting guide +- Comparison with Git rebase +- Related commands + +### 2. Comparison Update (`docs/comparison.md`) + +Added shift command to Git comparison table: +```markdown +| Squash commits | `git rebase -i` (squash) | `ivaldi shift` | +``` + +## User Workflow + +### Complete Squash Workflow + +```bash +# 1. Check commits +ivaldi log + +# 2. Squash commits +ivaldi shift +# OR: ivaldi shift --last 3 +# OR: ivaldi shift + +# 3. Review result +ivaldi log + +# 4. Optional: Create backup +ivaldi timeline create backup-before-push + +# 5. Force push +ivaldi upload --force +# Type "force push" to confirm +``` + +### Safety Workflow + +```mermaid +graph TD + A[ivaldi shift] --> B{Select Commits} + B --> C[Review Selection] + C --> D{Confirm Message} + D --> E[Type 'yes'] + E --> F[Squash Commits] + F --> G[ivaldi upload --force] + G --> H{Warning Displayed} + H --> I[Type 'force push'] + I --> J[Push to Remote] + J --> K[Notify Team] +``` + +## Architecture Decisions + +### Why Two-Phase Selection? + +Interactive selection uses two phases to make range selection intuitive: +1. User picks oldest commit (START) +2. User picks newest commit (END) from filtered list +3. Prevents confusion about ordering + +### Why Preserve Final State Only? + +Instead of merging individual changes: +- Extract the final workspace state from END commit +- Create one new commit with that state +- Simpler, faster, and avoids complex merge logic +- Result is identical to manual squashing + +### Why Multiple Confirmation Steps? + +Safety is paramount for history-rewriting operations: +1. Commit selection confirmation +2. Message entry opportunity +3. "yes" typed to confirm squash +4. "force push" typed to confirm force push +5. Clear warnings at each step + +## Security Considerations + +### Protected Against + +1. **Accidental Force Push** - Requires explicit "force push" confirmation +2. **Unintended Squash** - Multiple confirmation steps +3. **Lost Work** - Backup suggestions prominently displayed +4. **Team Disruption** - Team notification reminders + +### User Education + +Documentation emphasizes: +- When to use shift (before pushing) +- When NOT to use shift (after pushing) +- Team coordination importance +- Backup creation recommendations + +## Performance Characteristics + +### Time Complexity + +- **Range Validation**: O(n) where n = commits in range +- **State Extraction**: O(f) where f = files in final commit +- **Commit Creation**: O(f) for file processing +- **Overall**: O(n + f), typically fast + +### Memory Usage + +- Commits loaded: Only in specified range +- File states: Only final state preserved +- Efficient for large repositories + +## Integration Points + +### Existing Ivaldi Components Used + +1. **CAS (Content-Addressable Storage)** - File content storage +2. **Commit System** - Reading and creating commits +3. **Refs Manager** - Timeline management +4. **Seals** - Memorable commit names +5. **Workspace** - File state management +6. **GitHub Client** - Force push support + +### New Components Added + +1. **Shift Package** - Core squashing logic +2. **CommitSquasher** - Main squashing orchestrator +3. **Force Push** - Enhanced upload with force option + +## Future Enhancements + +Potential improvements for future versions: + +1. **Interactive Message Editor** - Launch editor for multi-line messages +2. **Squash Preview** - Show diff before confirming +3. **Undo Squash** - Quick rollback if mistake made +4. **Batch Squash** - Squash multiple ranges at once +5. **Auto-Backup** - Automatic backup branch creation +6. **Smart Messages** - AI-generated commit messages from changes + +## Comparison with Git + +### Advantages over Git Rebase + +1. **Simpler Interface** - Visual arrow key navigation vs text editor +2. **Clear Safety** - Multiple explicit confirmations +3. **No Conflicts** - Automatic state preservation (no conflict resolution) +4. **Guided Workflow** - Step-by-step with clear instructions +5. **Force Push Safety** - Built-in warnings and confirmations + +### Git Rebase Features Not Included + +1. **Reorder Commits** - Not yet supported (use travel + manual work) +2. **Edit Commits** - Not supported (use travel for divergence) +3. **Drop Commits** - Not directly supported +4. **Fixup** - Only squash available + +## Conclusion + +The `ivaldi shift` feature successfully implements an intuitive, safe, and user-friendly way to squash commits. It follows Ivaldi's design philosophy of simplicity while maintaining safety through multiple confirmation layers. The implementation is well-tested, thoroughly documented, and integrates cleanly with existing Ivaldi components. + +## Files Modified/Created + +### Created +- `internal/shift/squasher.go` - Core squashing logic +- `internal/shift/squasher_test.go` - Comprehensive tests +- `cli/shift.go` - CLI interface +- `docs/commands/shift.md` - User documentation +- `docs/SHIFT_FEATURE.md` - This implementation summary + +### Modified +- `cli/cli.go` - Registered shift command +- `cli/management.go` - Added --force flag and safety checks +- `internal/github/sync.go` - Added force push support +- `docs/comparison.md` - Updated Git comparison table + +## Verification + +All components verified: +- ✅ Code compiles successfully +- ✅ Tests pass (4/4 tests passing) +- ✅ Shift command available in CLI +- ✅ Upload --force flag available +- ✅ Help text displays correctly +- ✅ Documentation complete and comprehensive diff --git a/docs/commands/shift.md b/docs/commands/shift.md new file mode 100644 index 0000000..bbb46fd --- /dev/null +++ b/docs/commands/shift.md @@ -0,0 +1,460 @@ +--- +layout: default +title: ivaldi shift +--- + +# ivaldi shift + +Interactively squash multiple commits into a single commit for cleaner history. + +## Synopsis + +```bash +ivaldi shift # Interactive mode - select range with arrow keys +ivaldi shift --last N # Squash last N commits +ivaldi shift # Squash specific range by seal name or hash +``` + +## Description + +The `shift` command allows you to combine multiple commits into a single commit, creating a cleaner commit history. This is particularly useful before pushing to a remote repository or when you have many small WIP commits that should be combined. + +Unlike Git's interactive rebase, `shift` provides a simple, guided interface for squashing commits. + +## Features + +- Interactive commit range selection with arrow keys +- Automatic combined commit message generation +- Safety confirmations before rewriting history +- Force push warnings and guidance +- Support for multiple selection modes + +## Options + +- `--last N` - Squash the last N commits (must be at least 2) +- No options - Interactive mode with visual selection + +## Usage Modes + +### Interactive Mode (Recommended) + +The interactive mode provides a two-phase selection process: + +```bash +ivaldi shift +``` + +**Phase 1: Select START commit (oldest)** +- Navigate through commits using arrow keys +- Press Enter to select the oldest commit in range +- Press Q to cancel + +**Phase 2: Select END commit (newest)** +- Navigate through filtered commits +- Start commit is marked with [START] +- Press Enter to select the newest commit +- Press Q to cancel + +**Phase 3: Review and Confirm** +- Review all commits to be squashed +- Enter custom commit message or use suggested one +- Confirm the operation with 'yes' + +### Last N Commits Mode + +Quickly squash the most recent commits: + +```bash +# Squash last 3 commits +ivaldi shift --last 3 + +# Squash last 5 commits +ivaldi shift --last 5 +``` + +### Specific Range Mode + +Squash a specific range using seal names or hashes: + +```bash +# Using seal names +ivaldi shift swift-eagle bold-hawk + +# Using partial hashes +ivaldi shift 447abe9b 7bb05886 + +# Mixing seal names and hashes +ivaldi shift swift-eagle 7bb05886 +``` + +## Examples + +### Example 1: Clean Up WIP Commits + +```bash +# You have 3 WIP commits to clean up +$ ivaldi log --oneline +447abe9b swift-eagle-flies-high WIP: Add authentication +7bb05886 empty-phoenix-attacks WIP: Fix login bug +3c4d5e6f brave-wolf-runs-fast WIP: Update tests + +# Squash them interactively +$ ivaldi shift + +⏱ Select START of commit range (oldest): + +→ 1. swift-eagle-flies-high-447abe9b (447abe9b) + WIP: Add authentication + + 2. empty-phoenix-attacks-7bb05886 (7bb05886) + WIP: Fix login bug + + 3. brave-wolf-runs-fast-3c4d5e6f (3c4d5e6f) + WIP: Update tests + +[Select commit 3 with Enter] + +⏱ Select END of commit range (newest): + +→ 1. swift-eagle-flies-high-447abe9b (447abe9b) + WIP: Add authentication + + 2. empty-phoenix-attacks-7bb05886 (7bb05886) + WIP: Fix login bug + + 3. brave-wolf-runs-fast-3c4d5e6f (3c4d5e6f) [START] + WIP: Update tests + +[Select commit 1 with Enter] + +✓ Range selected: 3 commits will be squashed + +📋 Review commits to squash: + +[✓] 1. 3c4d5e6f - WIP: Update tests +[✓] 2. 7bb05886 - WIP: Fix login bug +[✓] 3. 447abe9b - WIP: Add authentication + +Suggested commit message: +Squashed 3 commits: + +WIP: Update tests +WIP: Fix login bug +WIP: Add authentication + +Enter new commit message (or press Enter to use suggested): +Add authentication with login fixes and tests + +⚠ WARNING: This will rewrite commit history! + • 3 commits will be replaced with 1 commit + • You will need to force push: ivaldi upload --force + +Confirm squash? (yes/no): yes + +🔨 Squashing commits... +✓ Created squashed commit: bold-hawk-soars-9a8b7c6d (9a8b7c6d) +✓ Timeline updated +✓ 3 commits squashed into 1 + +⚠ Remote history differs. Push with --force to update: + ivaldi upload --force +``` + +### Example 2: Squash Last N Commits + +```bash +# Quick squash of last 4 commits +$ ivaldi shift --last 4 + +✓ Range selected: 4 commits will be squashed + +📋 Review commits to squash: + +[✓] 1. 1a2b3c4d - Fix typo +[✓] 2. 5e6f7g8h - Add comment +[✓] 3. 9i0j1k2l - Refactor function +[✓] 4. 3m4n5o6p - Add feature X + +Enter new commit message: Implement feature X with refactoring + +Confirm squash? (yes/no): yes + +✓ Created squashed commit: clever-fox-jumps-high-8h9i0j1k +✓ 4 commits squashed into 1 +``` + +### Example 3: Force Push After Squash + +```bash +# After squashing, update remote +$ ivaldi upload --force + +⚠ WARNING: Force push will OVERWRITE remote history! +This is a destructive operation that: + • Rewrites commit history on the remote + • Can cause issues for collaborators + • Cannot be undone easily + +💡 Tip: Consider creating a backup branch first: + ivaldi timeline create backup-before-force-push + ivaldi upload github:owner/repo backup-before-force-push + +Type 'force push' to confirm: force push + +Uploading to GitHub: owner/repo (branch: main)... +✓ Force pushed to GitHub +⚠ Make sure to notify collaborators about the history rewrite +``` + +## Workflow + +### Complete Squash and Push Workflow + +```bash +# 1. Check your commits +ivaldi log + +# 2. Squash commits interactively +ivaldi shift + +# 3. Verify the result +ivaldi log + +# 4. Create backup (optional but recommended) +ivaldi timeline create backup-before-push + +# 5. Force push to remote +ivaldi upload --force +``` + +### Before Pushing to Remote + +Always squash before pushing if you have: +- Multiple WIP commits +- Debugging commits +- Experimental commits +- "Fix typo" commits + +```bash +# Make your changes +ivaldi gather . +ivaldi seal "WIP: Feature progress" +ivaldi seal "WIP: More work" +ivaldi seal "WIP: Almost done" +ivaldi seal "WIP: Final touches" + +# Clean up before pushing +ivaldi shift --last 4 +# Enter: "Implement feature X" + +# Now push clean history +ivaldi upload --force +``` + +## Safety Features + +### Confirmation Requirements + +`shift` requires explicit confirmation: +1. Interactive selection confirmation +2. Commit message entry +3. "yes" typed to confirm squash +4. "force push" typed to confirm force push + +### Pre-Squash Checks + +Before squashing, `shift`: +- Validates commit range +- Ensures commits are connected +- Checks for cycles in history +- Verifies all commits exist + +### Post-Squash Warnings + +After squashing, you'll see: +- Force push requirement warning +- Suggestion to notify collaborators +- Backup branch creation tip + +## Common Use Cases + +### 1. Clean Up Before Pull Request + +```bash +# You have messy local commits +ivaldi log --oneline + +# Squash into logical commits +ivaldi shift --last 10 +# Message: "Add user authentication feature" + +ivaldi upload --force +# Create clean pull request +``` + +### 2. Combine Related Changes + +```bash +# Multiple commits for one feature +$ ivaldi shift swift-eagle brave-wolf +# Combines all commits between these seals +``` + +### 3. Remove Debugging Commits + +```bash +# After debugging session with many commits +ivaldi shift --last 15 +# Message: "Fix authentication bug" +``` + +## Best Practices + +### DO: +- Squash before pushing to remote +- Create descriptive commit messages +- Review commits before confirming +- Create backup branches for safety +- Notify team before force pushing + +### DON'T: +- Squash after pushing (requires force push) +- Squash commits others have based work on +- Skip reviewing the commit list +- Force push without warning team +- Squash across merge commits + +## Safety Recommendations + +### 1. Create Backup Branch + +```bash +ivaldi timeline create backup-$(date +%Y%m%d) +ivaldi shift --last 5 +``` + +### 2. Test Locally First + +```bash +# Squash locally +ivaldi shift --last 3 + +# Test thoroughly +ivaldi status +ivaldi log + +# If good, then force push +ivaldi upload --force +``` + +### 3. Coordinate with Team + +Before force pushing to shared branches: +1. Notify team in chat/email +2. Ensure no one is working on the branch +3. Ask team to re-fetch after push + +## Troubleshooting + +### Error: Timeline has no commits yet + +``` +Error: timeline has no commits yet +``` + +Solution: Create at least one commit first: +```bash +ivaldi gather . +ivaldi seal "Initial commit" +ivaldi shift # Now works +``` + +### Error: Need at least 2 commits + +``` +Error: need at least 2 commits to squash +``` + +Solution: You can't squash a single commit. Make more commits: +```bash +ivaldi seal "Second commit" +ivaldi shift --last 2 +``` + +### Error: End commit is not a descendant + +``` +Error: end commit is not a descendant of start commit +``` + +Solution: Verify commit order. Start must be older than end: +```bash +# Wrong order: +ivaldi shift bold-hawk swift-eagle # bold-hawk is newer + +# Correct order: +ivaldi shift swift-eagle bold-hawk # swift-eagle is older +``` + +### Error: Invalid commit range + +``` +Error: invalid commit range: end commit is not a descendant of start commit +``` + +Solution: Selected commits are not connected. Use `ivaldi log` to find correct range. + +## Comparison with Git + +| Git | Ivaldi | +|-----|--------| +| `git rebase -i HEAD~3` | `ivaldi shift --last 3` | +| Edit, pick, squash in editor | Interactive arrow key selection | +| Manual conflict resolution | Automatic handling | +| Reword/edit/squash/fixup | Simple squash with message | +| `git push --force` | `ivaldi upload --force` | + +## Advanced Usage + +### Squash Non-Adjacent Commits + +To squash non-adjacent commits, create a new timeline: + +```bash +# Can't directly squash commit 1 and 3 (skipping 2) +# Instead: + +# 1. Travel to before unwanted commits +ivaldi travel +# Select commit before commit 2, diverge to 'cleaned' + +# 2. Cherry-pick wanted changes (manual) +# Apply changes from commit 1 and 3 + +# 3. Create new commit +ivaldi seal "Combined commits 1 and 3" +``` + +### Squash Across Branches + +```bash +# Merge first +ivaldi fuse feature-branch to main + +# Then squash +ivaldi shift --last 5 +``` + +## Related Commands + +- [seal](seal.md) - Create commits +- [travel](travel.md) - Time travel through history +- [timeline](timeline.md) - Manage timelines/branches +- [upload](upload.md) - Push to remote (with --force) +- [log](log.md) - View commit history + +## See Also + +- [Force Push Safety](../guides/force-push-safety.md) +- [Collaboration Guide](../guides/collaboration.md) +- [GitHub Integration](../guides/github-integration.md) diff --git a/docs/comparison.md b/docs/comparison.md index eedceff..b348a14 100644 --- a/docs/comparison.md +++ b/docs/comparison.md @@ -28,6 +28,7 @@ Understanding how Ivaldi differs from and improves upon Git. | Remote | `git remote add` | `ivaldi portal add` | | Stash | `git stash` | (automatic shelving) | | Rebase | `git rebase -i` | `ivaldi travel` (diverge) | +| Squash commits | `git rebase -i` (squash) | `ivaldi shift` | | Ignore files | Edit `.gitignore` | `ivaldi exclude` | ## Conceptual Differences diff --git a/internal/github/sync.go b/internal/github/sync.go index c2825fc..19a6153 100644 --- a/internal/github/sync.go +++ b/internal/github/sync.go @@ -480,10 +480,10 @@ type blobUploadJob struct { // blobUploadResult represents the result of a blob upload type blobUploadResult struct { - path string - mode string - sha string - err error + path string + mode string + sha string + err error } // createBlobsParallel uploads blobs in parallel @@ -622,8 +622,12 @@ func (rs *RepoSyncer) UploadFile(ctx context.Context, owner, repo, path, branch, } // PushCommit pushes an Ivaldi commit to GitHub as a single commit with delta optimization -func (rs *RepoSyncer) PushCommit(ctx context.Context, owner, repo, branch string, commitHash cas.Hash) error { - fmt.Printf("Pushing commit %s to GitHub...\n", commitHash.String()[:8]) +func (rs *RepoSyncer) PushCommit(ctx context.Context, owner, repo, branch string, commitHash cas.Hash, force bool) error { + if force { + fmt.Printf("Force pushing commit %s to GitHub...\n", commitHash.String()[:8]) + } else { + fmt.Printf("Pushing commit %s to GitHub...\n", commitHash.String()[:8]) + } // Check if branch exists on GitHub branchInfo, err := rs.client.GetBranch(ctx, owner, repo, branch) @@ -847,7 +851,8 @@ func (rs *RepoSyncer) PushCommit(ctx context.Context, owner, repo, branch string } else { // Update existing branch reference updateReq := UpdateRefRequest{ - SHA: commitResp.SHA, + SHA: commitResp.SHA, + Force: force, // Use force flag for ref update } err = rs.client.UpdateRef(ctx, owner, repo, fmt.Sprintf("heads/%s", branch), updateReq) if err != nil { diff --git a/internal/shift/squasher.go b/internal/shift/squasher.go new file mode 100644 index 0000000..10239f1 --- /dev/null +++ b/internal/shift/squasher.go @@ -0,0 +1,291 @@ +package shift + +import ( + "fmt" + "strings" + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/commit" + "github.com/javanhut/Ivaldi-vcs/internal/filechunk" + "github.com/javanhut/Ivaldi-vcs/internal/hamtdir" + "github.com/javanhut/Ivaldi-vcs/internal/wsindex" +) + +// CommitSquasher handles the logic for squashing multiple commits into one. +type CommitSquasher struct { + CAS cas.CAS + Builder *commit.CommitBuilder + Reader *commit.CommitReader +} + +// NewCommitSquasher creates a new CommitSquasher. +func NewCommitSquasher(casStore cas.CAS, builder *commit.CommitBuilder) *CommitSquasher { + return &CommitSquasher{ + CAS: casStore, + Builder: builder, + Reader: commit.NewCommitReader(casStore), + } +} + +// CommitInfo holds information about a commit in the range. +type CommitInfo struct { + Hash cas.Hash + Message string + Author string + Timestamp time.Time + TreeHash cas.Hash +} + +// GetCommitRange retrieves all commits between start (oldest) and end (newest) inclusive. +// The commits are returned in chronological order (oldest first). +func (cs *CommitSquasher) GetCommitRange(start, end cas.Hash) ([]CommitInfo, error) { + var commits []CommitInfo + visited := make(map[cas.Hash]bool) + + // Walk backwards from end to start, collecting commits + currentHash := end + for { + if visited[currentHash] { + return nil, fmt.Errorf("cycle detected in commit history") + } + visited[currentHash] = true + + // Read commit + commitObj, err := cs.Reader.ReadCommit(currentHash) + if err != nil { + return nil, fmt.Errorf("failed to read commit %x: %w", currentHash[:4], err) + } + + // Add to commits list + commits = append(commits, CommitInfo{ + Hash: currentHash, + Message: commitObj.Message, + Author: commitObj.Author, + Timestamp: commitObj.CommitTime, + TreeHash: commitObj.TreeHash, + }) + + // If we've reached the start commit, we're done + if currentHash == start { + break + } + + // Move to parent + if len(commitObj.Parents) == 0 { + return nil, fmt.Errorf("reached root commit before finding start commit") + } + + // Use first parent for linear history + currentHash = commitObj.Parents[0] + } + + // Reverse to get chronological order (oldest first) + for i, j := 0, len(commits)-1; i < j; i, j = i+1, j-1 { + commits[i], commits[j] = commits[j], commits[i] + } + + return commits, nil +} + +// ExtractFinalState extracts the final workspace state from the end commit. +// This is the state we want to preserve in the squashed commit. +func (cs *CommitSquasher) ExtractFinalState(endCommit cas.Hash) ([]wsindex.FileMetadata, error) { + // Read the end commit + commitObj, err := cs.Reader.ReadCommit(endCommit) + if err != nil { + return nil, fmt.Errorf("failed to read end commit: %w", err) + } + + // Read the tree + tree, err := cs.Reader.ReadTree(commitObj) + if err != nil { + return nil, fmt.Errorf("failed to read tree: %w", err) + } + + // List all files in the tree + filePaths, err := cs.Reader.ListFiles(tree) + if err != nil { + return nil, fmt.Errorf("failed to list files: %w", err) + } + + // Create file metadata for each file + var files []wsindex.FileMetadata + for _, filePath := range filePaths { + // Get file content to determine checksum + content, err := cs.Reader.GetFileContent(tree, filePath) + if err != nil { + return nil, fmt.Errorf("failed to get content for %s: %w", filePath, err) + } + + // Get file ref by navigating tree structure + fileRef, err := cs.getFileRefFromTree(tree, filePath) + if err != nil { + return nil, fmt.Errorf("failed to get file ref for %s: %w", filePath, err) + } + + fileMetadata := wsindex.FileMetadata{ + Path: filePath, + FileRef: fileRef, + ModTime: commitObj.CommitTime, + Mode: 0644, + Size: int64(len(content)), + Checksum: cas.SumB3(content), + } + files = append(files, fileMetadata) + } + + return files, nil +} + +// getFileRefFromTree extracts the file reference from a tree by path. +func (cs *CommitSquasher) getFileRefFromTree(tree *commit.TreeObject, filePath string) (filechunk.NodeRef, error) { + // Split path into parts + parts := strings.Split(filePath, "/") + if len(parts) == 0 { + return filechunk.NodeRef{}, fmt.Errorf("invalid file path: %s", filePath) + } + + // Navigate through the HAMT structure + hamtLoader := hamtdir.NewLoader(cs.CAS) + currentDirRef := tree.DirRef + + for i, part := range parts { + entries, err := hamtLoader.List(currentDirRef) + if err != nil { + return filechunk.NodeRef{}, fmt.Errorf("failed to read directory entries: %w", err) + } + + if i == len(parts)-1 { + // This is the final file + for _, entry := range entries { + if entry.Name == part && entry.Type == hamtdir.FileEntry { + return *entry.File, nil + } + } + return filechunk.NodeRef{}, fmt.Errorf("file not found: %s", part) + } else { + // Navigate to subdirectory + found := false + for _, entry := range entries { + if entry.Name == part && entry.Type == hamtdir.DirEntry { + currentDirRef = *entry.Dir + found = true + break + } + } + if !found { + return filechunk.NodeRef{}, fmt.Errorf("directory not found: %s", part) + } + } + } + + return filechunk.NodeRef{}, fmt.Errorf("unexpected error in getFileRefFromTree") +} + +// CreateSquashedCommit creates a new commit with the final state and combined metadata. +func (cs *CommitSquasher) CreateSquashedCommit( + files []wsindex.FileMetadata, + parent cas.Hash, + author, message string, +) (*commit.CommitObject, cas.Hash, error) { + // Determine parents + var parents []cas.Hash + if parent != (cas.Hash{}) { + parents = append(parents, parent) + } + + // Create the commit + commitObj, err := cs.Builder.CreateCommit( + files, + parents, + author, + author, + message, + ) + if err != nil { + return nil, cas.Hash{}, fmt.Errorf("failed to create commit: %w", err) + } + + // Get commit hash + commitHash := cs.Builder.GetCommitHash(commitObj) + + return commitObj, commitHash, nil +} + +// GetCombinedMessage creates a combined commit message from a range of commits. +// Format: "Squashed N commits: | | ..." +func (cs *CommitSquasher) GetCombinedMessage(commits []CommitInfo) string { + if len(commits) == 0 { + return "Empty squash" + } + + if len(commits) == 1 { + return commits[0].Message + } + + var messages []string + for _, c := range commits { + // Trim message to first line only + firstLine := strings.Split(c.Message, "\n")[0] + messages = append(messages, firstLine) + } + + return fmt.Sprintf("Squashed %d commits:\n\n%s", + len(commits), + strings.Join(messages, "\n")) +} + +// ValidateRange validates that the commit range is valid. +func (cs *CommitSquasher) ValidateRange(start, end cas.Hash) error { + // Ensure both commits exist + if _, err := cs.Reader.ReadCommit(start); err != nil { + return fmt.Errorf("start commit not found: %w", err) + } + + if _, err := cs.Reader.ReadCommit(end); err != nil { + return fmt.Errorf("end commit not found: %w", err) + } + + // Verify end is a descendant of start + currentHash := end + visited := make(map[cas.Hash]bool) + + for { + if currentHash == start { + return nil // Valid range + } + + if visited[currentHash] { + return fmt.Errorf("cycle detected in commit history") + } + visited[currentHash] = true + + commitObj, err := cs.Reader.ReadCommit(currentHash) + if err != nil { + return fmt.Errorf("failed to read commit: %w", err) + } + + if len(commitObj.Parents) == 0 { + return fmt.Errorf("end commit is not a descendant of start commit") + } + + currentHash = commitObj.Parents[0] + } +} + +// GetParentOfStart returns the parent commit of the start commit. +// This will be the parent of the squashed commit. +func (cs *CommitSquasher) GetParentOfStart(start cas.Hash) (cas.Hash, error) { + commitObj, err := cs.Reader.ReadCommit(start) + if err != nil { + return cas.Hash{}, fmt.Errorf("failed to read start commit: %w", err) + } + + if len(commitObj.Parents) == 0 { + // No parent (root commit) + return cas.Hash{}, nil + } + + return commitObj.Parents[0], nil +} diff --git a/internal/shift/squasher_test.go b/internal/shift/squasher_test.go new file mode 100644 index 0000000..9524048 --- /dev/null +++ b/internal/shift/squasher_test.go @@ -0,0 +1,256 @@ +package shift + +import ( + "testing" + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/commit" + "github.com/javanhut/Ivaldi-vcs/internal/filechunk" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/wsindex" +) + +func TestValidateRange(t *testing.T) { + // Create in-memory CAS + casStore := cas.NewMemoryCAS() + mmr := history.NewMMR() + builder := commit.NewCommitBuilder(casStore, mmr) + squasher := NewCommitSquasher(casStore, builder) + + // Create a chain of commits: C1 -> C2 -> C3 + files1 := []wsindex.FileMetadata{ + { + Path: "file1.txt", + FileRef: filechunk.NodeRef{ + Hash: cas.SumB3([]byte("content1")), + Kind: filechunk.Leaf, + Size: 8, + }, + ModTime: time.Now(), + Mode: 0644, + Size: 8, + Checksum: cas.SumB3([]byte("content1")), + }, + } + + commit1, err := builder.CreateCommit(files1, nil, "author", "author", "First commit") + if err != nil { + t.Fatalf("Failed to create commit 1: %v", err) + } + hash1 := builder.GetCommitHash(commit1) + + // Create second commit + files2 := append(files1, wsindex.FileMetadata{ + Path: "file2.txt", + FileRef: filechunk.NodeRef{ + Hash: cas.SumB3([]byte("content2")), + Kind: filechunk.Leaf, + Size: 8, + }, + ModTime: time.Now(), + Mode: 0644, + Size: 8, + Checksum: cas.SumB3([]byte("content2")), + }) + + commit2, err := builder.CreateCommit(files2, []cas.Hash{hash1}, "author", "author", "Second commit") + if err != nil { + t.Fatalf("Failed to create commit 2: %v", err) + } + hash2 := builder.GetCommitHash(commit2) + + // Create third commit + files3 := append(files2, wsindex.FileMetadata{ + Path: "file3.txt", + FileRef: filechunk.NodeRef{ + Hash: cas.SumB3([]byte("content3")), + Kind: filechunk.Leaf, + Size: 8, + }, + ModTime: time.Now(), + Mode: 0644, + Size: 8, + Checksum: cas.SumB3([]byte("content3")), + }) + + commit3, err := builder.CreateCommit(files3, []cas.Hash{hash2}, "author", "author", "Third commit") + if err != nil { + t.Fatalf("Failed to create commit 3: %v", err) + } + hash3 := builder.GetCommitHash(commit3) + + // Test valid range: hash1 to hash3 + err = squasher.ValidateRange(hash1, hash3) + if err != nil { + t.Errorf("Expected valid range hash1->hash3, got error: %v", err) + } + + // Test valid range: hash2 to hash3 + err = squasher.ValidateRange(hash2, hash3) + if err != nil { + t.Errorf("Expected valid range hash2->hash3, got error: %v", err) + } + + // Test invalid range: hash3 to hash1 (reversed) + err = squasher.ValidateRange(hash3, hash1) + if err == nil { + t.Error("Expected error for reversed range hash3->hash1, got nil") + } + + // Test invalid range: non-existent commit + fakeHash := cas.SumB3([]byte("fake")) + err = squasher.ValidateRange(hash1, fakeHash) + if err == nil { + t.Error("Expected error for non-existent end commit, got nil") + } +} + +func TestGetCommitRange(t *testing.T) { + // Create in-memory CAS + casStore := cas.NewMemoryCAS() + mmr := history.NewMMR() + builder := commit.NewCommitBuilder(casStore, mmr) + squasher := NewCommitSquasher(casStore, builder) + + // Create commits + files := []wsindex.FileMetadata{ + { + Path: "test.txt", + FileRef: filechunk.NodeRef{ + Hash: cas.SumB3([]byte("test")), + Kind: filechunk.Leaf, + Size: 4, + }, + ModTime: time.Now(), + Mode: 0644, + Size: 4, + Checksum: cas.SumB3([]byte("test")), + }, + } + + // Commit 1 + commit1, _ := builder.CreateCommit(files, nil, "author", "author", "Commit 1") + hash1 := builder.GetCommitHash(commit1) + + // Commit 2 + commit2, _ := builder.CreateCommit(files, []cas.Hash{hash1}, "author", "author", "Commit 2") + hash2 := builder.GetCommitHash(commit2) + + // Commit 3 + commit3, _ := builder.CreateCommit(files, []cas.Hash{hash2}, "author", "author", "Commit 3") + hash3 := builder.GetCommitHash(commit3) + + // Get range from hash1 to hash3 + commits, err := squasher.GetCommitRange(hash1, hash3) + if err != nil { + t.Fatalf("Failed to get commit range: %v", err) + } + + // Verify we got all 3 commits in chronological order + if len(commits) != 3 { + t.Errorf("Expected 3 commits, got %d", len(commits)) + } + + if commits[0].Message != "Commit 1" { + t.Errorf("Expected first commit message 'Commit 1', got '%s'", commits[0].Message) + } + + if commits[2].Message != "Commit 3" { + t.Errorf("Expected third commit message 'Commit 3', got '%s'", commits[2].Message) + } +} + +func TestGetCombinedMessage(t *testing.T) { + casStore := cas.NewMemoryCAS() + mmr := history.NewMMR() + builder := commit.NewCommitBuilder(casStore, mmr) + squasher := NewCommitSquasher(casStore, builder) + + // Test empty commits + commits := []CommitInfo{} + msg := squasher.GetCombinedMessage(commits) + if msg != "Empty squash" { + t.Errorf("Expected 'Empty squash' for empty commits, got '%s'", msg) + } + + // Test single commit + commits = []CommitInfo{ + {Message: "Single commit message"}, + } + msg = squasher.GetCombinedMessage(commits) + if msg != "Single commit message" { + t.Errorf("Expected single message to be preserved, got '%s'", msg) + } + + // Test multiple commits + commits = []CommitInfo{ + {Message: "First commit"}, + {Message: "Second commit"}, + {Message: "Third commit"}, + } + msg = squasher.GetCombinedMessage(commits) + expected := "Squashed 3 commits:\n\nFirst commit\nSecond commit\nThird commit" + if msg != expected { + t.Errorf("Expected combined message:\n%s\n\nGot:\n%s", expected, msg) + } + + // Test multi-line commits (should only use first line) + commits = []CommitInfo{ + {Message: "First line\nSecond line\nThird line"}, + {Message: "Another commit\nWith details"}, + } + msg = squasher.GetCombinedMessage(commits) + expected = "Squashed 2 commits:\n\nFirst line\nAnother commit" + if msg != expected { + t.Errorf("Expected first lines only:\n%s\n\nGot:\n%s", expected, msg) + } +} + +func TestGetParentOfStart(t *testing.T) { + casStore := cas.NewMemoryCAS() + mmr := history.NewMMR() + builder := commit.NewCommitBuilder(casStore, mmr) + squasher := NewCommitSquasher(casStore, builder) + + files := []wsindex.FileMetadata{ + { + Path: "test.txt", + FileRef: filechunk.NodeRef{ + Hash: cas.SumB3([]byte("test")), + Kind: filechunk.Leaf, + Size: 4, + }, + ModTime: time.Now(), + Mode: 0644, + Size: 4, + Checksum: cas.SumB3([]byte("test")), + }, + } + + // Create root commit (no parent) + commit1, _ := builder.CreateCommit(files, nil, "author", "author", "Root commit") + hash1 := builder.GetCommitHash(commit1) + + // Create child commit + commit2, _ := builder.CreateCommit(files, []cas.Hash{hash1}, "author", "author", "Child commit") + hash2 := builder.GetCommitHash(commit2) + + // Test root commit (should return zero hash) + parent, err := squasher.GetParentOfStart(hash1) + if err != nil { + t.Fatalf("Failed to get parent of root: %v", err) + } + if parent != (cas.Hash{}) { + t.Error("Expected zero hash for root commit parent") + } + + // Test child commit (should return hash1) + parent, err = squasher.GetParentOfStart(hash2) + if err != nil { + t.Fatalf("Failed to get parent of child: %v", err) + } + if parent != hash1 { + t.Error("Expected parent to be hash1") + } +} From 8b612b2fe810fa22d4f619bbd65c9cf9d8e089d5 Mon Sep 17 00:00:00 2001 From: javanhut Date: Thu, 13 Nov 2025 08:31:39 -0500 Subject: [PATCH 4/4] Updated docs --- README.md | 43 +++++++++++++++++++++++--- docs/commands/index.md | 24 +++++++++++++++ docs/comparison.md | 53 +++++++++++++++++++++++++++----- docs/getting-started.md | 56 ++++++++++++++++++++++++++++++++++ docs/index.md | 51 +++++++++++++++++++++++++++++-- internal/butterfly/metadata.go | 11 ------- 6 files changed, 212 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index eb1888b..86479e2 100644 --- a/README.md +++ b/README.md @@ -22,10 +22,13 @@ https://github.com/user-attachments/assets/128a8407-c5e5-4115-a3cb-24c2f48a71dd ## Features - **Timeline-Based Branching**: Intuitive branch management with auto-shelving +- **Butterfly Timelines**: Experimental sandboxes with bidirectional sync for safe development +- **Interactive Commit Squashing**: Clean up history with arrow-key navigation before pushing - **Modern Cryptography**: BLAKE3 hashing for security and performance - **Content-Addressable Storage**: Efficient deduplication and storage - **GitHub Integration**: Seamless clone, push, and pull operations - **Auto-Shelving**: Never lose work when switching branches +- **Submodule Support**: Automatic Git submodule detection, conversion, and dual-hash tracking - **Selective Sync**: Download only the branches you need - **Merkle Mountain Range**: Append-only commit history with cryptographic proofs @@ -141,6 +144,14 @@ ivaldi seals list # Create and switch timeline (branch) ivaldi timeline create feature-auth ivaldi timeline switch main + +# Create experimental timeline (butterfly) +ivaldi timeline butterfly experiment +# Make changes, test safely +ivaldi timeline butterfly up # Merge to parent when ready + +# Clean up commit history before pushing +ivaldi shift --last 5 # Interactive squash ``` ### GitHub Integration @@ -155,7 +166,7 @@ ivaldi auth status # Connect to GitHub repository ivaldi portal add owner/repo -# Clone from GitHub +# Clone from GitHub (auto-converts submodules!) ivaldi download owner/awesome-project # Discover remote branches @@ -166,6 +177,9 @@ ivaldi harvest feature-auth bugfix-db # Push changes ivaldi upload + +# Force push after squashing +ivaldi upload --force ``` ## Core Concepts @@ -174,6 +188,7 @@ ivaldi upload Timelines are Ivaldi's equivalent to Git branches, but with enhanced features: - **Auto-shelving**: Uncommitted changes are automatically preserved when switching - **Workspace isolation**: Each timeline maintains its own workspace state +- **Butterfly timelines**: Experimental sandboxes with bidirectional sync (up/down) - **Efficient storage**: Shared content between timelines via content-addressable storage ### Gather and Seal @@ -182,11 +197,23 @@ Ivaldi uses intuitive command names with enhanced user experience: - `seal`: Create commit with auto-generated human-friendly names (like `git commit`) - `seals`: Manage seals with memorable names like "swift-eagle-flies-high-447abe9b" +### Shift (Commit Squashing) +Clean up your commit history before pushing: +- **Interactive selection**: Use arrow keys to select commit range +- **Safety confirmations**: Multiple steps to prevent accidents +- **Force push warnings**: Clear guidance on rewriting history + ### Scout and Harvest Remote operations are designed for selective collaboration: - `scout`: Discover available remote branches - `harvest`: Download only the branches you need +### Submodules +Automatic Git submodule support: +- **Auto-detection**: Finds and converts Git submodules automatically +- **Dual-hash tracking**: BLAKE3 for Ivaldi, Git SHA-1 for GitHub compatibility +- **Timeline-aware**: Submodules track state per timeline + ## Documentation - **[Documentation Wiki](https://javanhut.github.io/IvaldiVCS)** @@ -336,20 +363,26 @@ You upload the code to github with upload keyword not a push. | Diff | `git diff` | `ivaldi diff` | | Branch | `git branch` | `ivaldi timeline create` | | Switch branch | `git checkout` | `ivaldi timeline switch` | +| Experimental branch | (manual) | `ivaldi timeline butterfly` | | Merge | `git merge` | `ivaldi fuse` | +| Squash commits | `git rebase -i` | `ivaldi shift` | | Clone | `git clone` | `ivaldi download` | | Push | `git push` | `ivaldi upload` | | Fetch | `git fetch` | `ivaldi harvest` | +| Submodules | `git submodule` | (automatic) | ## Advantages over Git 1. **Intuitive Commands**: Clear, descriptive command names 2. **Human-Friendly Seals**: Commits get memorable names like "swift-eagle-flies-high-447abe9b" 3. **Auto-Shelving**: Never lose work when switching branches -4. **Selective Sync**: Download only branches you need -5. **Modern Hashing**: BLAKE3 for better security and performance -6. **Clean Storage**: Content-addressable storage with automatic deduplication -7. **GitHub Integration**: First-class GitHub support built-in +4. **Butterfly Timelines**: Safe experimental sandboxes with easy merge up/down +5. **Interactive Squashing**: Arrow-key commit selection vs text editor rebase +6. **Automatic Submodules**: Git submodules auto-detected and converted +7. **Selective Sync**: Download only branches you need +8. **Modern Hashing**: BLAKE3 for better security and performance +9. **Clean Storage**: Content-addressable storage with automatic deduplication +10. **GitHub Integration**: First-class GitHub support built-in ## Development diff --git a/docs/commands/index.md b/docs/commands/index.md index a612b61..ea7b97e 100644 --- a/docs/commands/index.md +++ b/docs/commands/index.md @@ -20,6 +20,8 @@ Complete reference for all Ivaldi commands. | [diff](diff.md) | Compare changes | `git diff` | | [reset](reset.md) | Unstage or reset | `git reset` | | [timeline](timeline.md) | Manage timelines | `git branch` / `git checkout` | +| [butterfly](butterfly.md) | Experimental timelines | (custom) | +| [shift](shift.md) | Squash commits interactively | `git rebase -i` (squash) | | [travel](travel.md) | Interactive time travel | (interactive `git log` + checkout) | | [fuse](fuse.md) | Merge timelines | `git merge` | | [auth](auth.md) | Authenticate with GitHub | (similar to `gh auth`) | @@ -30,6 +32,7 @@ Complete reference for all Ivaldi commands. | [harvest](harvest.md) | Fetch branches | `git fetch` (data) | | [config](config.md) | Configure settings | `git config` | | [exclude](exclude.md) | Ignore files | (edit `.gitignore`) | +| [submodule](submodule.md) | Manage submodules | `git submodule` (auto) | ## Commands by Category @@ -49,9 +52,11 @@ Complete reference for all Ivaldi commands. - [log](log.md) - View commit history - [diff](diff.md) - Compare file changes - [travel](travel.md) - Interactively browse and navigate history +- [shift](shift.md) - Squash commits for cleaner history ### Timeline Management - [timeline](timeline.md) - Create, switch, list, and remove timelines +- [butterfly](butterfly.md) - Create and manage experimental timelines - [fuse](fuse.md) - Merge timelines together ### Remote Operations @@ -62,6 +67,9 @@ Complete reference for all Ivaldi commands. - [scout](scout.md) - Discover available remote timelines - [harvest](harvest.md) - Download specific remote timelines +### Submodule Management +- [submodule](submodule.md) - Automatic Git submodule conversion and management + ## Command Details Click on any command above to see detailed documentation including: @@ -90,6 +98,22 @@ ivaldi timeline switch main # Switch to main ivaldi fuse feature-name to main # Merge ``` +### Experimental Development with Butterflies +```bash +ivaldi timeline butterfly experiment # Create experimental timeline +# ... make experimental changes ... +ivaldi seal "Try new approach" # Commit changes +ivaldi timeline butterfly up # Merge to parent if successful +# Or: ivaldi timeline butterfly down # Pull parent changes +``` + +### Clean History Before Push +```bash +ivaldi shift --last 5 # Squash last 5 commits +# Enter clean commit message +ivaldi upload # Push clean history +``` + ### Collaboration ```bash ivaldi scout # See remote branches diff --git a/docs/comparison.md b/docs/comparison.md index b348a14..b17a578 100644 --- a/docs/comparison.md +++ b/docs/comparison.md @@ -176,12 +176,13 @@ ivaldi fuse --strategy=theirs feature to main | Branches | Timelines | Enhanced with auto-shelving | | Commits | Seals | Added memorable names | | Tags | Not yet implemented | Coming soon | -| Submodules | Not yet implemented | Coming soon | +| Submodules | Automatic conversion | Auto-detects and converts Git submodules | | Hooks | Not yet implemented | Coming soon | | LFS | Built-in chunking | No separate extension needed | | Reflog | MMR history | Append-only, tamper-proof | | Cherry-pick | Time travel diverge | Interactive | -| Rebase | Time travel | Non-destructive option | +| Rebase (squash) | Shift | Interactive with arrow keys | +| Rebase (interactive) | Time travel | Non-destructive option | | Bisect | Not yet implemented | Coming soon | ### Ivaldi-Only Features @@ -190,12 +191,15 @@ Features Ivaldi has that Git doesn't: 1. **Auto-Shelving**: Automatic preservation of changes when switching 2. **Memorable Seal Names**: Human-friendly commit identifiers -3. **Interactive Time Travel**: Arrow-key navigation through history -4. **Selective Sync**: Download only specific branches -5. **Chunk-Level Merging**: 64KB chunks with BLAKE3 hashing -6. **Clean Conflict Resolution**: No markers in workspace files -7. **Content-Addressable Storage**: Automatic deduplication -8. **Merkle Mountain Range**: Cryptographic commit proofs +3. **Butterfly Timelines**: Experimental sandboxes with bidirectional sync +4. **Interactive Commit Squashing**: Arrow-key selection for clean history +5. **Interactive Time Travel**: Arrow-key navigation through history +6. **Automatic Submodule Conversion**: Seamless Git submodule migration +7. **Selective Sync**: Download only specific branches +8. **Chunk-Level Merging**: 64KB chunks with BLAKE3 hashing +9. **Clean Conflict Resolution**: No markers in workspace files +10. **Content-Addressable Storage**: Automatic deduplication +11. **Merkle Mountain Range**: Cryptographic commit proofs ## Workflow Comparison @@ -245,6 +249,33 @@ ivaldi upload ivaldi timeline remove feature ``` +### Experimental Development + +**Git:** +```bash +git checkout -b experiment +# make multiple WIP commits +git add . +git commit -m "WIP 1" +git commit -m "WIP 2" +git commit -m "WIP 3" +# Clean up with rebase +git rebase -i HEAD~3 # Opens editor, manual squash +git push --force +``` + +**Ivaldi:** +```bash +ivaldi timeline butterfly experiment +# make multiple commits +ivaldi seal "WIP 1" +ivaldi seal "WIP 2" +ivaldi seal "WIP 3" +# Clean up with shift +ivaldi shift --last 3 # Interactive arrow-key selection +ivaldi timeline butterfly up # Merge to parent +``` + ### Hotfix **Git:** @@ -338,7 +369,10 @@ Ivaldi repositories are Git-compatible: - **Intuitive commands**: `forge`, `gather`, `seal` vs `init`, `add`, `commit` - **Memorable names**: `swift-eagle` vs `a1b2c3d` - **Auto-shelving**: No manual stashing +- **Butterfly timelines**: Safe experimental sandboxes with easy sync +- **Interactive squashing**: Arrow-key selection vs text editor - **Clean merges**: No conflict markers in files +- **Auto-submodules**: Automatic Git submodule conversion ### 2. Performance @@ -424,8 +458,11 @@ Ivaldi may be easier to learn: **Ivaldi improves on Git with:** - Auto-shelving timelines +- Butterfly experimental timelines - Memorable seal names +- Interactive commit squashing - Selective sync +- Automatic submodule conversion - Clean conflict resolution - Modern cryptography (BLAKE3) - Interactive time travel diff --git a/docs/getting-started.md b/docs/getting-started.md index 89fe6c1..9678b65 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -168,6 +168,27 @@ ivaldi timeline switch main ivaldi fuse feature-login to main ``` +### Experimental Timelines (Butterflies) + +For safe experimentation without polluting your main timeline: + +```bash +# Create an experimental timeline (butterfly) +ivaldi timeline butterfly experiment + +# Make experimental changes +ivaldi gather . +ivaldi seal "Try new approach" + +# If successful, merge up to parent +ivaldi timeline butterfly up + +# Or pull parent changes down +ivaldi timeline butterfly down +``` + +Butterflies are perfect for trying out ideas without committing to a full feature branch. + ## GitHub Integration ### Connect to GitHub @@ -213,6 +234,39 @@ ivaldi harvest feature-payments ivaldi harvest ``` +## Advanced Workflows + +### Clean Up Commit History + +Before pushing to GitHub, you can squash multiple commits into one: + +```bash +# Squash last 3 commits interactively +ivaldi shift --last 3 + +# Or use interactive selection +ivaldi shift +# Use arrow keys to select range + +# After squashing, force push +ivaldi upload --force +``` + +### Working with Submodules + +Ivaldi automatically handles Git submodules: + +```bash +# Clone a repository with submodules +ivaldi download owner/repo-with-submodules +# Submodules are automatically detected and converted! + +# Or convert existing Git repo with submodules +cd git-repo-with-submodules +ivaldi forge +# All submodules automatically initialized and converted +``` + ## Essential Commands Summary | Command | Purpose | Example | @@ -224,7 +278,9 @@ ivaldi harvest | `ivaldi whereami` | Current position | `ivaldi whereami` | | `ivaldi log` | View history | `ivaldi log --limit 10` | | `ivaldi timeline create` | New timeline | `ivaldi timeline create feature-x` | +| `ivaldi timeline butterfly` | Experimental timeline | `ivaldi timeline butterfly experiment` | | `ivaldi timeline switch` | Change timeline | `ivaldi timeline switch main` | +| `ivaldi shift` | Squash commits | `ivaldi shift --last 3` | | `ivaldi fuse` | Merge timelines | `ivaldi fuse feature to main` | | `ivaldi portal add` | Connect GitHub | `ivaldi portal add owner/repo` | | `ivaldi upload` | Push to GitHub | `ivaldi upload` | diff --git a/docs/index.md b/docs/index.md index f3761d7..d659420 100644 --- a/docs/index.md +++ b/docs/index.md @@ -13,11 +13,14 @@ Ivaldi is a next-generation version control system designed as a Git alternative - **Intuitive Commands**: Clear, descriptive command names (`forge`, `gather`, `seal`) - **Timeline-Based Branching**: Enhanced branch management with auto-shelving +- **Butterfly Timelines**: Experimental sandboxes with bidirectional sync and auto-conflict resolution - **Human-Friendly Commits**: Memorable seal names like "swift-eagle-flies-high-447abe9b" - **Never Lose Work**: Auto-shelving preserves changes when switching timelines +- **Clean History**: Interactive commit squashing with safety confirmations - **Selective Sync**: Download only the branches you need - **Modern Cryptography**: BLAKE3 hashing for security and performance - **GitHub Integration**: First-class GitHub support built-in +- **Submodule Support**: Automatic Git submodule conversion with dual-hash tracking - **Interactive Time Travel**: Browse and navigate commits with arrow keys ## Quick Example @@ -31,6 +34,15 @@ ivaldi gather README.md ivaldi seal "Initial commit" # Created seal: swift-eagle-flies-high-447abe9b +# Create experimental timeline (butterfly) +ivaldi timeline butterfly experiment +# Make changes, test safely +ivaldi timeline butterfly up # Merge back to parent + +# Clean up commits before pushing +ivaldi shift --last 3 +# Squash into one clean commit + # Create and switch timelines ivaldi timeline create feature-auth ivaldi timeline switch main @@ -51,15 +63,17 @@ ivaldi upload - [All Commands Overview](commands/index.md) - **Repository**: [forge](commands/forge.md) • [status](commands/status.md) • [whereami](commands/whereami.md) • [config](commands/config.md) - **Files**: [gather](commands/gather.md) • [seal](commands/seal.md) • [reset](commands/reset.md) • [exclude](commands/exclude.md) -- **History**: [log](commands/log.md) • [diff](commands/diff.md) • [travel](commands/travel.md) -- **Timelines**: [timeline](commands/timeline.md) • [fuse](commands/fuse.md) +- **History**: [log](commands/log.md) • [diff](commands/diff.md) • [travel](commands/travel.md) • [shift](commands/shift.md) +- **Timelines**: [timeline](commands/timeline.md) • [butterfly](commands/butterfly.md) • [fuse](commands/fuse.md) - **Remote**: [portal](commands/portal.md) • [download](commands/download.md) • [upload](commands/upload.md) • [sync](sync-command.md) • [scout](commands/scout.md) • [harvest](commands/harvest.md) +- **Submodules**: [submodule](commands/submodule.md) ### Guides - [Basic Workflow](guides/basic-workflow.md) - [Timeline Branching](guides/branching.md) - [Team Collaboration](guides/collaboration.md) - [GitHub Integration](guides/github-integration.md) +- [Git Migration with Submodules](guides/git-migration-with-submodules.md) ### Reference - [Comparison with Git](comparison.md) @@ -73,6 +87,18 @@ Timelines are Ivaldi's enhanced version of Git branches: - **Workspace isolation**: Each timeline maintains its own state - **Efficient storage**: Shared content via content-addressable storage +### Butterfly Timelines +Experimental sandboxes for safe development: +- **Lightweight branching**: Create experimental timelines from any parent +- **Bidirectional sync**: Merge changes up to parent or pull parent changes down +- **Nested butterflies**: Chain butterflies for complex feature development +- **Auto-conflict resolution**: Fast-forward merge strategy handles conflicts automatically +```bash +ivaldi timeline butterfly experiment +# Work safely in isolated sandbox +ivaldi timeline butterfly up # Merge to parent when ready +``` + ### Human-Friendly Seal Names Every commit gets a memorable name: ``` @@ -80,6 +106,15 @@ swift-eagle-flies-high-447abe9b ``` Much easier to remember than `447abe9b1234567890abcdef`! +### Interactive Commit Squashing +Clean up your history before pushing: +```bash +ivaldi shift --last 5 +# Interactive arrow-key selection +# Safe multi-step confirmation +# Clean single commit result +``` + ### Interactive Time Travel Browse commits with arrow keys and create branches from any point: ```bash @@ -96,6 +131,16 @@ ivaldi fuse feature-auth to main # Clean interactive resolution ``` +### Automatic Submodule Support +Seamless Git submodule conversion: +- **Auto-detection**: Automatically finds and converts Git submodules +- **Dual-hash tracking**: BLAKE3 for Ivaldi, Git SHA-1 for GitHub compatibility +- **Timeline-aware**: Submodules track per-timeline state +```bash +ivaldi download owner/repo-with-submodules +# Automatically clones and converts all submodules +``` + ## Quick Reference | Git Command | Ivaldi Command | @@ -106,12 +151,14 @@ ivaldi fuse feature-auth to main | `git branch` | `ivaldi timeline create` | | `git checkout` | `ivaldi timeline switch` | | `git merge` | `ivaldi fuse` | +| `git rebase -i` | `ivaldi shift` | | `git clone` | `ivaldi download` | | `git push` | `ivaldi upload` | | `git pull` | `ivaldi sync` | | `git fetch` | `ivaldi harvest` | | `git status` | `ivaldi status` | | `git log` | `ivaldi log` | +| `git submodule` | `ivaldi submodule` (auto) | ## Get Started diff --git a/internal/butterfly/metadata.go b/internal/butterfly/metadata.go index ad14dad..d07ada9 100644 --- a/internal/butterfly/metadata.go +++ b/internal/butterfly/metadata.go @@ -5,7 +5,6 @@ import ( "fmt" "path/filepath" - "github.com/javanhut/Ivaldi-vcs/internal/cas" bolt "go.etcd.io/bbolt" ) @@ -190,13 +189,3 @@ func (s *MetadataStore) MarkOrphaned(name string, originalParent string) error { bf.OriginalParent = originalParent return s.StoreButterfly(bf) } - -func hashToBytes(h cas.Hash) []byte { - return h[:] -} - -func bytesToHash(b []byte) cas.Hash { - var h cas.Hash - copy(h[:], b) - return h -}