From 3e2c21b52c97196c8dae52465ad4b3e18dad5d3f Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 15:00:05 +0000 Subject: [PATCH 01/20] docs: add Bruno import & YAML folder sync implementation plan Comprehensive plan covering: - Phase 1-3: Bruno .bru format parser and CLI import command - Phase 4: DevTools-native YAML collection format for folder sync - Phase 5-6: fsnotify-based file watcher with bidirectional sync - Phase 7: RPC endpoints and desktop integration - Phase 8: CLI collection runner Adapted to fit existing DevTools architecture (translate layer, Reader/Writer services, eventstream, mhttp/mfile models, ioworkspace bundle pattern). https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 1048 +++++++++++++++++ 1 file changed, 1048 insertions(+) create mode 100644 packages/server/docs/specs/BRUNO_FOLDER_SYNC.md diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md new file mode 100644 index 00000000..c40bfa48 --- /dev/null +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -0,0 +1,1048 @@ +# Bruno Import & YAML Folder Sync Plan + +## Overview + +This document describes the plan to add: + +1. **Bruno Import** — Parse `.bru` collections and convert them into DevTools' data model +2. **DevTools YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own YAML format, inspired by Bruno's folder sync architecture +3. **CLI Runner Integration** — Execute imported/synced collections from the CLI + +The goal is to support Bruno users migrating to DevTools, while building a first-class filesystem-based workflow using DevTools' own YAML format for git-friendly, local-first API development. 
+ +--- + +## Part 1: Bruno Import + +### 1.1 Architecture Fit + +Bruno import follows the exact same pattern as the existing HAR, Postman, and curl importers: + +``` +.bru files on disk + → tbruno.ConvertBrunoCollection() + → BrunoResolved (mhttp.HTTP, mfile.File, etc.) + → importer.RunImport() + services.Create() + → SQLite DB +``` + +| Layer | Location | Pattern | +|-------|----------|---------| +| CLI Command | `apps/cli/cmd/import.go` | Add `importBrunoCmd` | +| Translator | `packages/server/pkg/translate/tbruno/` | New package | +| BRU Parser | `packages/server/pkg/translate/tbruno/bruparser/` | Hand-written recursive descent | +| Importer | `apps/cli/internal/importer/` | Existing `RunImport()` callback | + +### 1.2 BRU Parser (`tbruno/bruparser/`) + +The `.bru` format has three block types that need a hand-written parser (no external dependency needed): + +**Dictionary blocks** (key-value): +```bru +headers { + content-type: application/json + ~disabled-header: value +} +``` + +**Text blocks** (freeform content): +```bru +body:json { + { + "key": "value" + } +} +``` + +**List blocks** (arrays inside dictionary blocks): +```bru +meta { + tags: [ + regression + smoke + ] +} +``` + +#### Files + +``` +packages/server/pkg/translate/tbruno/bruparser/ +├── parser.go # .bru → BruFile struct (recursive descent) +├── serializer.go # BruFile → .bru string (for round-trip tests) +├── types.go # BruFile, Block, KeyValue types +└── parser_test.go # Test with real .bru samples +``` + +#### Core Types + +```go +package bruparser + +// BruFile represents a fully parsed .bru file +type BruFile struct { + Meta Meta + HTTP *HTTPBlock // get/post/put/delete/patch/options/head/connect/trace block + GraphQL *GraphQLBlock + Params *ParamsBlock // params:query, params:path + Headers []KeyValue + Auth *AuthBlock // auth:bearer, auth:basic, auth:apikey, etc. 
+ Body *BodyBlock // body:json, body:xml, body:text, body:form-urlencoded, body:multipart-form, body:graphql + Script *ScriptBlock // script:pre-request, script:post-response + Tests string // freeform JS + Vars *VarsBlock // vars:pre-request, vars:post-response + Assertions []AssertEntry // assert block + Docs string // docs block +} + +type Meta struct { + Name string + Type string // "http", "graphql", "grpc" + Seq int + Tags []string +} + +type KeyValue struct { + Key string + Value string + Enabled bool // false if prefixed with ~ +} + +type AssertEntry struct { + Expression string // e.g. "res.status eq 200" + Enabled bool +} +``` + +#### Parser Implementation Notes + +- **Line-based parsing**: Read line by line, detect block openings (`blockname {`), track nesting depth +- **Dictionary vs Text blocks**: Dictionary blocks have `key: value` lines; text blocks (body, script, tests, docs) have freeform content with 2-space indent +- **Disabled items**: `~` prefix means disabled (`Enabled: false`) +- **Multiline values**: `'''...'''` syntax with optional `@contentType()` annotation +- **Quoted keys**: Keys can be quoted with single quotes for special characters +- **Assert delimiter**: Assert keys use `: ` (space after colon) as the delimiter since keys can contain colons + +### 1.3 Bruno Collection Reader (`tbruno/`) + +Reads the full Bruno collection directory structure and converts to DevTools models. 
+ +#### Files + +``` +packages/server/pkg/translate/tbruno/ +├── bruparser/ # .bru parser (above) +├── converter.go # Main conversion: directory → BrunoResolved +├── converter_test.go # Tests with sample collections +├── types.go # BrunoResolved, ConvertOptions +├── collection.go # bruno.json / opencollection.yml detection + parsing +├── environment.go # Environment .bru/.yml → menv conversion +└── testdata/ # Sample Bruno collections for tests + ├── bru-format/ + │ ├── bruno.json + │ ├── collection.bru + │ ├── environments/ + │ │ ├── dev.bru + │ │ └── prod.bru + │ ├── users/ + │ │ ├── folder.bru + │ │ ├── get-users.bru + │ │ └── create-user.bru + │ └── auth/ + │ ├── folder.bru + │ └── login.bru + └── yml-format/ + ├── opencollection.yml + └── ... +``` + +#### Core Types + +```go +package tbruno + +// BrunoResolved contains all entities extracted from a Bruno collection, +// following the same pattern as tpostmanv2.PostmanResolvedV2 and harv2.HARResolved. +type BrunoResolved struct { + // HTTP entities + HTTPRequests []mhttp.HTTP + HTTPHeaders []mhttp.HTTPHeader + HTTPSearchParams []mhttp.HTTPSearchParam + HTTPBodyForms []mhttp.HTTPBodyForm + HTTPBodyUrlencoded []mhttp.HTTPBodyUrlencoded + HTTPBodyRaw []mhttp.HTTPBodyRaw + HTTPAsserts []mhttp.HTTPAssert + + // File hierarchy (folders + request files) + Files []mfile.File + + // Environments + Environments []menv.Env + EnvironmentVars []menv.Variable + + // Flow (optional — one flow with sequential request nodes) + Flow *mflow.Flow + FlowNodes []mflow.Node + RequestNodes []mflow.NodeRequest + FlowEdges []mflow.Edge +} + +type ConvertOptions struct { + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap // Optional parent folder to import into + CollectionName string // Override collection name + CreateFlow bool // Whether to generate a flow from the collection + GenerateFiles bool // Whether to create File entries for hierarchy +} + +// ConvertBrunoCollection reads a Bruno collection directory and returns resolved 
entities. +func ConvertBrunoCollection(collectionPath string, opts ConvertOptions) (*BrunoResolved, error) +``` + +#### Conversion Logic + +1. **Detect format**: Check for `bruno.json` (BRU format) or `opencollection.yml` (YML format) +2. **Parse config**: Read collection name, version, ignore patterns from config file +3. **Walk directory tree** (depth-first): + - Skip ignored paths (`node_modules`, `.git`, dotenv files) + - For each directory: create `mfile.File` with `ContentTypeFolder` + - For each `.bru` file: parse with `bruparser.Parse()`, convert to `mhttp.HTTP` + children + - For `folder.bru`: extract folder-level metadata (auth, headers) — apply as defaults to child requests + - For `collection.bru`: extract collection-level defaults + - For `environments/*.bru`: convert to `menv.Env` + `menv.Variable` +4. **Map to DevTools models**: + - BRU `meta.name` → `mhttp.HTTP.Name` + - BRU method block (`get`, `post`, etc.) → `mhttp.HTTP.Method` + `mhttp.HTTP.Url` + - BRU `headers {}` → `[]mhttp.HTTPHeader` + - BRU `params:query {}` → `[]mhttp.HTTPSearchParam` + - BRU `body:json {}` → `mhttp.HTTPBodyRaw` with `BodyKind = Raw` + - BRU `body:form-urlencoded {}` → `[]mhttp.HTTPBodyUrlencoded` + - BRU `body:multipart-form {}` → `[]mhttp.HTTPBodyForm` + - BRU `assert {}` → `[]mhttp.HTTPAssert` + - BRU `meta.seq` → `mfile.File.Order` + - Directory nesting → `mfile.File.ParentID` hierarchy +5. **Generate flow** (optional): Create a linear flow with request nodes ordered by `seq` + +#### Mapping Table: Bruno → DevTools + +| Bruno (.bru) | DevTools Model | Notes | +|---|---|---| +| `meta.name` | `mhttp.HTTP.Name` | | +| `meta.seq` | `mfile.File.Order` | Float64 ordering | +| `meta.type` | Determines request type | "http", "graphql" | +| `get/post/put/...` block | `mhttp.HTTP.Method` | Block name = method | +| `url` in method block | `mhttp.HTTP.Url` | | +| `body` in method block | `mhttp.HTTP.BodyKind` | "json"→Raw, "form"→FormData, etc. 
| +| `headers {}` | `[]mhttp.HTTPHeader` | `~` prefix → `Enabled: false` | +| `params:query {}` | `[]mhttp.HTTPSearchParam` | `~` prefix → `Enabled: false` | +| `params:path {}` | Embedded in URL | DevTools uses URL template vars | +| `body:json {}` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | +| `body:xml {}` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | +| `body:text {}` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | +| `body:form-urlencoded {}` | `[]mhttp.HTTPBodyUrlencoded` | | +| `body:multipart-form {}` | `[]mhttp.HTTPBodyForm` | | +| `auth:bearer` | `mhttp.HTTPHeader` | Converted to Authorization header | +| `auth:basic` | `mhttp.HTTPHeader` | Converted to Authorization header | +| `auth:apikey` | `mhttp.HTTPHeader` | Key/value in header or query param | +| `assert {}` | `[]mhttp.HTTPAssert` | Expression syntax may differ | +| `script:pre-request {}` | Not imported (log warning) | DevTools uses JS nodes in flows | +| `script:post-response {}` | Not imported (log warning) | DevTools uses JS nodes in flows | +| `tests {}` | Not imported (log warning) | DevTools uses assert system | +| `vars:pre-request {}` | Not imported (log warning) | DevTools uses flow variables | +| `docs {}` | `mhttp.HTTP.Description` | | +| Directory structure | `mfile.File` hierarchy | Folder nesting preserved | +| `folder.bru` | Folder metadata | Auth/headers applied to children | +| `environments/*.bru` | `menv.Env` + `menv.Variable` | | +| `bruno.json` | Collection config | Name used as root folder name | + +### 1.4 CLI Command + +Add `import bruno <path>` to the existing import command tree: + +```go +// apps/cli/cmd/import.go — add alongside importCurlCmd, importPostmanCmd, importHarCmd + +var importBrunoCmd = &cobra.Command{ + Use: "bruno [directory]", + Short: "Import a Bruno collection directory", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + collectionPath := args[0] + return importer.RunImport(ctx, logger, workspaceID, folderID, + func(ctx 
context.Context, services *common.Services, wsID idwrap.IDWrap, folderIDPtr *idwrap.IDWrap) error { + resolved, err := tbruno.ConvertBrunoCollection(collectionPath, tbruno.ConvertOptions{ + WorkspaceID: wsID, + FolderID: folderIDPtr, + GenerateFiles: true, + CreateFlow: false, + }) + if err != nil { + return err + } + // Save all resolved entities via services (same pattern as Postman/HAR) + return saveResolved(ctx, services, resolved) + }, + ) + }, +} +``` + +### 1.5 Testing Strategy + +- **Parser unit tests**: Port/create test cases from real `.bru` files — parse, verify struct output +- **Round-trip tests**: Parse `.bru` → serialize back → parse again, verify equality +- **Converter tests**: Use `testdata/` sample collections → verify `BrunoResolved` output +- **Integration test**: Import sample collection into in-memory SQLite → verify all entities created correctly + +--- + +## Part 2: DevTools YAML Folder Sync + +This is the core feature — a bidirectional filesystem sync that uses DevTools' own YAML format, inspired by Bruno's chokidar-based sync but built for Go. + +### 2.1 Design Goals + +1. **Git-friendly**: YAML files that diff and merge cleanly +2. **Human-readable**: Developers can edit requests in their editor/IDE +3. **Bidirectional**: Changes in the UI persist to disk; changes on disk reflect in the UI +4. **Compatible**: Similar UX to Bruno's folder sync (users understand the concept) +5. **DevTools-native**: Uses DevTools' own YAML format, not `.bru` format + +### 2.2 DevTools Collection YAML Format + +Inspired by `yamlflowsimplev2` but simplified for individual request files (not full flows). 
+ +#### Directory Structure + +``` +my-collection/ +├── devtools.yaml # Collection config (name, version, settings) +├── environments/ +│ ├── dev.yaml +│ └── prod.yaml +├── users/ +│ ├── _folder.yaml # Folder metadata (optional) +│ ├── get-users.yaml # Individual request +│ └── create-user.yaml +├── auth/ +│ ├── _folder.yaml +│ └── login.yaml +└── flows/ # Optional: flow definitions + └── smoke-test.yaml # Uses yamlflowsimplev2 format +``` + +#### Collection Config (`devtools.yaml`) + +```yaml +version: "1" +name: My API Collection +settings: + base_url: "https://api.example.com" + timeout: 30 +``` + +#### Request File (`get-users.yaml`) + +```yaml +name: Get Users +method: GET +url: "{{base_url}}/users" +description: "Fetch all users with optional pagination" + +headers: + - name: Authorization + value: "Bearer {{token}}" + - name: Accept + value: application/json + - name: X-Debug + value: "true" + enabled: false + +query_params: + - name: page + value: "1" + - name: limit + value: "10" + enabled: false + +body: + type: none # none | raw | form-data | urlencoded +``` + +#### Request with JSON body (`create-user.yaml`) + +```yaml +name: Create User +method: POST +url: "{{base_url}}/users" + +headers: + - name: Content-Type + value: application/json + +body: + type: raw + content: | + { + "name": "John Doe", + "email": "john@example.com" + } + +assertions: + - "res.status eq 201" + - "res.body.id neq null" +``` + +#### Request with form body (`upload.yaml`) + +```yaml +name: Upload File +method: POST +url: "{{base_url}}/upload" + +body: + type: form-data + fields: + - name: file + value: "@./fixtures/test.png" + description: "File to upload" + - name: description + value: "Test upload" +``` + +#### Environment (`environments/dev.yaml`) + +```yaml +name: Development +variables: + - name: base_url + value: "http://localhost:3000" + - name: token + value: "dev-token-123" + secret: true +``` + +#### Folder metadata (`_folder.yaml`) + +```yaml +name: Users API +order: 1 
+description: "User management endpoints" + +# Folder-level defaults (applied to all requests in this folder) +defaults: + headers: + - name: X-Api-Version + value: "v2" +``` + +#### Flow file (`flows/smoke-test.yaml`) + +```yaml +# Uses existing yamlflowsimplev2 format exactly +name: Smoke Test +steps: + - request: + name: Login + method: POST + url: "{{base_url}}/auth/login" + body: + type: raw + content: '{"email": "test@example.com", "password": "test"}' + - request: + name: Get Profile + depends_on: [Login] + method: GET + url: "{{base_url}}/users/me" + headers: + Authorization: "Bearer {{Login.response.body.token}}" +``` + +### 2.3 YAML Serializer/Deserializer (`tyamlcollection/`) + +New package for the collection YAML format: + +``` +packages/server/pkg/translate/tyamlcollection/ +├── types.go # YAML struct definitions with yaml tags +├── parser.go # YAML file → DevTools models +├── serializer.go # DevTools models → YAML files +├── collection.go # devtools.yaml config handling +├── request.go # Single request YAML ↔ mhttp conversion +├── environment.go # Environment YAML ↔ menv conversion +├── folder.go # _folder.yaml ↔ folder metadata +└── parser_test.go # Round-trip tests +``` + +#### Core Types + +```go +package tyamlcollection + +// CollectionConfig represents devtools.yaml +type CollectionConfig struct { + Version string `yaml:"version"` + Name string `yaml:"name"` + Settings *CollectionSettings `yaml:"settings,omitempty"` +} + +type CollectionSettings struct { + BaseURL string `yaml:"base_url,omitempty"` + Timeout int `yaml:"timeout,omitempty"` +} + +// RequestFile represents a single .yaml request file +type RequestFile struct { + Name string `yaml:"name"` + Method string `yaml:"method"` + URL string `yaml:"url"` + Description string `yaml:"description,omitempty"` + Headers []HeaderEntry `yaml:"headers,omitempty"` + QueryParams []HeaderEntry `yaml:"query_params,omitempty"` + Body *BodyDef `yaml:"body,omitempty"` + Assertions []AssertionEntry 
`yaml:"assertions,omitempty"` +} + +type HeaderEntry struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Enabled *bool `yaml:"enabled,omitempty"` // Default: true + Description string `yaml:"description,omitempty"` +} + +type BodyDef struct { + Type string `yaml:"type"` // "none", "raw", "form-data", "urlencoded" + Content string `yaml:"content,omitempty"` // For raw bodies + Fields []HeaderEntry `yaml:"fields,omitempty"` // For form-data / urlencoded +} + +type AssertionEntry struct { + Value string `yaml:"value,omitempty"` + Enabled *bool `yaml:"enabled,omitempty"` + Description string `yaml:"description,omitempty"` +} + +// Simplified assertion: can be just a string +// Custom unmarshaler handles both "res.status eq 200" and {value: ..., enabled: false} + +type EnvironmentFile struct { + Name string `yaml:"name"` + Variables []EnvVariable `yaml:"variables"` +} + +type EnvVariable struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Secret bool `yaml:"secret,omitempty"` +} + +type FolderMeta struct { + Name string `yaml:"name,omitempty"` + Order int `yaml:"order,omitempty"` + Description string `yaml:"description,omitempty"` + Defaults *FolderDefaults `yaml:"defaults,omitempty"` +} + +type FolderDefaults struct { + Headers []HeaderEntry `yaml:"headers,omitempty"` +} +``` + +#### Conversion Functions + +```go +// ParseRequestFile reads a .yaml request file and returns DevTools model entities. +func ParseRequestFile(data []byte, opts ParseOptions) (*RequestResolved, error) + +// SerializeRequest converts DevTools model entities to YAML bytes for a single request. +func SerializeRequest(http mhttp.HTTP, headers []mhttp.HTTPHeader, + params []mhttp.HTTPSearchParam, bodyRaw *mhttp.HTTPBodyRaw, + bodyForms []mhttp.HTTPBodyForm, bodyUrlencoded []mhttp.HTTPBodyUrlencoded, + asserts []mhttp.HTTPAssert) ([]byte, error) + +// ReadCollection reads a full collection directory into DevTools models. 
+func ReadCollection(collectionPath string, opts ConvertOptions) (*CollectionResolved, error) + +// WriteCollection writes a full workspace/collection to disk as YAML files. +func WriteCollection(collectionPath string, bundle *ioworkspace.WorkspaceBundle) error + +// WriteRequest writes a single request to a YAML file on disk. +func WriteRequest(filePath string, http mhttp.HTTP, children RequestChildren) error +``` + +### 2.4 File Watcher (`packages/server/pkg/foldersync/`) + +Replaces Bruno's chokidar with Go's `fsnotify`. This is a new package in the server. + +``` +packages/server/pkg/foldersync/ +├── watcher.go # Core CollectionWatcher using fsnotify +├── debouncer.go # Write stabilization (coalesce rapid events) +├── filter.go # Ignore patterns (.git, node_modules, etc.) +├── sync.go # Bidirectional sync coordinator +├── types.go # Event types, config +└── watcher_test.go # Integration tests with temp directories +``` + +#### Watcher + +```go +package foldersync + +import "github.com/fsnotify/fsnotify" + +type EventType int +const ( + EventFileCreated EventType = iota + EventFileChanged + EventFileDeleted + EventDirCreated + EventDirDeleted +) + +type WatchEvent struct { + Type EventType + Path string // Absolute path + RelPath string // Relative to collection root +} + +// CollectionWatcher watches a collection directory for filesystem changes. +type CollectionWatcher struct { + collectionPath string + ignorePatterns []string + watcher *fsnotify.Watcher + events chan WatchEvent + debouncer *Debouncer + selfWrites *SelfWriteTracker // Suppress events from our own writes +} + +func NewCollectionWatcher(collectionPath string, opts WatcherOptions) (*CollectionWatcher, error) +func (w *CollectionWatcher) Start(ctx context.Context) error +func (w *CollectionWatcher) Events() <-chan WatchEvent +func (w *CollectionWatcher) Stop() error +``` + +#### Debouncer + +```go +// Debouncer coalesces rapid filesystem events for the same path. 
+// fsnotify fires multiple events for a single write operation. +// We wait for stabilityThreshold (80ms) of no events before emitting. +type Debouncer struct { + stabilityThreshold time.Duration // 80ms (matching Bruno) + timers map[string]*time.Timer + mu sync.Mutex + output chan WatchEvent +} + +func NewDebouncer(threshold time.Duration) *Debouncer +func (d *Debouncer) Add(event WatchEvent) +func (d *Debouncer) Events() <-chan WatchEvent +``` + +#### Self-Write Tracker + +```go +// SelfWriteTracker prevents infinite loops when the sync engine writes a file +// and the watcher detects the change. +type SelfWriteTracker struct { + mu sync.Mutex + writes map[string]time.Time // path → write timestamp + lifetime time.Duration // How long to suppress (e.g., 2s) +} + +func (t *SelfWriteTracker) MarkWrite(path string) +func (t *SelfWriteTracker) IsSelfWrite(path string) bool +``` + +#### Key Implementation Notes + +- **Recursive watching**: `fsnotify` doesn't watch subdirectories automatically. Walk the tree on start and add watchers for new directories on `EventDirCreated` +- **Initial scan**: On start, walk the tree and emit `EventFileCreated` for all existing `.yaml` files (like chokidar's `ignoreInitial: false`) +- **Ignore patterns**: Filter `.git`, `node_modules`, dotfiles, non-`.yaml` files +- **WSL compatibility**: Detect WSL paths and use polling mode if needed +- **Max depth**: Limit to 20 levels (matching Bruno) + +### 2.5 Sync Coordinator (`foldersync/sync.go`) + +The central orchestrator that bridges the filesystem watcher with the DevTools database/services. + +```go +// SyncCoordinator manages bidirectional sync between filesystem and database. 
+type SyncCoordinator struct { + collectionPath string + workspaceID idwrap.IDWrap + format tyamlcollection.CollectionConfig + + // Filesystem → Database + watcher *CollectionWatcher + parser *tyamlcollection.Parser + + // Database → Filesystem + selfWrites *SelfWriteTracker + serializer *tyamlcollection.Serializer + autosaver *AutoSaver + + // State mapping + mu sync.RWMutex + pathToID map[string]idwrap.IDWrap // filepath → entity ID (UID preservation) + idToPath map[idwrap.IDWrap]string // entity ID → filepath (reverse mapping) + + // Services + services *common.Services + + // Event publishing for real-time sync to UI + publisher *eventstream.Publisher +} + +func NewSyncCoordinator(opts SyncOptions) (*SyncCoordinator, error) +func (s *SyncCoordinator) Start(ctx context.Context) error +func (s *SyncCoordinator) Stop() error +``` + +#### Disk → Database Flow + +``` +Filesystem event (watcher) + → Debounce (80ms stabilization) + → Check self-write tracker (skip if we wrote it) + → Parse YAML file + → Look up existing entity by path→ID mapping + → If new file: Create HTTP + File + children via services + → If changed file: Update HTTP + children via services + → If deleted file: Delete HTTP + File via services + → Publish events to eventstream (UI updates in real-time) +``` + +#### Database → Disk Flow + +``` +User edits in UI + → RPC handler persists to database + → Eventstream publishes change event + → SyncCoordinator receives event via subscription + → AutoSaver debounces (500ms, matching Bruno) + → Serialize entity to YAML + → Mark path in self-write tracker + → Write YAML file to disk + → Watcher detects change → self-write tracker suppresses +``` + +#### AutoSaver + +```go +// AutoSaver handles debounced persistence from database changes to disk. +// Matches Bruno's 500ms debounce behavior. 
+type AutoSaver struct { + delay time.Duration // 500ms + timers map[idwrap.IDWrap]*time.Timer + mu sync.Mutex + writeFn func(entityID idwrap.IDWrap) error +} + +func (a *AutoSaver) ScheduleSave(entityID idwrap.IDWrap) +func (a *AutoSaver) Flush() // Force all pending saves (for graceful shutdown) +``` + +### 2.6 Integration with Existing Architecture + +#### RPC Layer Integration + +New RPC endpoints for folder sync management: + +``` +// In packages/spec — new TypeSpec definitions + +@route("/folder-sync") +interface FolderSync { + // Open a collection folder for sync + @post open(workspaceId: string, collectionPath: string): FolderSyncStatus; + + // Close/stop syncing a collection + @post close(workspaceId: string): void; + + // Get current sync status + @get status(workspaceId: string): FolderSyncStatus; + + // Export workspace as a collection folder + @post export(workspaceId: string, outputPath: string): void; +} +``` + +#### Server Startup + +The folder sync coordinator starts/stops with the server: + +```go +// packages/server/internal/app/app.go or similar + +// On workspace open with folder sync enabled: +coordinator := foldersync.NewSyncCoordinator(foldersync.SyncOptions{ + CollectionPath: "/path/to/collection", + WorkspaceID: workspaceID, + Services: services, + Publisher: publisher, +}) +coordinator.Start(ctx) + +// On workspace close: +coordinator.Stop() +``` + +#### Eventstream Integration + +Subscribe to entity change events for Database → Disk sync: + +```go +// Subscribe to HTTP entity changes for this workspace +sub := publisher.Subscribe(eventstream.Topic{ + WorkspaceID: workspaceID, + EntityTypes: []eventstream.EntityType{ + eventstream.EntityHTTP, + eventstream.EntityHTTPHeader, + eventstream.EntityHTTPSearchParam, + eventstream.EntityHTTPBodyRaw, + eventstream.EntityHTTPBodyForm, + eventstream.EntityHTTPBodyUrlencoded, + eventstream.EntityHTTPAssert, + eventstream.EntityFile, + }, +}) + +for event := range sub.Events() { + switch event.Op { + 
case eventstream.OpInsert, eventstream.OpUpdate: + autosaver.ScheduleSave(event.ID) + case eventstream.OpDelete: + deleteFileFromDisk(event.ID) + } +} +``` + +### 2.7 Safety Mechanisms + +| Mechanism | Implementation | Matches Bruno? | +|---|---|---| +| Path validation | `filepath.Rel()` must not escape collection root | Yes | +| Filename sanitization | Strip invalid chars, truncate at 255 | Yes | +| Write stabilization | 80ms debounce on watcher events | Yes (stabilityThreshold: 80) | +| Autosave debounce | 500ms debounce on UI changes | Yes | +| Self-write suppression | Track recently-written paths (2s window) | Improved (Bruno re-parses) | +| Atomic writes | Write to temp file, then `os.Rename()` | Improved (Bruno doesn't) | +| UID preservation | `pathToID` map persists across re-parses | Yes | +| Conflict resolution | Disk wins over in-memory (matching Bruno) | Yes | +| Large file handling | Skip files >5MB, warn on collections >20MB | Yes | +| Cross-platform paths | `filepath.Clean/Rel/Join` consistently | Yes | +| Line endings | Handle `\r\n` and `\n` | Yes | + +--- + +## Part 3: CLI Runner Integration + +### 3.1 Run from Collection Folder + +Add a CLI command to run requests directly from a synced collection folder: + +``` +devtools run ./my-collection # Run all requests sequentially +devtools run ./my-collection/users/get-users.yaml # Run single request +devtools run ./my-collection --env dev # With environment +devtools run ./my-collection/flows/smoke-test.yaml # Run a flow +``` + +This reuses the existing `apps/cli/internal/runner/` infrastructure: + +1. Read collection folder → `tyamlcollection.ReadCollection()` +2. Create in-memory SQLite → populate with resolved entities +3. Execute via existing `runner.RunFlow()` or direct HTTP execution +4. 
Report results + +### 3.2 CLI Commands + +```go +// apps/cli/cmd/run.go — extend existing run command + +var runCollectionCmd = &cobra.Command{ + Use: "collection [directory-or-file]", + Short: "Run requests from a DevTools collection folder", + RunE: func(cmd *cobra.Command, args []string) error { + // 1. Read collection + // 2. Import into in-memory DB + // 3. Execute via runner + // 4. Report results + }, +} +``` + +--- + +## Part 4: Implementation Phases + +### Phase 1: BRU Parser (Foundation) + +**Scope**: Hand-written `.bru` parser + serializer with full test coverage. + +**Files**: +- `packages/server/pkg/translate/tbruno/bruparser/types.go` +- `packages/server/pkg/translate/tbruno/bruparser/parser.go` +- `packages/server/pkg/translate/tbruno/bruparser/serializer.go` +- `packages/server/pkg/translate/tbruno/bruparser/parser_test.go` + +**Dependencies**: None (pure Go, no external libs) + +**Testing**: Parse real `.bru` files, verify struct output, round-trip test + +**Estimate**: Self-contained, no cross-cutting concerns + +### Phase 2: Bruno Collection Converter + +**Scope**: Walk Bruno collection directory → produce `BrunoResolved` with all DevTools models. + +**Files**: +- `packages/server/pkg/translate/tbruno/types.go` +- `packages/server/pkg/translate/tbruno/converter.go` +- `packages/server/pkg/translate/tbruno/collection.go` +- `packages/server/pkg/translate/tbruno/environment.go` +- `packages/server/pkg/translate/tbruno/converter_test.go` +- `packages/server/pkg/translate/tbruno/testdata/` (sample collections) + +**Dependencies**: Phase 1 (BRU parser), existing models (`mhttp`, `mfile`, `menv`) + +**Testing**: Convert sample Bruno collections, verify all entities + +### Phase 3: CLI Import Command + +**Scope**: Add `import bruno <path>` CLI command. 
+ +**Files**: +- `apps/cli/cmd/import.go` (add `importBrunoCmd`) + +**Dependencies**: Phase 2 (converter), existing importer infrastructure + +**Testing**: End-to-end import into in-memory SQLite + +### Phase 4: DevTools YAML Collection Format + +**Scope**: Define and implement DevTools' own YAML format for individual request files. + +**Files**: +- `packages/server/pkg/translate/tyamlcollection/types.go` +- `packages/server/pkg/translate/tyamlcollection/parser.go` +- `packages/server/pkg/translate/tyamlcollection/serializer.go` +- `packages/server/pkg/translate/tyamlcollection/request.go` +- `packages/server/pkg/translate/tyamlcollection/environment.go` +- `packages/server/pkg/translate/tyamlcollection/folder.go` +- `packages/server/pkg/translate/tyamlcollection/collection.go` +- `packages/server/pkg/translate/tyamlcollection/parser_test.go` + +**Dependencies**: Existing models, `gopkg.in/yaml.v3` + +**Testing**: Round-trip tests (parse → serialize → parse), verify equivalence + +### Phase 5: File Watcher + +**Scope**: `fsnotify`-based watcher with debouncing, filtering, self-write tracking. + +**Files**: +- `packages/server/pkg/foldersync/watcher.go` +- `packages/server/pkg/foldersync/debouncer.go` +- `packages/server/pkg/foldersync/filter.go` +- `packages/server/pkg/foldersync/types.go` +- `packages/server/pkg/foldersync/watcher_test.go` + +**Dependencies**: `github.com/fsnotify/fsnotify` + +**Testing**: Create temp dirs, write files, verify events + +### Phase 6: Sync Coordinator + +**Scope**: Bidirectional sync engine — disk↔database with eventstream integration. 
+ +**Files**: +- `packages/server/pkg/foldersync/sync.go` +- `packages/server/pkg/foldersync/autosaver.go` + +**Dependencies**: Phase 4 (YAML format), Phase 5 (watcher), services, eventstream + +**Testing**: Full integration tests — modify files on disk, verify DB updates; modify DB, verify files written + +### Phase 7: RPC Endpoints + Desktop Integration + +**Scope**: TypeSpec definitions for folder sync management, RPC handlers, desktop UI. + +**Files**: +- `packages/spec/` (TypeSpec definitions) +- `packages/server/internal/api/` (RPC handlers) +- `packages/client/` (React hooks/services) +- `apps/desktop/` (Electron integration — folder picker, sync status) + +**Dependencies**: Phase 6 (sync coordinator) + +### Phase 8: CLI Collection Runner + +**Scope**: Run requests/flows directly from collection folders. + +**Files**: +- `apps/cli/cmd/run.go` (extend with `collection` subcommand) + +**Dependencies**: Phase 4 (YAML format), existing runner + +--- + +## Phase Dependency Graph + +``` +Phase 1: BRU Parser ──────────────┐ + ├──→ Phase 2: Bruno Converter ──→ Phase 3: CLI Import + │ +Phase 4: YAML Collection Format ──┼──→ Phase 8: CLI Collection Runner + │ +Phase 5: File Watcher ────────────┤ + │ + └──→ Phase 6: Sync Coordinator ──→ Phase 7: RPC + Desktop +``` + +Phases 1 and 4-5 can be developed in parallel (no dependencies between them). + +--- + +## External Dependencies + +| Dependency | Purpose | Phase | +|---|---|---| +| `github.com/fsnotify/fsnotify` | Cross-platform file system notifications | 5 | +| `gopkg.in/yaml.v3` | YAML parsing (already in use by `yamlflowsimplev2`) | 4 | +| No new dependencies | BRU parser is hand-written | 1 | + +--- + +## Key Design Decisions + +### Why DevTools YAML instead of .bru format? + +1. **YAML is universal** — every developer knows it, every editor supports it +2. **No custom parser maintenance** — leverage `gopkg.in/yaml.v3` vs maintaining a PEG parser +3. **Extensible** — easy to add new fields as DevTools evolves +4. 
**Consistent with existing YAML flow format** — `yamlflowsimplev2` already uses YAML +5. **Better tooling** — YAML schema validation, IDE autocomplete via JSON Schema +6. **Import, don't adopt** — import Bruno collections but don't tie DevTools to Bruno's format + +### Why hand-written BRU parser instead of PEG library? + +1. **Simple grammar** — only 3 block types, line-based parsing +2. **No external dependency** — keeps the import path dependency-free +3. **Better error messages** — hand-written parsers produce clearer diagnostics +4. **One-way** — we only need to parse `.bru` for import, not write it back + +### Why fsnotify instead of polling? + +1. **Efficient** — kernel-level notifications, no CPU overhead from polling +2. **Low latency** — events arrive in milliseconds vs polling interval +3. **Standard** — most Go file-watching libraries use fsnotify +4. **Exception**: WSL paths use polling (matching Bruno's approach) + +### Why autosave debounce at 500ms? + +1. **Matches Bruno** — users expect similar behavior +2. **Prevents rapid writes** — typing in URL field doesn't cause per-keystroke disk writes +3. **Balances responsiveness** — changes appear on disk within ~500ms, fast enough for git workflows From 104c0d2b24ff22c8c76b1907ce69ccb05586aae3 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 15:25:51 +0000 Subject: [PATCH 02/20] docs: rewrite plan to use OpenCollection YAML instead of .bru format Bruno is moving to OpenCollection YAML as the recommended format. Drop the .bru parser phase entirely and focus on: - Phase 1: OpenCollection YAML parser using gopkg.in/yaml.v3 (no custom parser) - Phase 2: CLI import command - Phase 3-5: DevTools YAML folder sync with fsnotify watcher - Phase 6-7: RPC endpoints and CLI runner Added complete OpenCollection YAML format reference with all field definitions, auth variants, body variants, and mapping table to DevTools models. 
https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 751 ++++++++++++------ 1 file changed, 492 insertions(+), 259 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index c40bfa48..04cf3426 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -1,27 +1,45 @@ -# Bruno Import & YAML Folder Sync Plan +# OpenCollection Import & YAML Folder Sync Plan ## Overview This document describes the plan to add: -1. **Bruno Import** — Parse `.bru` collections and convert them into DevTools' data model +1. **OpenCollection YAML Import** — Parse Bruno's OpenCollection YAML collections (`.yml` format with `opencollection.yml` root) and convert them into DevTools' data model 2. **DevTools YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own YAML format, inspired by Bruno's folder sync architecture 3. **CLI Runner Integration** — Execute imported/synced collections from the CLI The goal is to support Bruno users migrating to DevTools, while building a first-class filesystem-based workflow using DevTools' own YAML format for git-friendly, local-first API development. +### Why OpenCollection YAML only (no .bru)? + +Bruno is moving to the OpenCollection YAML format as the recommended standard going forward. The legacy `.bru` DSL format is being phased out. 
By focusing exclusively on the YAML format: + +- No custom parser needed — we use standard `gopkg.in/yaml.v3` (already a dependency) +- Forward-compatible with Bruno's direction +- Simpler codebase to maintain +- YAML tooling (linting, schema validation, IDE support) works out of the box + +### Sources + +- [OpenCollection YAML Format (Bruno Docs)](https://docs.usebruno.com/opencollection-yaml/overview) +- [YAML Structure Reference](https://docs.usebruno.com/opencollection-yaml/structure-reference) +- [YAML Samples](https://docs.usebruno.com/opencollection-yaml/samples) +- [OpenCollection Spec](https://spec.opencollection.com/) +- [OpenCollection GitHub](https://github.com/opencollection-dev/opencollection) +- [RFC Discussion](https://github.com/usebruno/bruno/discussions/6634) + --- -## Part 1: Bruno Import +## Part 1: OpenCollection YAML Import ### 1.1 Architecture Fit -Bruno import follows the exact same pattern as the existing HAR, Postman, and curl importers: +OpenCollection import follows the exact same pattern as existing HAR, Postman, and curl importers: ``` -.bru files on disk - → tbruno.ConvertBrunoCollection() - → BrunoResolved (mhttp.HTTP, mfile.File, etc.) +OpenCollection .yml files on disk + → topencollection.ConvertOpenCollection() + → OpenCollectionResolved (mhttp.HTTP, mfile.File, etc.) 
→ importer.RunImport() + services.Create() → SQLite DB ``` @@ -29,141 +47,359 @@ Bruno import follows the exact same pattern as the existing HAR, Postman, and cu | Layer | Location | Pattern | |-------|----------|---------| | CLI Command | `apps/cli/cmd/import.go` | Add `importBrunoCmd` | -| Translator | `packages/server/pkg/translate/tbruno/` | New package | -| BRU Parser | `packages/server/pkg/translate/tbruno/bruparser/` | Hand-written recursive descent | +| Translator | `packages/server/pkg/translate/topencollection/` | New package | | Importer | `apps/cli/internal/importer/` | Existing `RunImport()` callback | -### 1.2 BRU Parser (`tbruno/bruparser/`) +### 1.2 OpenCollection YAML Format Reference -The `.bru` format has three block types that need a hand-written parser (no external dependency needed): +#### Directory Structure -**Dictionary blocks** (key-value): -```bru -headers { - content-type: application/json - ~disabled-header: value -} +``` +my-collection/ +├── opencollection.yml # Collection root config +├── environments/ +│ └── development.yml +├── users/ +│ ├── folder.yml # Folder configuration +│ ├── create-user.yml +│ ├── get-user.yml +│ └── delete-user.yml +└── orders/ + └── create-order.yml ``` -**Text blocks** (freeform content): -```bru -body:json { - { - "key": "value" - } -} +#### Collection Root (`opencollection.yml`) + +```yaml +opencollection: "1.0.0" +info: + name: "My API Collection" + summary: "A collection for testing our REST API" + version: "2.1.0" + authors: + - name: "Jane Doe" + email: "[email protected]" ``` -**List blocks** (arrays inside dictionary blocks): -```bru -meta { - tags: [ - regression - smoke - ] -} +#### Request File Structure + +Each `.yml` request file has these top-level sections: + +```yaml +info: + name: Create User + type: http # http | graphql | grpc | ws + seq: 5 + tags: + - smoke + - regression + +http: + method: POST # GET|POST|PUT|PATCH|DELETE|OPTIONS|HEAD|TRACE|CONNECT + url: https://api.example.com/users 
+ headers: + - name: Content-Type + value: application/json + - name: Authorization + value: "Bearer {{token}}" + disabled: true # Optional, marks header as disabled + params: + - name: filter + value: active + type: query # query | path + - name: id + value: "123" + type: path + body: + type: json # json | xml | text | form-urlencoded | multipart-form | graphql | none + data: |- + { + "name": "John Doe", + "email": "john@example.com" + } + auth: # none | inherit | basic | bearer | apikey | digest | oauth2 | awsv4 | ntlm + type: bearer + token: "{{token}}" + +runtime: + scripts: + - type: before-request + code: |- + const timestamp = Date.now(); + bru.setVar("timestamp", timestamp); + - type: after-response + code: |- + console.log(res.status); + - type: tests + code: |- + test("should return 201", function() { + expect(res.status).to.equal(201); + }); + assertions: + - expression: res.status + operator: eq + value: "201" + - expression: res.body.name + operator: isString + actions: + - type: set-variable + phase: after-response + selector: + expression: res.body.token + method: jsonPath + variable: + name: auth_token + scope: collection + +settings: + encodeUrl: true + timeout: 0 + followRedirects: true + maxRedirects: 5 + +docs: |- + # Create User + Creates a new user account in the system. 
+``` + +#### Auth Variants + +```yaml +# No auth +auth: none + +# Inherit from parent +auth: inherit + +# Bearer token +auth: + type: bearer + token: "{{token}}" + +# Basic auth +auth: + type: basic + username: admin + password: secret + +# API Key +auth: + type: apikey + key: x-api-key + value: "{{api-key}}" + placement: header # header | query ``` -#### Files +#### Body Variants + +```yaml +# JSON body +body: + type: json + data: |- + {"key": "value"} + +# XML body +body: + type: xml + data: |- + value + +# Text body +body: + type: text + data: "plain text content" +# Form URL-encoded +body: + type: form-urlencoded + data: + - name: username + value: johndoe + - name: password + value: secret123 + +# Multipart form data +body: + type: multipart-form + data: + - name: file + value: "@/path/to/file.pdf" + contentType: application/pdf + - name: description + value: "My file" ``` -packages/server/pkg/translate/tbruno/bruparser/ -├── parser.go # .bru → BruFile struct (recursive descent) -├── serializer.go # BruFile → .bru string (for round-trip tests) -├── types.go # BruFile, Block, KeyValue types -└── parser_test.go # Test with real .bru samples + +#### Environment File (`environments/dev.yml`) + +```yaml +name: development +variables: + - name: api_url + value: http://localhost:3000 + enabled: true + secret: false + type: text ``` -#### Core Types +#### Folder Config (`folder.yml`) + +Contains folder-level metadata, auth, headers, and scripts that apply to all requests in the folder. + +### 1.3 Go Types for OpenCollection Parsing ```go -package bruparser - -// BruFile represents a fully parsed .bru file -type BruFile struct { - Meta Meta - HTTP *HTTPBlock // get/post/put/delete/patch/options/head/connect/trace block - GraphQL *GraphQLBlock - Params *ParamsBlock // params:query, params:path - Headers []KeyValue - Auth *AuthBlock // auth:bearer, auth:basic, auth:apikey, etc. 
- Body *BodyBlock // body:json, body:xml, body:text, body:form-urlencoded, body:multipart-form, body:graphql - Script *ScriptBlock // script:pre-request, script:post-response - Tests string // freeform JS - Vars *VarsBlock // vars:pre-request, vars:post-response - Assertions []AssertEntry // assert block - Docs string // docs block +package topencollection + +// --- Collection Root --- + +type OpenCollectionRoot struct { + OpenCollection string `yaml:"opencollection"` + Info OpenCollectionInfo `yaml:"info"` } -type Meta struct { - Name string - Type string // "http", "graphql", "grpc" - Seq int - Tags []string +type OpenCollectionInfo struct { + Name string `yaml:"name"` + Summary string `yaml:"summary,omitempty"` + Version string `yaml:"version,omitempty"` + Authors []OpenCollectionAuthor `yaml:"authors,omitempty"` } -type KeyValue struct { - Key string - Value string - Enabled bool // false if prefixed with ~ +type OpenCollectionAuthor struct { + Name string `yaml:"name"` + Email string `yaml:"email,omitempty"` } -type AssertEntry struct { - Expression string // e.g. 
"res.status eq 200" - Enabled bool +// --- Request File --- + +type OCRequest struct { + Info OCRequestInfo `yaml:"info"` + HTTP *OCHTTPBlock `yaml:"http,omitempty"` + Runtime *OCRuntime `yaml:"runtime,omitempty"` + Settings *OCSettings `yaml:"settings,omitempty"` + Docs string `yaml:"docs,omitempty"` } -``` -#### Parser Implementation Notes +type OCRequestInfo struct { + Name string `yaml:"name"` + Type string `yaml:"type"` // "http", "graphql", "grpc", "ws" + Seq int `yaml:"seq,omitempty"` + Tags []string `yaml:"tags,omitempty"` +} -- **Line-based parsing**: Read line by line, detect block openings (`blockname {`), track nesting depth -- **Dictionary vs Text blocks**: Dictionary blocks have `key: value` lines; text blocks (body, script, tests, docs) have freeform content with 2-space indent -- **Disabled items**: `~` prefix means disabled (`Enabled: false`) -- **Multiline values**: `'''...'''` syntax with optional `@contentType()` annotation -- **Quoted keys**: Keys can be quoted with single quotes for special characters -- **Assert delimiter**: Assert keys use `: ` (space after colon) as the delimiter since keys can contain colons +type OCHTTPBlock struct { + Method string `yaml:"method"` + URL string `yaml:"url"` + Headers []OCHeader `yaml:"headers,omitempty"` + Params []OCParam `yaml:"params,omitempty"` + Body *OCBody `yaml:"body,omitempty"` + Auth *OCAuth `yaml:"auth,omitempty"` // Can also be string "inherit"/"none" +} -### 1.3 Bruno Collection Reader (`tbruno/`) +type OCHeader struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Disabled bool `yaml:"disabled,omitempty"` +} -Reads the full Bruno collection directory structure and converts to DevTools models. 
+type OCParam struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Type string `yaml:"type"` // "query" | "path" + Disabled bool `yaml:"disabled,omitempty"` +} -#### Files +type OCBody struct { + Type string `yaml:"type"` // "json"|"xml"|"text"|"form-urlencoded"|"multipart-form"|"graphql"|"none" + Data interface{} `yaml:"data"` // string for raw types, []OCFormField for form types +} -``` -packages/server/pkg/translate/tbruno/ -├── bruparser/ # .bru parser (above) -├── converter.go # Main conversion: directory → BrunoResolved -├── converter_test.go # Tests with sample collections -├── types.go # BrunoResolved, ConvertOptions -├── collection.go # bruno.json / opencollection.yml detection + parsing -├── environment.go # Environment .bru/.yml → menv conversion -└── testdata/ # Sample Bruno collections for tests - ├── bru-format/ - │ ├── bruno.json - │ ├── collection.bru - │ ├── environments/ - │ │ ├── dev.bru - │ │ └── prod.bru - │ ├── users/ - │ │ ├── folder.bru - │ │ ├── get-users.bru - │ │ └── create-user.bru - │ └── auth/ - │ ├── folder.bru - │ └── login.bru - └── yml-format/ - ├── opencollection.yml - └── ... +type OCFormField struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Disabled bool `yaml:"disabled,omitempty"` + ContentType string `yaml:"contentType,omitempty"` // For multipart file uploads +} + +type OCAuth struct { + Type string `yaml:"type"` // "none"|"inherit"|"basic"|"bearer"|"apikey"|... 
+ Token string `yaml:"token,omitempty"` // For bearer + Username string `yaml:"username,omitempty"` // For basic + Password string `yaml:"password,omitempty"` // For basic + Key string `yaml:"key,omitempty"` // For apikey + Value string `yaml:"value,omitempty"` // For apikey + Placement string `yaml:"placement,omitempty"` // For apikey: "header"|"query" +} + +// --- Runtime --- + +type OCRuntime struct { + Scripts []OCScript `yaml:"scripts,omitempty"` + Assertions []OCAssertion `yaml:"assertions,omitempty"` + Actions []OCAction `yaml:"actions,omitempty"` +} + +type OCScript struct { + Type string `yaml:"type"` // "before-request"|"after-response"|"tests" + Code string `yaml:"code"` +} + +type OCAssertion struct { + Expression string `yaml:"expression"` + Operator string `yaml:"operator"` + Value string `yaml:"value,omitempty"` +} + +type OCAction struct { + Type string `yaml:"type"` // "set-variable" + Phase string `yaml:"phase"` // "after-response" + Selector OCSelector `yaml:"selector"` + Variable OCVariable `yaml:"variable"` +} + +type OCSelector struct { + Expression string `yaml:"expression"` + Method string `yaml:"method"` +} + +type OCVariable struct { + Name string `yaml:"name"` + Scope string `yaml:"scope"` +} + +// --- Settings --- + +type OCSettings struct { + EncodeUrl *bool `yaml:"encodeUrl,omitempty"` + Timeout *int `yaml:"timeout,omitempty"` + FollowRedirects *bool `yaml:"followRedirects,omitempty"` + MaxRedirects *int `yaml:"maxRedirects,omitempty"` +} + +// --- Environment --- + +type OCEnvironment struct { + Name string `yaml:"name"` + Variables []OCEnvVariable `yaml:"variables"` +} + +type OCEnvVariable struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Enabled *bool `yaml:"enabled,omitempty"` + Secret *bool `yaml:"secret,omitempty"` + Type string `yaml:"type,omitempty"` // "text" +} ``` -#### Core Types +### 1.4 Converter (`topencollection/converter.go`) ```go -package tbruno +package topencollection -// BrunoResolved contains 
all entities extracted from a Bruno collection, +// OpenCollectionResolved contains all entities extracted from an OpenCollection, // following the same pattern as tpostmanv2.PostmanResolvedV2 and harv2.HARResolved. -type BrunoResolved struct { +type OpenCollectionResolved struct { // HTTP entities HTTPRequests []mhttp.HTTP HTTPHeaders []mhttp.HTTPHeader @@ -188,74 +424,89 @@ type BrunoResolved struct { } type ConvertOptions struct { - WorkspaceID idwrap.IDWrap - FolderID *idwrap.IDWrap // Optional parent folder to import into - CollectionName string // Override collection name - CreateFlow bool // Whether to generate a flow from the collection - GenerateFiles bool // Whether to create File entries for hierarchy + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap // Optional parent folder to import into + CollectionName string // Override collection name + CreateFlow bool // Whether to generate a flow from the collection + GenerateFiles bool // Whether to create File entries for hierarchy } -// ConvertBrunoCollection reads a Bruno collection directory and returns resolved entities. -func ConvertBrunoCollection(collectionPath string, opts ConvertOptions) (*BrunoResolved, error) +// ConvertOpenCollection reads an OpenCollection YAML directory and returns resolved entities. +func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCollectionResolved, error) ``` #### Conversion Logic -1. **Detect format**: Check for `bruno.json` (BRU format) or `opencollection.yml` (YML format) -2. **Parse config**: Read collection name, version, ignore patterns from config file +1. **Detect format**: Check for `opencollection.yml` in the directory root +2. **Parse root config**: Read collection name, version from `opencollection.yml` 3. 
**Walk directory tree** (depth-first): - Skip ignored paths (`node_modules`, `.git`, dotenv files) - For each directory: create `mfile.File` with `ContentTypeFolder` - - For each `.bru` file: parse with `bruparser.Parse()`, convert to `mhttp.HTTP` + children - - For `folder.bru`: extract folder-level metadata (auth, headers) — apply as defaults to child requests - - For `collection.bru`: extract collection-level defaults - - For `environments/*.bru`: convert to `menv.Env` + `menv.Variable` -4. **Map to DevTools models**: - - BRU `meta.name` → `mhttp.HTTP.Name` - - BRU method block (`get`, `post`, etc.) → `mhttp.HTTP.Method` + `mhttp.HTTP.Url` - - BRU `headers {}` → `[]mhttp.HTTPHeader` - - BRU `params:query {}` → `[]mhttp.HTTPSearchParam` - - BRU `body:json {}` → `mhttp.HTTPBodyRaw` with `BodyKind = Raw` - - BRU `body:form-urlencoded {}` → `[]mhttp.HTTPBodyUrlencoded` - - BRU `body:multipart-form {}` → `[]mhttp.HTTPBodyForm` - - BRU `assert {}` → `[]mhttp.HTTPAssert` - - BRU `meta.seq` → `mfile.File.Order` - - Directory nesting → `mfile.File.ParentID` hierarchy + - For each `.yml` file (not `opencollection.yml`, not `folder.yml`, not in `environments/`): + - Parse with `yaml.Unmarshal()` into `OCRequest` + - Convert to `mhttp.HTTP` + child entities + - For `folder.yml`: extract folder metadata + - For `environments/*.yml`: convert to `menv.Env` + `menv.Variable` +4. **Map to DevTools models** (see mapping table) 5. 
**Generate flow** (optional): Create a linear flow with request nodes ordered by `seq` -#### Mapping Table: Bruno → DevTools +#### Mapping Table: OpenCollection YAML → DevTools -| Bruno (.bru) | DevTools Model | Notes | +| OpenCollection YAML | DevTools Model | Notes | |---|---|---| -| `meta.name` | `mhttp.HTTP.Name` | | -| `meta.seq` | `mfile.File.Order` | Float64 ordering | -| `meta.type` | Determines request type | "http", "graphql" | -| `get/post/put/...` block | `mhttp.HTTP.Method` | Block name = method | -| `url` in method block | `mhttp.HTTP.Url` | | -| `body` in method block | `mhttp.HTTP.BodyKind` | "json"→Raw, "form"→FormData, etc. | -| `headers {}` | `[]mhttp.HTTPHeader` | `~` prefix → `Enabled: false` | -| `params:query {}` | `[]mhttp.HTTPSearchParam` | `~` prefix → `Enabled: false` | -| `params:path {}` | Embedded in URL | DevTools uses URL template vars | -| `body:json {}` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | -| `body:xml {}` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | -| `body:text {}` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | -| `body:form-urlencoded {}` | `[]mhttp.HTTPBodyUrlencoded` | | -| `body:multipart-form {}` | `[]mhttp.HTTPBodyForm` | | -| `auth:bearer` | `mhttp.HTTPHeader` | Converted to Authorization header | -| `auth:basic` | `mhttp.HTTPHeader` | Converted to Authorization header | -| `auth:apikey` | `mhttp.HTTPHeader` | Key/value in header or query param | -| `assert {}` | `[]mhttp.HTTPAssert` | Expression syntax may differ | -| `script:pre-request {}` | Not imported (log warning) | DevTools uses JS nodes in flows | -| `script:post-response {}` | Not imported (log warning) | DevTools uses JS nodes in flows | -| `tests {}` | Not imported (log warning) | DevTools uses assert system | -| `vars:pre-request {}` | Not imported (log warning) | DevTools uses flow variables | -| `docs {}` | `mhttp.HTTP.Description` | | +| `info.name` | `mhttp.HTTP.Name` | | +| `info.seq` | `mfile.File.Order` | Float64 ordering | +| `info.type` | Determines 
request type | "http" only for now | +| `http.method` | `mhttp.HTTP.Method` | Uppercase | +| `http.url` | `mhttp.HTTP.Url` | | +| `http.headers` | `[]mhttp.HTTPHeader` | `disabled: true` → `Enabled: false` | +| `http.params` (type=query) | `[]mhttp.HTTPSearchParam` | Filter by `type: query` | +| `http.params` (type=path) | Embedded in URL | DevTools uses URL template vars | +| `http.body.type: json` | `mhttp.HTTPBodyRaw`, `BodyKind: Raw` | | +| `http.body.type: xml` | `mhttp.HTTPBodyRaw`, `BodyKind: Raw` | | +| `http.body.type: text` | `mhttp.HTTPBodyRaw`, `BodyKind: Raw` | | +| `http.body.type: form-urlencoded` | `[]mhttp.HTTPBodyUrlencoded` | | +| `http.body.type: multipart-form` | `[]mhttp.HTTPBodyForm` | | +| `http.auth.type: bearer` | `mhttp.HTTPHeader` | → `Authorization: Bearer ` | +| `http.auth.type: basic` | `mhttp.HTTPHeader` | → `Authorization: Basic ` | +| `http.auth.type: apikey` | `mhttp.HTTPHeader` or `mhttp.HTTPSearchParam` | Based on `placement` | +| `runtime.assertions` | `[]mhttp.HTTPAssert` | Convert `expr operator value` format | +| `runtime.scripts` | Not imported (log warning) | DevTools uses JS nodes in flows | +| `runtime.actions` | Not imported (log warning) | DevTools uses flow variables | +| `docs` | `mhttp.HTTP.Description` | | +| `settings` | Not imported (log info) | DevTools has own request settings | | Directory structure | `mfile.File` hierarchy | Folder nesting preserved | -| `folder.bru` | Folder metadata | Auth/headers applied to children | -| `environments/*.bru` | `menv.Env` + `menv.Variable` | | -| `bruno.json` | Collection config | Name used as root folder name | +| `folder.yml` | Folder metadata | Name used for folder | +| `environments/*.yml` | `menv.Env` + `menv.Variable` | | +| `opencollection.yml` | Collection config | Name used as root folder name | -### 1.4 CLI Command +### 1.5 Package Structure + +``` +packages/server/pkg/translate/topencollection/ +├── types.go # OCRequest, OCHTTPBlock, etc. 
(YAML struct definitions) +├── converter.go # Main conversion: directory → OpenCollectionResolved +├── converter_test.go # Tests with sample collections +├── collection.go # opencollection.yml parsing +├── environment.go # Environment .yml → menv conversion +├── auth.go # Auth type → header/param conversion +├── body.go # Body type → mhttp body conversion +└── testdata/ # Sample OpenCollection directories for tests + └── basic-collection/ + ├── opencollection.yml + ├── environments/ + │ ├── dev.yml + │ └── prod.yml + ├── users/ + │ ├── folder.yml + │ ├── get-users.yml + │ └── create-user.yml + └── auth/ + ├── folder.yml + └── login.yml +``` + +### 1.6 CLI Command Add `import bruno ` to the existing import command tree: @@ -264,13 +515,13 @@ Add `import bruno ` to the existing import command tree: var importBrunoCmd = &cobra.Command{ Use: "bruno [directory]", - Short: "Import a Bruno collection directory", + Short: "Import a Bruno OpenCollection YAML directory", Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { collectionPath := args[0] return importer.RunImport(ctx, logger, workspaceID, folderID, func(ctx context.Context, services *common.Services, wsID idwrap.IDWrap, folderIDPtr *idwrap.IDWrap) error { - resolved, err := tbruno.ConvertBrunoCollection(collectionPath, tbruno.ConvertOptions{ + resolved, err := topencollection.ConvertOpenCollection(collectionPath, topencollection.ConvertOptions{ WorkspaceID: wsID, FolderID: folderIDPtr, GenerateFiles: true, @@ -287,12 +538,13 @@ var importBrunoCmd = &cobra.Command{ } ``` -### 1.5 Testing Strategy +### 1.7 Testing Strategy -- **Parser unit tests**: Port/create test cases from real `.bru` files — parse, verify struct output -- **Round-trip tests**: Parse `.bru` → serialize back → parse again, verify equality -- **Converter tests**: Use `testdata/` sample collections → verify `BrunoResolved` output -- **Integration test**: Import sample collection into in-memory SQLite → verify all entities 
created correctly +- **YAML parsing tests**: Parse sample `.yml` request files, verify struct output +- **Converter tests**: Use `testdata/` sample collections → verify `OpenCollectionResolved` output +- **Auth conversion tests**: Test all auth types (bearer, basic, apikey) → correct headers/params +- **Body conversion tests**: Test all body types (json, xml, form-urlencoded, multipart-form) +- **Integration test**: Import sample collection into in-memory SQLite → verify all entities created --- @@ -306,11 +558,12 @@ This is the core feature — a bidirectional filesystem sync that uses DevTools' 2. **Human-readable**: Developers can edit requests in their editor/IDE 3. **Bidirectional**: Changes in the UI persist to disk; changes on disk reflect in the UI 4. **Compatible**: Similar UX to Bruno's folder sync (users understand the concept) -5. **DevTools-native**: Uses DevTools' own YAML format, not `.bru` format +5. **DevTools-native**: Uses DevTools' own YAML format +6. **OpenCollection-compatible import**: Can import from OpenCollection and sync in DevTools format ### 2.2 DevTools Collection YAML Format -Inspired by `yamlflowsimplev2` but simplified for individual request files (not full flows). +Simplified for individual request files. Intentionally similar to OpenCollection for easy migration, but with DevTools-specific conventions. 
#### Directory Structure @@ -481,8 +734,8 @@ package tyamlcollection // CollectionConfig represents devtools.yaml type CollectionConfig struct { - Version string `yaml:"version"` - Name string `yaml:"name"` + Version string `yaml:"version"` + Name string `yaml:"name"` Settings *CollectionSettings `yaml:"settings,omitempty"` } @@ -511,9 +764,9 @@ type HeaderEntry struct { } type BodyDef struct { - Type string `yaml:"type"` // "none", "raw", "form-data", "urlencoded" - Content string `yaml:"content,omitempty"` // For raw bodies - Fields []HeaderEntry `yaml:"fields,omitempty"` // For form-data / urlencoded + Type string `yaml:"type"` // "none", "raw", "form-data", "urlencoded" + Content string `yaml:"content,omitempty"` // For raw bodies + Fields []HeaderEntry `yaml:"fields,omitempty"` // For form-data / urlencoded } type AssertionEntry struct { @@ -522,12 +775,11 @@ type AssertionEntry struct { Description string `yaml:"description,omitempty"` } -// Simplified assertion: can be just a string // Custom unmarshaler handles both "res.status eq 200" and {value: ..., enabled: false} type EnvironmentFile struct { - Name string `yaml:"name"` - Variables []EnvVariable `yaml:"variables"` + Name string `yaml:"name"` + Variables []EnvVariable `yaml:"variables"` } type EnvVariable struct { @@ -537,9 +789,9 @@ type EnvVariable struct { } type FolderMeta struct { - Name string `yaml:"name,omitempty"` - Order int `yaml:"order,omitempty"` - Description string `yaml:"description,omitempty"` + Name string `yaml:"name,omitempty"` + Order int `yaml:"order,omitempty"` + Description string `yaml:"description,omitempty"` Defaults *FolderDefaults `yaml:"defaults,omitempty"` } @@ -601,9 +853,9 @@ const ( ) type WatchEvent struct { - Type EventType - Path string // Absolute path - RelPath string // Relative to collection root + Type EventType + Path string // Absolute path + RelPath string // Relative to collection root } // CollectionWatcher watches a collection directory for filesystem 
changes. @@ -675,24 +927,24 @@ type SyncCoordinator struct { format tyamlcollection.CollectionConfig // Filesystem → Database - watcher *CollectionWatcher - parser *tyamlcollection.Parser + watcher *CollectionWatcher + parser *tyamlcollection.Parser // Database → Filesystem - selfWrites *SelfWriteTracker - serializer *tyamlcollection.Serializer - autosaver *AutoSaver + selfWrites *SelfWriteTracker + serializer *tyamlcollection.Serializer + autosaver *AutoSaver // State mapping - mu sync.RWMutex - pathToID map[string]idwrap.IDWrap // filepath → entity ID (UID preservation) - idToPath map[idwrap.IDWrap]string // entity ID → filepath (reverse mapping) + mu sync.RWMutex + pathToID map[string]idwrap.IDWrap // filepath → entity ID (UID preservation) + idToPath map[idwrap.IDWrap]string // entity ID → filepath (reverse mapping) // Services - services *common.Services + services *common.Services // Event publishing for real-time sync to UI - publisher *eventstream.Publisher + publisher *eventstream.Publisher } func NewSyncCoordinator(opts SyncOptions) (*SyncCoordinator, error) @@ -774,8 +1026,6 @@ interface FolderSync { The folder sync coordinator starts/stops with the server: ```go -// packages/server/internal/app/app.go or similar - // On workspace open with folder sync enabled: coordinator := foldersync.NewSyncCoordinator(foldersync.SyncOptions{ CollectionPath: "/path/to/collection", @@ -794,7 +1044,6 @@ coordinator.Stop() Subscribe to entity change events for Database → Disk sync: ```go -// Subscribe to HTTP entity changes for this workspace sub := publisher.Subscribe(eventstream.Topic{ WorkspaceID: workspaceID, EntityTypes: []eventstream.EntityType{ @@ -844,9 +1093,9 @@ for event := range sub.Events() { Add a CLI command to run requests directly from a synced collection folder: ``` -devtools run ./my-collection # Run all requests sequentially +devtools run ./my-collection # Run all requests sequentially devtools run ./my-collection/users/get-users.yaml # Run single 
request -devtools run ./my-collection --env dev # With environment +devtools run ./my-collection --env dev # With environment devtools run ./my-collection/flows/smoke-test.yaml # Run a flow ``` @@ -878,52 +1127,38 @@ var runCollectionCmd = &cobra.Command{ ## Part 4: Implementation Phases -### Phase 1: BRU Parser (Foundation) - -**Scope**: Hand-written `.bru` parser + serializer with full test coverage. - -**Files**: -- `packages/server/pkg/translate/tbruno/bruparser/types.go` -- `packages/server/pkg/translate/tbruno/bruparser/parser.go` -- `packages/server/pkg/translate/tbruno/bruparser/serializer.go` -- `packages/server/pkg/translate/tbruno/bruparser/parser_test.go` - -**Dependencies**: None (pure Go, no external libs) - -**Testing**: Parse real `.bru` files, verify struct output, round-trip test - -**Estimate**: Self-contained, no cross-cutting concerns - -### Phase 2: Bruno Collection Converter +### Phase 1: OpenCollection YAML Parser + Converter -**Scope**: Walk Bruno collection directory → produce `BrunoResolved` with all DevTools models. +**Scope**: Parse OpenCollection YAML directories and convert to DevTools models. 
**Files**: -- `packages/server/pkg/translate/tbruno/types.go` -- `packages/server/pkg/translate/tbruno/converter.go` -- `packages/server/pkg/translate/tbruno/collection.go` -- `packages/server/pkg/translate/tbruno/environment.go` -- `packages/server/pkg/translate/tbruno/converter_test.go` -- `packages/server/pkg/translate/tbruno/testdata/` (sample collections) +- `packages/server/pkg/translate/topencollection/types.go` +- `packages/server/pkg/translate/topencollection/converter.go` +- `packages/server/pkg/translate/topencollection/collection.go` +- `packages/server/pkg/translate/topencollection/environment.go` +- `packages/server/pkg/translate/topencollection/auth.go` +- `packages/server/pkg/translate/topencollection/body.go` +- `packages/server/pkg/translate/topencollection/converter_test.go` +- `packages/server/pkg/translate/topencollection/testdata/` (sample collections) -**Dependencies**: Phase 1 (BRU parser), existing models (`mhttp`, `mfile`, `menv`) +**Dependencies**: `gopkg.in/yaml.v3` (already a dependency), existing models (`mhttp`, `mfile`, `menv`) -**Testing**: Convert sample Bruno collections, verify all entities +**Testing**: Parse sample OpenCollection directories, verify all entities -### Phase 3: CLI Import Command +### Phase 2: CLI Import Command **Scope**: Add `import bruno ` CLI command. **Files**: - `apps/cli/cmd/import.go` (add `importBrunoCmd`) -**Dependencies**: Phase 2 (converter), existing importer infrastructure +**Dependencies**: Phase 1 (converter), existing importer infrastructure **Testing**: End-to-end import into in-memory SQLite -### Phase 4: DevTools YAML Collection Format +### Phase 3: DevTools YAML Collection Format -**Scope**: Define and implement DevTools' own YAML format for individual request files. +**Scope**: Define and implement DevTools' own YAML format for individual request files with round-trip serialization. 
**Files**: - `packages/server/pkg/translate/tyamlcollection/types.go` @@ -939,7 +1174,7 @@ var runCollectionCmd = &cobra.Command{ **Testing**: Round-trip tests (parse → serialize → parse), verify equivalence -### Phase 5: File Watcher +### Phase 4: File Watcher **Scope**: `fsnotify`-based watcher with debouncing, filtering, self-write tracking. @@ -954,7 +1189,7 @@ var runCollectionCmd = &cobra.Command{ **Testing**: Create temp dirs, write files, verify events -### Phase 6: Sync Coordinator +### Phase 5: Sync Coordinator **Scope**: Bidirectional sync engine — disk↔database with eventstream integration. @@ -962,11 +1197,11 @@ var runCollectionCmd = &cobra.Command{ - `packages/server/pkg/foldersync/sync.go` - `packages/server/pkg/foldersync/autosaver.go` -**Dependencies**: Phase 4 (YAML format), Phase 5 (watcher), services, eventstream +**Dependencies**: Phase 3 (YAML format), Phase 4 (watcher), services, eventstream **Testing**: Full integration tests — modify files on disk, verify DB updates; modify DB, verify files written -### Phase 7: RPC Endpoints + Desktop Integration +### Phase 6: RPC Endpoints + Desktop Integration **Scope**: TypeSpec definitions for folder sync management, RPC handlers, desktop UI. @@ -976,33 +1211,32 @@ var runCollectionCmd = &cobra.Command{ - `packages/client/` (React hooks/services) - `apps/desktop/` (Electron integration — folder picker, sync status) -**Dependencies**: Phase 6 (sync coordinator) +**Dependencies**: Phase 5 (sync coordinator) -### Phase 8: CLI Collection Runner +### Phase 7: CLI Collection Runner **Scope**: Run requests/flows directly from collection folders. 
**Files**:
- `apps/cli/cmd/run.go` (extend with `collection` subcommand)

-**Dependencies**: Phase 4 (YAML format), existing runner
+**Dependencies**: Phase 3 (YAML format), existing runner

---

## Phase Dependency Graph

```
-Phase 1: BRU Parser ──────────────┐
-                                  ├──→ Phase 2: Bruno Converter ──→ Phase 3: CLI Import
-                                  │
-Phase 4: YAML Collection Format ──┼──→ Phase 8: CLI Collection Runner
-                                  │
-Phase 5: File Watcher ────────────┤
-                                  │
-                                  └──→ Phase 6: Sync Coordinator ──→ Phase 7: RPC + Desktop
+Phase 1: OpenCollection Parser ──→ Phase 2: CLI Import
+
+Phase 3: DevTools YAML Format ──┬──→ Phase 7: CLI Collection Runner
+                                │
+Phase 4: File Watcher ──────────┤
+                                │
+                                └──→ Phase 5: Sync Coordinator ──→ Phase 6: RPC + Desktop
```

-Phases 1 and 4-5 can be developed in parallel (no dependencies between them).
+Phases 1 and 3-4 can be developed **in parallel** (no dependencies between them).

---

@@ -1010,29 +1244,28 @@ Phases 1 and 4-5 can be developed in parallel (no dependencies between them).

| Dependency | Purpose | Phase |
|---|---|---|
-| `github.com/fsnotify/fsnotify` | Cross-platform file system notifications | 5 |
-| `gopkg.in/yaml.v3` | YAML parsing (already in use by `yamlflowsimplev2`) | 4 |
-| No new dependencies | BRU parser is hand-written | 1 |
+| `gopkg.in/yaml.v3` | YAML parsing (already in use by `yamlflowsimplev2`) | 1, 3 |
+| `github.com/fsnotify/fsnotify` | Cross-platform file system notifications | 4 |

---

## Key Design Decisions

-### Why DevTools YAML instead of .bru format?
+### Why OpenCollection YAML only (no .bru parser)?

-1. **YAML is universal** — every developer knows it, every editor supports it
-2. **No custom parser maintenance** — leverage `gopkg.in/yaml.v3` vs maintaining a PEG parser
-3. **Extensible** — easy to add new fields as DevTools evolves
-4. **Consistent with existing YAML flow format** — `yamlflowsimplev2` already uses YAML
-5. **Better tooling** — YAML schema validation, IDE autocomplete via JSON Schema
-6.
**Import, don't adopt** — import Bruno collections but don't tie DevTools to Bruno's format +1. **Bruno's direction** — OpenCollection YAML is the recommended format going forward +2. **No custom parser** — standard `gopkg.in/yaml.v3` handles everything +3. **Forward-compatible** — .bru is being phased out +4. **Less code to maintain** — no hand-written PEG parser +5. **Better tooling** — YAML linting, schema validation, IDE support -### Why hand-written BRU parser instead of PEG library? +### Why a separate DevTools YAML format? -1. **Simple grammar** — only 3 block types, line-based parsing -2. **No external dependency** — keeps the import path dependency-free -3. **Better error messages** — hand-written parsers produce clearer diagnostics -4. **One-way** — we only need to parse `.bru` for import, not write it back +1. **Control** — we define the schema, we evolve it independently +2. **Simplicity** — OpenCollection has sections we don't use (`runtime.scripts`, `runtime.actions`) +3. **Flat structure** — our format puts `name`, `method`, `url` at top level (no `info`/`http` nesting) +4. **Consistency** — matches existing `yamlflowsimplev2` conventions +5. **Import, don't adopt** — import OpenCollection, sync in DevTools format ### Why fsnotify instead of polling? From aad4e340552ffe7c7b10f2d75ee3efeef1a049ae Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 15:43:47 +0000 Subject: [PATCH 03/20] =?UTF-8?q?docs:=20major=20plan=20update=20=E2=80=94?= =?UTF-8?q?=20Open=20YAML=20format,=20workspace=20sync=20modes,=20SQLite?= =?UTF-8?q?=20coexistence?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Key changes: - Rename "DevTools YAML" to "Open YAML" — includes flows (not just requests) - Add "Architecture: SQLite + Folder Sync Coexistence" section explaining how SQLite remains the runtime source of truth with folder sync as a bidirectional persistence mirror - Define 3 workspace sync modes: 1. 
No Sync (default, current behavior) 2. Sync to Folder (bidirectional Open YAML sync) 3. Import from Bruno (OpenCollection → Open YAML in separate folder) - Add workspace schema changes (sync_path, sync_format, sync_enabled) - Add SyncManager for server-level coordinator lifecycle management - Add Phase 3 (Workspace Schema + Migration) to implementation phases - Flow files delegate to existing yamlflowsimplev2 format https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 1242 +++++++---------- 1 file changed, 538 insertions(+), 704 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index 04cf3426..74818ea7 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -1,23 +1,20 @@ -# OpenCollection Import & YAML Folder Sync Plan +# OpenCollection Import & Open YAML Folder Sync Plan ## Overview This document describes the plan to add: -1. **OpenCollection YAML Import** — Parse Bruno's OpenCollection YAML collections (`.yml` format with `opencollection.yml` root) and convert them into DevTools' data model -2. **DevTools YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own YAML format, inspired by Bruno's folder sync architecture -3. **CLI Runner Integration** — Execute imported/synced collections from the CLI +1. **OpenCollection YAML Import** — Parse Bruno's OpenCollection YAML collections and convert them into DevTools' Open YAML format in a separate folder +2. **Open YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own "Open YAML" format (requests + flows), with SQLite as the runtime source of truth +3. **Workspace Sync Modes** — A workspace can be synced to a local folder, opened from a Bruno collection (auto-converted), or used without sync (current behavior) +4. 
**CLI Runner Integration** — Execute collections from Open YAML folders -The goal is to support Bruno users migrating to DevTools, while building a first-class filesystem-based workflow using DevTools' own YAML format for git-friendly, local-first API development. +### Key Concepts -### Why OpenCollection YAML only (no .bru)? - -Bruno is moving to the OpenCollection YAML format as the recommended standard going forward. The legacy `.bru` DSL format is being phased out. By focusing exclusively on the YAML format: - -- No custom parser needed — we use standard `gopkg.in/yaml.v3` (already a dependency) -- Forward-compatible with Bruno's direction -- Simpler codebase to maintain -- YAML tooling (linting, schema validation, IDE support) works out of the box +- **Open YAML** — DevTools' own YAML format for collections. Includes both HTTP requests and flows. One file per request, one file per flow, folder hierarchy maps to the file tree. +- **OpenCollection YAML** — Bruno's YAML format (`opencollection.yml` root). We import FROM this format. +- **SQLite** — Remains the runtime source of truth. Folder sync mirrors SQLite ↔ filesystem bidirectionally. +- **Workspace Sync Modes** — Each workspace can optionally be linked to a folder on disk. 
### Sources @@ -25,8 +22,150 @@ Bruno is moving to the OpenCollection YAML format as the recommended standard go - [YAML Structure Reference](https://docs.usebruno.com/opencollection-yaml/structure-reference) - [YAML Samples](https://docs.usebruno.com/opencollection-yaml/samples) - [OpenCollection Spec](https://spec.opencollection.com/) -- [OpenCollection GitHub](https://github.com/opencollection-dev/opencollection) -- [RFC Discussion](https://github.com/usebruno/bruno/discussions/6634) + +--- + +## Architecture: SQLite + Folder Sync Coexistence + +### Current State + +``` +Desktop App (React UI) + ↕ Connect RPC over Unix Socket +Go Server + ↕ Reader/Writer services +Single SQLite DB (state.db in userData) + └── All workspaces, requests, flows, environments in one DB +``` + +- One `state.db` file contains ALL workspaces +- Workspace model: `ID, Name, Updated, Order, ActiveEnv, GlobalEnv` — **no path/folder field** +- All data lives exclusively in SQLite +- Real-time UI sync via in-memory eventstream + +### Proposed State: SQLite + Folder Sync + +``` +Desktop App (React UI) + ↕ Connect RPC over Unix Socket +Go Server + ↕ Reader/Writer services + ├── SQLite DB (state.db) ←── RUNTIME SOURCE OF TRUTH + │ └── workspace.sync_path = "/path/to/my-collection" + │ + └── SyncCoordinator (per synced workspace) + ↕ bidirectional + Open YAML Folder ←── GIT-FRIENDLY PERSISTENCE + /path/to/my-collection/ + ├── devtools.yaml + ├── requests/ + └── flows/ +``` + +### Design Principle: SQLite is King + +SQLite remains the **single source of truth at runtime**. 
The folder sync is a **persistence mirror**:
+
+| Direction | Trigger | Behavior |
+|-----------|---------|----------|
+| **Folder → SQLite** | File watcher detects change | Parse YAML → upsert into SQLite → eventstream → UI updates |
+| **SQLite → Folder** | Eventstream publishes change | Serialize from SQLite → write YAML to disk |
+| **Initial load** | Workspace opened with `sync_path` | Read entire folder → populate SQLite (folder wins on first load) |
+| **Conflict** | Both sides change within the debounce window | Disk wins (matching Bruno behavior); otherwise the most recent write wins. |
+
+### Why SQLite Stays as Source of Truth
+
+1. **Performance** — SQLite queries are instant; parsing YAML on every read would be slow
+2. **Transactions** — Atomic multi-entity updates (e.g., creating a request + headers + body in one tx)
+3. **Indexing** — Fast lookups by ID, workspace, folder
+4. **Existing code** — All services, RPC handlers, runner already work with SQLite
+5. **Offline** — No filesystem dependency for core functionality
+6.
**Real-time sync** — Eventstream already works with SQLite changes + +### Workspace Schema Change + +Add `sync_path` and `sync_format` to the workspace model: + +```sql +-- New columns on workspaces table +ALTER TABLE workspaces ADD COLUMN sync_path TEXT; -- NULL = no sync +ALTER TABLE workspaces ADD COLUMN sync_format TEXT; -- "open_yaml" | "opencollection" +ALTER TABLE workspaces ADD COLUMN sync_enabled BOOLEAN NOT NULL DEFAULT 0; +``` + +```go +// Updated workspace model +type Workspace struct { + ID idwrap.IDWrap + Name string + Updated time.Time + Order float64 + ActiveEnv idwrap.IDWrap + GlobalEnv idwrap.IDWrap + FlowCount int32 + CollectionCount int32 + // NEW: Folder sync fields + SyncPath *string // nil = no sync, else absolute path to folder + SyncFormat *string // "open_yaml" (our format) or nil + SyncEnabled bool // Whether sync is currently active +} +``` + +--- + +## Workspace Sync Modes + +### Mode 1: No Sync (Default — Current Behavior) + +``` +User creates workspace → data lives only in SQLite +SyncPath = nil, SyncEnabled = false +``` + +Nothing changes from the current behavior. Workspaces work exactly as they do today. + +### Mode 2: Sync to Folder (Open YAML) + +``` +User creates workspace → links to a folder → bidirectional sync +SyncPath = "/Users/dev/my-api-collection", SyncFormat = "open_yaml", SyncEnabled = true +``` + +**Two sub-scenarios:** + +**A) New sync — empty folder:** +1. User creates workspace in DevTools +2. User clicks "Sync to Folder" → picks/creates an empty directory +3. Server sets `sync_path` on the workspace +4. SyncCoordinator starts → exports all existing SQLite data to Open YAML files in the folder +5. File watcher starts → bidirectional sync is live + +**B) New sync — existing Open YAML folder:** +1. User clicks "Open Folder" → picks a directory with `devtools.yaml` +2. Server creates a new workspace with `sync_path` set +3. SyncCoordinator starts → reads entire folder → populates SQLite +4. 
File watcher starts → bidirectional sync is live + +### Mode 3: Import from Bruno (OpenCollection → Open YAML) + +``` +User opens Bruno collection → DevTools converts to Open YAML in a NEW folder → syncs there +SyncPath = "/Users/dev/my-api-devtools/", SyncFormat = "open_yaml", SyncEnabled = true +``` + +**Flow:** +1. User clicks "Import Bruno Collection" → picks directory with `opencollection.yml` +2. Server parses the OpenCollection YAML directory +3. Server creates a new workspace and populates SQLite with the converted data +4. Server creates a NEW folder (e.g., next to the Bruno folder, or user picks location) +5. SyncCoordinator exports SQLite data to Open YAML format in the new folder +6. File watcher starts → bidirectional sync is live on the NEW folder +7. Original Bruno folder is NOT modified + +**Why a separate folder?** The Bruno folder uses OpenCollection YAML format (different schema). We don't want to: +- Corrupt the Bruno collection +- Mix two different YAML formats in one folder +- Create confusion about which tool owns the folder --- @@ -34,19 +173,18 @@ Bruno is moving to the OpenCollection YAML format as the recommended standard go ### 1.1 Architecture Fit -OpenCollection import follows the exact same pattern as existing HAR, Postman, and curl importers: - ``` -OpenCollection .yml files on disk +OpenCollection .yml directory → topencollection.ConvertOpenCollection() - → OpenCollectionResolved (mhttp.HTTP, mfile.File, etc.) - → importer.RunImport() + services.Create() - → SQLite DB + → OpenCollectionResolved (mhttp.HTTP, mfile.File, mflow.Flow, etc.) 
+ → SQLite (workspace created + populated) + → SyncCoordinator exports to Open YAML folder ``` | Layer | Location | Pattern | |-------|----------|---------| | CLI Command | `apps/cli/cmd/import.go` | Add `importBrunoCmd` | +| RPC Endpoint | `packages/server/internal/api/` | "Import Bruno Collection" | | Translator | `packages/server/pkg/translate/topencollection/` | New package | | Importer | `apps/cli/internal/importer/` | Existing `RunImport()` callback | @@ -55,7 +193,7 @@ OpenCollection .yml files on disk #### Directory Structure ``` -my-collection/ +my-bruno-collection/ ├── opencollection.yml # Collection root config ├── environments/ │ └── development.yml @@ -83,217 +221,94 @@ info: #### Request File Structure -Each `.yml` request file has these top-level sections: - ```yaml info: name: Create User - type: http # http | graphql | grpc | ws + type: http seq: 5 - tags: - - smoke - - regression + tags: [smoke, regression] http: - method: POST # GET|POST|PUT|PATCH|DELETE|OPTIONS|HEAD|TRACE|CONNECT + method: POST url: https://api.example.com/users headers: - name: Content-Type value: application/json - name: Authorization value: "Bearer {{token}}" - disabled: true # Optional, marks header as disabled + disabled: true params: - name: filter value: active - type: query # query | path + type: query - name: id value: "123" type: path body: - type: json # json | xml | text | form-urlencoded | multipart-form | graphql | none + type: json data: |- { "name": "John Doe", "email": "john@example.com" } - auth: # none | inherit | basic | bearer | apikey | digest | oauth2 | awsv4 | ntlm + auth: type: bearer token: "{{token}}" runtime: - scripts: - - type: before-request - code: |- - const timestamp = Date.now(); - bru.setVar("timestamp", timestamp); - - type: after-response - code: |- - console.log(res.status); - - type: tests - code: |- - test("should return 201", function() { - expect(res.status).to.equal(201); - }); assertions: - expression: res.status operator: eq value: 
"201" - - expression: res.body.name - operator: isString - actions: - - type: set-variable - phase: after-response - selector: - expression: res.body.token - method: jsonPath - variable: - name: auth_token - scope: collection settings: encodeUrl: true - timeout: 0 - followRedirects: true - maxRedirects: 5 docs: |- - # Create User - Creates a new user account in the system. -``` - -#### Auth Variants - -```yaml -# No auth -auth: none - -# Inherit from parent -auth: inherit - -# Bearer token -auth: - type: bearer - token: "{{token}}" - -# Basic auth -auth: - type: basic - username: admin - password: secret - -# API Key -auth: - type: apikey - key: x-api-key - value: "{{api-key}}" - placement: header # header | query -``` - -#### Body Variants - -```yaml -# JSON body -body: - type: json - data: |- - {"key": "value"} - -# XML body -body: - type: xml - data: |- - value - -# Text body -body: - type: text - data: "plain text content" - -# Form URL-encoded -body: - type: form-urlencoded - data: - - name: username - value: johndoe - - name: password - value: secret123 - -# Multipart form data -body: - type: multipart-form - data: - - name: file - value: "@/path/to/file.pdf" - contentType: application/pdf - - name: description - value: "My file" + Creates a new user account. ``` -#### Environment File (`environments/dev.yml`) - -```yaml -name: development -variables: - - name: api_url - value: http://localhost:3000 - enabled: true - secret: false - type: text -``` - -#### Folder Config (`folder.yml`) - -Contains folder-level metadata, auth, headers, and scripts that apply to all requests in the folder. 
- ### 1.3 Go Types for OpenCollection Parsing ```go package topencollection // --- Collection Root --- - type OpenCollectionRoot struct { - OpenCollection string `yaml:"opencollection"` - Info OpenCollectionInfo `yaml:"info"` + OpenCollection string `yaml:"opencollection"` + Info OpenCollectionInfo `yaml:"info"` } type OpenCollectionInfo struct { - Name string `yaml:"name"` - Summary string `yaml:"summary,omitempty"` - Version string `yaml:"version,omitempty"` - Authors []OpenCollectionAuthor `yaml:"authors,omitempty"` -} - -type OpenCollectionAuthor struct { - Name string `yaml:"name"` - Email string `yaml:"email,omitempty"` + Name string `yaml:"name"` + Summary string `yaml:"summary,omitempty"` + Version string `yaml:"version,omitempty"` + Authors []OpenCollectionAuthor `yaml:"authors,omitempty"` } // --- Request File --- - type OCRequest struct { - Info OCRequestInfo `yaml:"info"` - HTTP *OCHTTPBlock `yaml:"http,omitempty"` - Runtime *OCRuntime `yaml:"runtime,omitempty"` - Settings *OCSettings `yaml:"settings,omitempty"` - Docs string `yaml:"docs,omitempty"` + Info OCRequestInfo `yaml:"info"` + HTTP *OCHTTPBlock `yaml:"http,omitempty"` + Runtime *OCRuntime `yaml:"runtime,omitempty"` + Settings *OCSettings `yaml:"settings,omitempty"` + Docs string `yaml:"docs,omitempty"` } type OCRequestInfo struct { Name string `yaml:"name"` - Type string `yaml:"type"` // "http", "graphql", "grpc", "ws" + Type string `yaml:"type"` Seq int `yaml:"seq,omitempty"` Tags []string `yaml:"tags,omitempty"` } type OCHTTPBlock struct { - Method string `yaml:"method"` - URL string `yaml:"url"` - Headers []OCHeader `yaml:"headers,omitempty"` - Params []OCParam `yaml:"params,omitempty"` - Body *OCBody `yaml:"body,omitempty"` - Auth *OCAuth `yaml:"auth,omitempty"` // Can also be string "inherit"/"none" + Method string `yaml:"method"` + URL string `yaml:"url"` + Headers []OCHeader `yaml:"headers,omitempty"` + Params []OCParam `yaml:"params,omitempty"` + Body *OCBody `yaml:"body,omitempty"` + 
Auth *OCAuth `yaml:"auth,omitempty"` } type OCHeader struct { @@ -305,70 +320,44 @@ type OCHeader struct { type OCParam struct { Name string `yaml:"name"` Value string `yaml:"value"` - Type string `yaml:"type"` // "query" | "path" + Type string `yaml:"type"` Disabled bool `yaml:"disabled,omitempty"` } type OCBody struct { - Type string `yaml:"type"` // "json"|"xml"|"text"|"form-urlencoded"|"multipart-form"|"graphql"|"none" - Data interface{} `yaml:"data"` // string for raw types, []OCFormField for form types + Type string `yaml:"type"` // json|xml|text|form-urlencoded|multipart-form|graphql|none + Data interface{} `yaml:"data"` // string for raw, []OCFormField for forms } type OCFormField struct { Name string `yaml:"name"` Value string `yaml:"value"` Disabled bool `yaml:"disabled,omitempty"` - ContentType string `yaml:"contentType,omitempty"` // For multipart file uploads + ContentType string `yaml:"contentType,omitempty"` } type OCAuth struct { - Type string `yaml:"type"` // "none"|"inherit"|"basic"|"bearer"|"apikey"|... - Token string `yaml:"token,omitempty"` // For bearer - Username string `yaml:"username,omitempty"` // For basic - Password string `yaml:"password,omitempty"` // For basic - Key string `yaml:"key,omitempty"` // For apikey - Value string `yaml:"value,omitempty"` // For apikey - Placement string `yaml:"placement,omitempty"` // For apikey: "header"|"query" + Type string `yaml:"type"` // none|inherit|basic|bearer|apikey|... 
+ Token string `yaml:"token,omitempty"` // bearer + Username string `yaml:"username,omitempty"` // basic + Password string `yaml:"password,omitempty"` // basic + Key string `yaml:"key,omitempty"` // apikey + Value string `yaml:"value,omitempty"` // apikey + Placement string `yaml:"placement,omitempty"` // apikey: header|query } -// --- Runtime --- - type OCRuntime struct { Scripts []OCScript `yaml:"scripts,omitempty"` Assertions []OCAssertion `yaml:"assertions,omitempty"` Actions []OCAction `yaml:"actions,omitempty"` } -type OCScript struct { - Type string `yaml:"type"` // "before-request"|"after-response"|"tests" - Code string `yaml:"code"` -} - type OCAssertion struct { Expression string `yaml:"expression"` Operator string `yaml:"operator"` Value string `yaml:"value,omitempty"` } -type OCAction struct { - Type string `yaml:"type"` // "set-variable" - Phase string `yaml:"phase"` // "after-response" - Selector OCSelector `yaml:"selector"` - Variable OCVariable `yaml:"variable"` -} - -type OCSelector struct { - Expression string `yaml:"expression"` - Method string `yaml:"method"` -} - -type OCVariable struct { - Name string `yaml:"name"` - Scope string `yaml:"scope"` -} - -// --- Settings --- - type OCSettings struct { EncodeUrl *bool `yaml:"encodeUrl,omitempty"` Timeout *int `yaml:"timeout,omitempty"` @@ -376,8 +365,6 @@ type OCSettings struct { MaxRedirects *int `yaml:"maxRedirects,omitempty"` } -// --- Environment --- - type OCEnvironment struct { Name string `yaml:"name"` Variables []OCEnvVariable `yaml:"variables"` @@ -388,19 +375,13 @@ type OCEnvVariable struct { Value string `yaml:"value"` Enabled *bool `yaml:"enabled,omitempty"` Secret *bool `yaml:"secret,omitempty"` - Type string `yaml:"type,omitempty"` // "text" } ``` -### 1.4 Converter (`topencollection/converter.go`) +### 1.4 Converter ```go -package topencollection - -// OpenCollectionResolved contains all entities extracted from an OpenCollection, -// following the same pattern as 
tpostmanv2.PostmanResolvedV2 and harv2.HARResolved. type OpenCollectionResolved struct { - // HTTP entities HTTPRequests []mhttp.HTTP HTTPHeaders []mhttp.HTTPHeader HTTPSearchParams []mhttp.HTTPSearchParam @@ -408,90 +389,48 @@ type OpenCollectionResolved struct { HTTPBodyUrlencoded []mhttp.HTTPBodyUrlencoded HTTPBodyRaw []mhttp.HTTPBodyRaw HTTPAsserts []mhttp.HTTPAssert - - // File hierarchy (folders + request files) - Files []mfile.File - - // Environments - Environments []menv.Env - EnvironmentVars []menv.Variable - - // Flow (optional — one flow with sequential request nodes) - Flow *mflow.Flow - FlowNodes []mflow.Node - RequestNodes []mflow.NodeRequest - FlowEdges []mflow.Edge + Files []mfile.File + Environments []menv.Env + EnvironmentVars []menv.Variable } -type ConvertOptions struct { - WorkspaceID idwrap.IDWrap - FolderID *idwrap.IDWrap // Optional parent folder to import into - CollectionName string // Override collection name - CreateFlow bool // Whether to generate a flow from the collection - GenerateFiles bool // Whether to create File entries for hierarchy -} - -// ConvertOpenCollection reads an OpenCollection YAML directory and returns resolved entities. func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCollectionResolved, error) ``` -#### Conversion Logic - -1. **Detect format**: Check for `opencollection.yml` in the directory root -2. **Parse root config**: Read collection name, version from `opencollection.yml` -3. **Walk directory tree** (depth-first): - - Skip ignored paths (`node_modules`, `.git`, dotenv files) - - For each directory: create `mfile.File` with `ContentTypeFolder` - - For each `.yml` file (not `opencollection.yml`, not `folder.yml`, not in `environments/`): - - Parse with `yaml.Unmarshal()` into `OCRequest` - - Convert to `mhttp.HTTP` + child entities - - For `folder.yml`: extract folder metadata - - For `environments/*.yml`: convert to `menv.Env` + `menv.Variable` -4. 
**Map to DevTools models** (see mapping table) -5. **Generate flow** (optional): Create a linear flow with request nodes ordered by `seq` - -#### Mapping Table: OpenCollection YAML → DevTools +#### Mapping Table: OpenCollection → DevTools | OpenCollection YAML | DevTools Model | Notes | |---|---|---| | `info.name` | `mhttp.HTTP.Name` | | | `info.seq` | `mfile.File.Order` | Float64 ordering | -| `info.type` | Determines request type | "http" only for now | | `http.method` | `mhttp.HTTP.Method` | Uppercase | | `http.url` | `mhttp.HTTP.Url` | | -| `http.headers` | `[]mhttp.HTTPHeader` | `disabled: true` → `Enabled: false` | -| `http.params` (type=query) | `[]mhttp.HTTPSearchParam` | Filter by `type: query` | -| `http.params` (type=path) | Embedded in URL | DevTools uses URL template vars | -| `http.body.type: json` | `mhttp.HTTPBodyRaw`, `BodyKind: Raw` | | -| `http.body.type: xml` | `mhttp.HTTPBodyRaw`, `BodyKind: Raw` | | -| `http.body.type: text` | `mhttp.HTTPBodyRaw`, `BodyKind: Raw` | | +| `http.headers` | `[]mhttp.HTTPHeader` | `disabled` → `Enabled: false` | +| `http.params` (query) | `[]mhttp.HTTPSearchParam` | | +| `http.body.type: json/xml/text` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | | `http.body.type: form-urlencoded` | `[]mhttp.HTTPBodyUrlencoded` | | | `http.body.type: multipart-form` | `[]mhttp.HTTPBodyForm` | | | `http.auth.type: bearer` | `mhttp.HTTPHeader` | → `Authorization: Bearer ` | -| `http.auth.type: basic` | `mhttp.HTTPHeader` | → `Authorization: Basic ` | -| `http.auth.type: apikey` | `mhttp.HTTPHeader` or `mhttp.HTTPSearchParam` | Based on `placement` | -| `runtime.assertions` | `[]mhttp.HTTPAssert` | Convert `expr operator value` format | -| `runtime.scripts` | Not imported (log warning) | DevTools uses JS nodes in flows | -| `runtime.actions` | Not imported (log warning) | DevTools uses flow variables | +| `http.auth.type: basic` | `mhttp.HTTPHeader` | → `Authorization: Basic ` | +| `http.auth.type: apikey` | Header or SearchParam | 
Based on `placement` | +| `runtime.assertions` | `[]mhttp.HTTPAssert` | `expr operator value` format | +| `runtime.scripts` | Not imported (log warning) | DevTools uses JS flow nodes | | `docs` | `mhttp.HTTP.Description` | | -| `settings` | Not imported (log info) | DevTools has own request settings | -| Directory structure | `mfile.File` hierarchy | Folder nesting preserved | -| `folder.yml` | Folder metadata | Name used for folder | +| Directory structure | `mfile.File` hierarchy | Nesting preserved | | `environments/*.yml` | `menv.Env` + `menv.Variable` | | -| `opencollection.yml` | Collection config | Name used as root folder name | ### 1.5 Package Structure ``` packages/server/pkg/translate/topencollection/ -├── types.go # OCRequest, OCHTTPBlock, etc. (YAML struct definitions) -├── converter.go # Main conversion: directory → OpenCollectionResolved +├── types.go # YAML struct definitions +├── converter.go # Directory → OpenCollectionResolved ├── converter_test.go # Tests with sample collections ├── collection.go # opencollection.yml parsing -├── environment.go # Environment .yml → menv conversion -├── auth.go # Auth type → header/param conversion +├── environment.go # Environment conversion +├── auth.go # Auth → header/param conversion ├── body.go # Body type → mhttp body conversion -└── testdata/ # Sample OpenCollection directories for tests +└── testdata/ └── basic-collection/ ├── opencollection.yml ├── environments/ @@ -502,105 +441,59 @@ packages/server/pkg/translate/topencollection/ │ ├── get-users.yml │ └── create-user.yml └── auth/ - ├── folder.yml └── login.yml ``` -### 1.6 CLI Command - -Add `import bruno ` to the existing import command tree: - -```go -// apps/cli/cmd/import.go — add alongside importCurlCmd, importPostmanCmd, importHarCmd - -var importBrunoCmd = &cobra.Command{ - Use: "bruno [directory]", - Short: "Import a Bruno OpenCollection YAML directory", - Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - 
collectionPath := args[0] - return importer.RunImport(ctx, logger, workspaceID, folderID, - func(ctx context.Context, services *common.Services, wsID idwrap.IDWrap, folderIDPtr *idwrap.IDWrap) error { - resolved, err := topencollection.ConvertOpenCollection(collectionPath, topencollection.ConvertOptions{ - WorkspaceID: wsID, - FolderID: folderIDPtr, - GenerateFiles: true, - CreateFlow: false, - }) - if err != nil { - return err - } - // Save all resolved entities via services (same pattern as Postman/HAR) - return saveResolved(ctx, services, resolved) - }, - ) - }, -} -``` - -### 1.7 Testing Strategy - -- **YAML parsing tests**: Parse sample `.yml` request files, verify struct output -- **Converter tests**: Use `testdata/` sample collections → verify `OpenCollectionResolved` output -- **Auth conversion tests**: Test all auth types (bearer, basic, apikey) → correct headers/params -- **Body conversion tests**: Test all body types (json, xml, form-urlencoded, multipart-form) -- **Integration test**: Import sample collection into in-memory SQLite → verify all entities created - --- -## Part 2: DevTools YAML Folder Sync - -This is the core feature — a bidirectional filesystem sync that uses DevTools' own YAML format, inspired by Bruno's chokidar-based sync but built for Go. +## Part 2: Open YAML Format (DevTools' Own Format) ### 2.1 Design Goals -1. **Git-friendly**: YAML files that diff and merge cleanly -2. **Human-readable**: Developers can edit requests in their editor/IDE -3. **Bidirectional**: Changes in the UI persist to disk; changes on disk reflect in the UI -4. **Compatible**: Similar UX to Bruno's folder sync (users understand the concept) -5. **DevTools-native**: Uses DevTools' own YAML format -6. 
**OpenCollection-compatible import**: Can import from OpenCollection and sync in DevTools format +- **Includes flows** — not just HTTP requests, but full flow definitions +- **One file per entity** — each request and flow is its own `.yaml` file +- **Git-friendly** — clean diffs, merge-friendly structure +- **Human-editable** — developers can edit in any text editor or IDE +- **Flat top-level** — `name`, `method`, `url` at root (no `info`/`http` nesting like OpenCollection) +- **Compatible with existing `yamlflowsimplev2`** — flow files use the same format -### 2.2 DevTools Collection YAML Format - -Simplified for individual request files. Intentionally similar to OpenCollection for easy migration, but with DevTools-specific conventions. - -#### Directory Structure +### 2.2 Directory Structure ``` my-collection/ -├── devtools.yaml # Collection config (name, version, settings) +├── devtools.yaml # Collection config ├── environments/ │ ├── dev.yaml │ └── prod.yaml -├── users/ -│ ├── _folder.yaml # Folder metadata (optional) -│ ├── get-users.yaml # Individual request -│ └── create-user.yaml +├── users/ # Folder = directory +│ ├── _folder.yaml # Optional folder metadata +│ ├── get-users.yaml # HTTP request +│ └── create-user.yaml # HTTP request ├── auth/ │ ├── _folder.yaml │ └── login.yaml -└── flows/ # Optional: flow definitions - └── smoke-test.yaml # Uses yamlflowsimplev2 format +└── flows/ # Flow definitions + ├── smoke-test.yaml # Flow (yamlflowsimplev2 format) + └── ci-regression.yaml ``` -#### Collection Config (`devtools.yaml`) +### 2.3 Collection Config (`devtools.yaml`) ```yaml version: "1" name: My API Collection -settings: - base_url: "https://api.example.com" - timeout: 30 ``` -#### Request File (`get-users.yaml`) +This file identifies the directory as a DevTools Open YAML collection. Its presence is how we detect the format (analogous to `opencollection.yml` for Bruno). 
+ +### 2.4 Request File Format ```yaml name: Get Users method: GET url: "{{base_url}}/users" description: "Fetch all users with optional pagination" +order: 1 headers: - name: Authorization @@ -622,12 +515,11 @@ body: type: none # none | raw | form-data | urlencoded ``` -#### Request with JSON body (`create-user.yaml`) - ```yaml name: Create User method: POST url: "{{base_url}}/users" +order: 2 headers: - name: Content-Type @@ -646,12 +538,11 @@ assertions: - "res.body.id neq null" ``` -#### Request with form body (`upload.yaml`) - ```yaml name: Upload File method: POST url: "{{base_url}}/upload" +order: 3 body: type: form-data @@ -663,37 +554,18 @@ body: value: "Test upload" ``` -#### Environment (`environments/dev.yaml`) +### 2.5 Flow File Format -```yaml -name: Development -variables: - - name: base_url - value: "http://localhost:3000" - - name: token - value: "dev-token-123" - secret: true -``` - -#### Folder metadata (`_folder.yaml`) - -```yaml -name: Users API -order: 1 -description: "User management endpoints" - -# Folder-level defaults (applied to all requests in this folder) -defaults: - headers: - - name: X-Api-Version - value: "v2" -``` - -#### Flow file (`flows/smoke-test.yaml`) +Flows use the existing `yamlflowsimplev2` format — this is already implemented and working: ```yaml -# Uses existing yamlflowsimplev2 format exactly +# flows/smoke-test.yaml name: Smoke Test +variables: + - name: auth_token + type: string + default: "" + steps: - request: name: Login @@ -702,6 +574,7 @@ steps: body: type: raw content: '{"email": "test@example.com", "password": "test"}' + - request: name: Get Profile depends_on: [Login] @@ -709,51 +582,56 @@ steps: url: "{{base_url}}/users/me" headers: Authorization: "Bearer {{Login.response.body.token}}" -``` -### 2.3 YAML Serializer/Deserializer (`tyamlcollection/`) + - js: + name: Validate Response + depends_on: [Get Profile] + code: | + if (response.status !== 200) throw new Error("Failed"); +``` -New package for the 
collection YAML format: +### 2.6 Environment File +```yaml +name: Development +variables: + - name: base_url + value: "http://localhost:3000" + - name: token + value: "dev-token-123" + secret: true ``` -packages/server/pkg/translate/tyamlcollection/ -├── types.go # YAML struct definitions with yaml tags -├── parser.go # YAML file → DevTools models -├── serializer.go # DevTools models → YAML files -├── collection.go # devtools.yaml config handling -├── request.go # Single request YAML ↔ mhttp conversion -├── environment.go # Environment YAML ↔ menv conversion -├── folder.go # _folder.yaml ↔ folder metadata -└── parser_test.go # Round-trip tests + +### 2.7 Folder Metadata (`_folder.yaml`) + +```yaml +name: Users API +order: 1 +description: "User management endpoints" ``` -#### Core Types +### 2.8 Open YAML Go Types ```go -package tyamlcollection +package openyaml // CollectionConfig represents devtools.yaml type CollectionConfig struct { - Version string `yaml:"version"` - Name string `yaml:"name"` - Settings *CollectionSettings `yaml:"settings,omitempty"` -} - -type CollectionSettings struct { - BaseURL string `yaml:"base_url,omitempty"` - Timeout int `yaml:"timeout,omitempty"` + Version string `yaml:"version"` + Name string `yaml:"name"` } -// RequestFile represents a single .yaml request file +// RequestFile represents a single request .yaml file type RequestFile struct { Name string `yaml:"name"` Method string `yaml:"method"` URL string `yaml:"url"` Description string `yaml:"description,omitempty"` + Order float64 `yaml:"order,omitempty"` Headers []HeaderEntry `yaml:"headers,omitempty"` QueryParams []HeaderEntry `yaml:"query_params,omitempty"` Body *BodyDef `yaml:"body,omitempty"` - Assertions []AssertionEntry `yaml:"assertions,omitempty"` + Assertions AssertionList `yaml:"assertions,omitempty"` } type HeaderEntry struct { @@ -764,19 +642,22 @@ type HeaderEntry struct { } type BodyDef struct { - Type string `yaml:"type"` // "none", "raw", "form-data", 
"urlencoded" + Type string `yaml:"type"` // none|raw|form-data|urlencoded Content string `yaml:"content,omitempty"` // For raw bodies Fields []HeaderEntry `yaml:"fields,omitempty"` // For form-data / urlencoded } +// AssertionList supports both string shorthand and structured form +// - "res.status eq 200" +// - {value: "res.status eq 200", enabled: false, description: "..."} +type AssertionList []AssertionEntry + type AssertionEntry struct { Value string `yaml:"value,omitempty"` Enabled *bool `yaml:"enabled,omitempty"` Description string `yaml:"description,omitempty"` } -// Custom unmarshaler handles both "res.status eq 200" and {value: ..., enabled: false} - type EnvironmentFile struct { Name string `yaml:"name"` Variables []EnvVariable `yaml:"variables"` @@ -789,60 +670,71 @@ type EnvVariable struct { } type FolderMeta struct { - Name string `yaml:"name,omitempty"` - Order int `yaml:"order,omitempty"` - Description string `yaml:"description,omitempty"` - Defaults *FolderDefaults `yaml:"defaults,omitempty"` + Name string `yaml:"name,omitempty"` + Order float64 `yaml:"order,omitempty"` + Description string `yaml:"description,omitempty"` } +``` -type FolderDefaults struct { - Headers []HeaderEntry `yaml:"headers,omitempty"` -} +### 2.9 Package Structure + +``` +packages/server/pkg/translate/openyaml/ +├── types.go # YAML struct definitions +├── parser.go # Read collection directory → DevTools models +├── serializer.go # DevTools models → YAML files on disk +├── request.go # Single request YAML ↔ mhttp conversion +├── flow.go # Delegates to yamlflowsimplev2 for flow parsing +├── environment.go # Environment YAML ↔ menv conversion +├── folder.go # _folder.yaml handling +├── collection.go # devtools.yaml config +└── parser_test.go # Round-trip tests ``` -#### Conversion Functions +### 2.10 Conversion Functions ```go -// ParseRequestFile reads a .yaml request file and returns DevTools model entities. 
-func ParseRequestFile(data []byte, opts ParseOptions) (*RequestResolved, error) +// ReadCollection reads an Open YAML directory into DevTools models. +func ReadCollection(collectionPath string, opts ReadOptions) (*ioworkspace.WorkspaceBundle, error) -// SerializeRequest converts DevTools model entities to YAML bytes for a single request. -func SerializeRequest(http mhttp.HTTP, headers []mhttp.HTTPHeader, - params []mhttp.HTTPSearchParam, bodyRaw *mhttp.HTTPBodyRaw, - bodyForms []mhttp.HTTPBodyForm, bodyUrlencoded []mhttp.HTTPBodyUrlencoded, - asserts []mhttp.HTTPAssert) ([]byte, error) +// WriteCollection exports a workspace bundle to an Open YAML directory. +func WriteCollection(collectionPath string, bundle *ioworkspace.WorkspaceBundle) error -// ReadCollection reads a full collection directory into DevTools models. -func ReadCollection(collectionPath string, opts ConvertOptions) (*CollectionResolved, error) +// ReadRequest parses a single request YAML file. +func ReadRequest(data []byte) (*RequestFile, error) -// WriteCollection writes a full workspace/collection to disk as YAML files. -func WriteCollection(collectionPath string, bundle *ioworkspace.WorkspaceBundle) error +// WriteRequest serializes a single request to YAML. +func WriteRequest(http mhttp.HTTP, headers []mhttp.HTTPHeader, + params []mhttp.HTTPSearchParam, body interface{}, + asserts []mhttp.HTTPAssert) ([]byte, error) -// WriteRequest writes a single request to a YAML file on disk. -func WriteRequest(filePath string, http mhttp.HTTP, children RequestChildren) error +// ReadFlow parses a single flow YAML file (delegates to yamlflowsimplev2). +func ReadFlow(data []byte, opts FlowReadOptions) (*FlowResolved, error) + +// WriteFlow serializes a single flow to YAML (delegates to yamlflowsimplev2 exporter). +func WriteFlow(flow FlowBundle) ([]byte, error) ``` -### 2.4 File Watcher (`packages/server/pkg/foldersync/`) +--- + +## Part 3: Folder Sync Engine -Replaces Bruno's chokidar with Go's `fsnotify`. 
This is a new package in the server. +### 3.1 File Watcher (`packages/server/pkg/foldersync/`) ``` packages/server/pkg/foldersync/ -├── watcher.go # Core CollectionWatcher using fsnotify -├── debouncer.go # Write stabilization (coalesce rapid events) -├── filter.go # Ignore patterns (.git, node_modules, etc.) -├── sync.go # Bidirectional sync coordinator -├── types.go # Event types, config -└── watcher_test.go # Integration tests with temp directories +├── watcher.go # fsnotify-based CollectionWatcher +├── debouncer.go # Write stabilization (80ms coalescing) +├── filter.go # Ignore .git, node_modules, non-.yaml +├── selftrack.go # Self-write tracker (prevent infinite loops) +├── sync.go # SyncCoordinator (bidirectional orchestrator) +├── types.go # Event types, SyncOptions +└── watcher_test.go # Integration tests ``` -#### Watcher +### 3.2 Watcher ```go -package foldersync - -import "github.com/fsnotify/fsnotify" - type EventType int const ( EventFileCreated EventType = iota @@ -858,133 +750,125 @@ type WatchEvent struct { RelPath string // Relative to collection root } -// CollectionWatcher watches a collection directory for filesystem changes. type CollectionWatcher struct { collectionPath string - ignorePatterns []string watcher *fsnotify.Watcher - events chan WatchEvent debouncer *Debouncer - selfWrites *SelfWriteTracker // Suppress events from our own writes + selfWrites *SelfWriteTracker + events chan WatchEvent } -func NewCollectionWatcher(collectionPath string, opts WatcherOptions) (*CollectionWatcher, error) +func NewCollectionWatcher(path string, opts WatcherOptions) (*CollectionWatcher, error) func (w *CollectionWatcher) Start(ctx context.Context) error func (w *CollectionWatcher) Events() <-chan WatchEvent func (w *CollectionWatcher) Stop() error ``` -#### Debouncer +### 3.3 Debouncer (80ms stabilization) ```go -// Debouncer coalesces rapid filesystem events for the same path. -// fsnotify fires multiple events for a single write operation. 
-// We wait for stabilityThreshold (80ms) of no events before emitting. type Debouncer struct { - stabilityThreshold time.Duration // 80ms (matching Bruno) - timers map[string]*time.Timer - mu sync.Mutex - output chan WatchEvent + threshold time.Duration // 80ms + timers map[string]*time.Timer + mu sync.Mutex + output chan WatchEvent } - -func NewDebouncer(threshold time.Duration) *Debouncer -func (d *Debouncer) Add(event WatchEvent) -func (d *Debouncer) Events() <-chan WatchEvent ``` -#### Self-Write Tracker +### 3.4 Self-Write Tracker ```go -// SelfWriteTracker prevents infinite loops when the sync engine writes a file -// and the watcher detects the change. type SelfWriteTracker struct { mu sync.Mutex - writes map[string]time.Time // path → write timestamp - lifetime time.Duration // How long to suppress (e.g., 2s) + writes map[string]time.Time + lifetime time.Duration // 2s suppression window } func (t *SelfWriteTracker) MarkWrite(path string) func (t *SelfWriteTracker) IsSelfWrite(path string) bool ``` -#### Key Implementation Notes - -- **Recursive watching**: `fsnotify` doesn't watch subdirectories automatically. Walk the tree on start and add watchers for new directories on `EventDirCreated` -- **Initial scan**: On start, walk the tree and emit `EventFileCreated` for all existing `.yaml` files (like chokidar's `ignoreInitial: false`) -- **Ignore patterns**: Filter `.git`, `node_modules`, dotfiles, non-`.yaml` files -- **WSL compatibility**: Detect WSL paths and use polling mode if needed -- **Max depth**: Limit to 20 levels (matching Bruno) +### 3.5 SyncCoordinator -### 2.5 Sync Coordinator (`foldersync/sync.go`) - -The central orchestrator that bridges the filesystem watcher with the DevTools database/services. +The central orchestrator. One instance per synced workspace. ```go -// SyncCoordinator manages bidirectional sync between filesystem and database. 
type SyncCoordinator struct { collectionPath string workspaceID idwrap.IDWrap - format tyamlcollection.CollectionConfig - - // Filesystem → Database - watcher *CollectionWatcher - parser *tyamlcollection.Parser - // Database → Filesystem + // Components + watcher *CollectionWatcher selfWrites *SelfWriteTracker - serializer *tyamlcollection.Serializer autosaver *AutoSaver - // State mapping + // State mapping (UID preservation across re-parses) mu sync.RWMutex - pathToID map[string]idwrap.IDWrap // filepath → entity ID (UID preservation) - idToPath map[idwrap.IDWrap]string // entity ID → filepath (reverse mapping) + pathToID map[string]idwrap.IDWrap + idToPath map[idwrap.IDWrap]string - // Services + // Services (read/write to SQLite) + db *sql.DB services *common.Services - // Event publishing for real-time sync to UI - publisher *eventstream.Publisher + // Real-time sync to UI + publisher eventstream.Publisher +} + +type SyncOptions struct { + CollectionPath string + WorkspaceID idwrap.IDWrap + DB *sql.DB + Services *common.Services + Publisher eventstream.Publisher } func NewSyncCoordinator(opts SyncOptions) (*SyncCoordinator, error) func (s *SyncCoordinator) Start(ctx context.Context) error func (s *SyncCoordinator) Stop() error + +// InitialLoad reads the entire folder and populates SQLite. +// Called when a workspace is first opened with sync enabled. +func (s *SyncCoordinator) InitialLoad(ctx context.Context) error + +// ExportAll writes all SQLite data for this workspace to the folder. +// Called when sync is first enabled on an existing workspace. 
+func (s *SyncCoordinator) ExportAll(ctx context.Context) error ``` -#### Disk → Database Flow +#### Disk → SQLite Flow ``` -Filesystem event (watcher) - → Debounce (80ms stabilization) - → Check self-write tracker (skip if we wrote it) - → Parse YAML file - → Look up existing entity by path→ID mapping - → If new file: Create HTTP + File + children via services - → If changed file: Update HTTP + children via services - → If deleted file: Delete HTTP + File via services - → Publish events to eventstream (UI updates in real-time) +File change detected (watcher) + → Debounce (80ms) + → Skip if self-write + → Classify file type (request .yaml, flow .yaml, environment, folder meta) + → Parse YAML → intermediate types + → Look up entity by path→ID mapping + → Begin transaction + → If new: INSERT into SQLite (HTTP + headers + params + body + asserts) + → If changed: UPDATE in SQLite + → If deleted: DELETE from SQLite + → Commit transaction + → Publish events to eventstream → UI updates in real-time ``` -#### Database → Disk Flow +#### SQLite → Disk Flow ``` -User edits in UI - → RPC handler persists to database - → Eventstream publishes change event +UI edit → RPC handler → SQLite write → eventstream publishes event → SyncCoordinator receives event via subscription - → AutoSaver debounces (500ms, matching Bruno) - → Serialize entity to YAML + → AutoSaver debounces (500ms) + → Read full entity from SQLite (HTTP + all children) + → Serialize to YAML → Mark path in self-write tracker - → Write YAML file to disk - → Watcher detects change → self-write tracker suppresses + → Atomic write (temp file + rename) + → Watcher detects → self-write tracker suppresses → no loop ``` -#### AutoSaver +### 3.6 AutoSaver (500ms debounce) ```go -// AutoSaver handles debounced persistence from database changes to disk. -// Matches Bruno's 500ms debounce behavior. 
type AutoSaver struct { delay time.Duration // 500ms timers map[idwrap.IDWrap]*time.Timer @@ -993,289 +877,239 @@ type AutoSaver struct { } func (a *AutoSaver) ScheduleSave(entityID idwrap.IDWrap) -func (a *AutoSaver) Flush() // Force all pending saves (for graceful shutdown) +func (a *AutoSaver) Flush() // Force-save all pending (graceful shutdown) ``` -### 2.6 Integration with Existing Architecture +### 3.7 Sync Manager (Server-Level) + +Manages all active SyncCoordinators across workspaces. + +```go +// packages/server/pkg/foldersync/manager.go + +type SyncManager struct { + mu sync.RWMutex + coordinators map[idwrap.IDWrap]*SyncCoordinator // workspaceID → coordinator + db *sql.DB + services *common.Services + publisher eventstream.Publisher +} + +func NewSyncManager(db *sql.DB, services *common.Services, publisher eventstream.Publisher) *SyncManager + +// StartSync begins folder sync for a workspace. +func (m *SyncManager) StartSync(ctx context.Context, workspaceID idwrap.IDWrap, path string) error -#### RPC Layer Integration +// StopSync stops folder sync for a workspace. +func (m *SyncManager) StopSync(workspaceID idwrap.IDWrap) error -New RPC endpoints for folder sync management: +// IsActive returns whether a workspace has active sync. +func (m *SyncManager) IsActive(workspaceID idwrap.IDWrap) bool +// RestoreAll starts sync for all workspaces that have sync_enabled=true. +// Called on server startup. +func (m *SyncManager) RestoreAll(ctx context.Context) error + +// Shutdown stops all coordinators gracefully. 
+func (m *SyncManager) Shutdown() error ``` -// In packages/spec — new TypeSpec definitions -@route("/folder-sync") -interface FolderSync { - // Open a collection folder for sync - @post open(workspaceId: string, collectionPath: string): FolderSyncStatus; +### 3.8 Safety Mechanisms + +| Mechanism | Implementation | +|---|---| +| Path validation | `filepath.Rel()` must not escape collection root | +| Filename sanitization | Strip invalid chars, truncate at 255 | +| Write stabilization | 80ms debounce on watcher events | +| Autosave debounce | 500ms debounce on SQLite→disk writes | +| Self-write suppression | 2s window to suppress watcher events from our writes | +| Atomic writes | Write temp file → `os.Rename()` | +| UID preservation | `pathToID` map persists during session | +| Conflict resolution | Disk wins (last-write-wins with debounce) | +| Large file guard | Skip files >5MB | +| Cross-platform | `filepath.Clean/Rel/Join`, handle `\r\n` | +| Recursive watch | Walk tree on start, add subdirs on `DirCreated` | +| Max depth | 20 levels | + +--- - // Close/stop syncing a collection - @post close(workspaceId: string): void; +## Part 4: RPC Endpoints - // Get current sync status - @get status(workspaceId: string): FolderSyncStatus; +### 4.1 Workspace Sync API + +New TypeSpec definitions for folder sync management: - // Export workspace as a collection folder - @post export(workspaceId: string, outputPath: string): void; -} ``` +// Folder sync operations on workspaces -#### Server Startup +// Enable folder sync on a workspace +EnableFolderSync(workspaceId, folderPath) → SyncStatus -The folder sync coordinator starts/stops with the server: +// Disable folder sync +DisableFolderSync(workspaceId) → void -```go -// On workspace open with folder sync enabled: -coordinator := foldersync.NewSyncCoordinator(foldersync.SyncOptions{ - CollectionPath: "/path/to/collection", - WorkspaceID: workspaceID, - Services: services, - Publisher: publisher, -}) -coordinator.Start(ctx) - 
-// On workspace close: -coordinator.Stop() -``` +// Get sync status +GetFolderSyncStatus(workspaceId) → SyncStatus -#### Eventstream Integration +// Import Bruno collection → create workspace + Open YAML folder +ImportBrunoCollection(brunoFolderPath, outputFolderPath) → Workspace -Subscribe to entity change events for Database → Disk sync: +// Export workspace to Open YAML folder +ExportToFolder(workspaceId, folderPath) → void -```go -sub := publisher.Subscribe(eventstream.Topic{ - WorkspaceID: workspaceID, - EntityTypes: []eventstream.EntityType{ - eventstream.EntityHTTP, - eventstream.EntityHTTPHeader, - eventstream.EntityHTTPSearchParam, - eventstream.EntityHTTPBodyRaw, - eventstream.EntityHTTPBodyForm, - eventstream.EntityHTTPBodyUrlencoded, - eventstream.EntityHTTPAssert, - eventstream.EntityFile, - }, -}) - -for event := range sub.Events() { - switch event.Op { - case eventstream.OpInsert, eventstream.OpUpdate: - autosaver.ScheduleSave(event.ID) - case eventstream.OpDelete: - deleteFileFromDisk(event.ID) - } +type SyncStatus { + enabled: boolean + folderPath: string + lastSyncAt: timestamp + fileCount: number + errors: string[] // Any sync errors } ``` -### 2.7 Safety Mechanisms +### 4.2 Desktop UI Integration -| Mechanism | Implementation | Matches Bruno? 
| -|---|---|---| -| Path validation | `filepath.Rel()` must not escape collection root | Yes | -| Filename sanitization | Strip invalid chars, truncate at 255 | Yes | -| Write stabilization | 80ms debounce on watcher events | Yes (stabilityThreshold: 80) | -| Autosave debounce | 500ms debounce on UI changes | Yes | -| Self-write suppression | Track recently-written paths (2s window) | Improved (Bruno re-parses) | -| Atomic writes | Write to temp file, then `os.Rename()` | Improved (Bruno doesn't) | -| UID preservation | `pathToID` map persists across re-parses | Yes | -| Conflict resolution | Disk wins over in-memory (matching Bruno) | Yes | -| Large file handling | Skip files >5MB, warn on collections >20MB | Yes | -| Cross-platform paths | `filepath.Clean/Rel/Join` consistently | Yes | -| Line endings | Handle `\r\n` and `\n` | Yes | +**New UI elements needed:** +- Workspace settings: "Link to Folder" button with folder picker +- Workspace settings: "Unlink Folder" button +- Status bar: sync status indicator (synced, syncing, error) +- Import dialog: "Import Bruno Collection" with source folder picker + destination folder picker +- New workspace dialog: option to "Create from folder" or "Create empty" --- -## Part 3: CLI Runner Integration - -### 3.1 Run from Collection Folder +## Part 5: CLI Integration -Add a CLI command to run requests directly from a synced collection folder: +### 5.1 CLI Import Command ``` -devtools run ./my-collection # Run all requests sequentially -devtools run ./my-collection/users/get-users.yaml # Run single request -devtools run ./my-collection --env dev # With environment -devtools run ./my-collection/flows/smoke-test.yaml # Run a flow +devtools import bruno --output --workspace ``` -This reuses the existing `apps/cli/internal/runner/` infrastructure: +### 5.2 CLI Run from Folder -1. Read collection folder → `tyamlcollection.ReadCollection()` -2. Create in-memory SQLite → populate with resolved entities -3. 
Execute via existing `runner.RunFlow()` or direct HTTP execution -4. Report results - -### 3.2 CLI Commands - -```go -// apps/cli/cmd/run.go — extend existing run command - -var runCollectionCmd = &cobra.Command{ - Use: "collection [directory-or-file]", - Short: "Run requests from a DevTools collection folder", - RunE: func(cmd *cobra.Command, args []string) error { - // 1. Read collection - // 2. Import into in-memory DB - // 3. Execute via runner - // 4. Report results - }, -} +``` +devtools run # Run all requests +devtools run /users/get-users.yaml # Single request +devtools run /flows/smoke-test.yaml # Run a flow +devtools run --env dev # With environment ``` --- -## Part 4: Implementation Phases +## Implementation Phases ### Phase 1: OpenCollection YAML Parser + Converter -**Scope**: Parse OpenCollection YAML directories and convert to DevTools models. +**Scope**: Parse Bruno's OpenCollection YAML directories → DevTools models. **Files**: -- `packages/server/pkg/translate/topencollection/types.go` -- `packages/server/pkg/translate/topencollection/converter.go` -- `packages/server/pkg/translate/topencollection/collection.go` -- `packages/server/pkg/translate/topencollection/environment.go` -- `packages/server/pkg/translate/topencollection/auth.go` -- `packages/server/pkg/translate/topencollection/body.go` -- `packages/server/pkg/translate/topencollection/converter_test.go` -- `packages/server/pkg/translate/topencollection/testdata/` (sample collections) - -**Dependencies**: `gopkg.in/yaml.v3` (already a dependency), existing models (`mhttp`, `mfile`, `menv`) +``` +packages/server/pkg/translate/topencollection/ +├── types.go, converter.go, collection.go, environment.go +├── auth.go, body.go, converter_test.go +└── testdata/basic-collection/... 
+``` -**Testing**: Parse sample OpenCollection directories, verify all entities +**Deps**: `gopkg.in/yaml.v3` (existing), `mhttp`, `mfile`, `menv` -### Phase 2: CLI Import Command +### Phase 2: Open YAML Format (Requests + Flows) -**Scope**: Add `import bruno ` CLI command. +**Scope**: DevTools' own YAML format — parser + serializer with round-trip support. Flows delegate to existing `yamlflowsimplev2`. **Files**: -- `apps/cli/cmd/import.go` (add `importBrunoCmd`) - -**Dependencies**: Phase 1 (converter), existing importer infrastructure +``` +packages/server/pkg/translate/openyaml/ +├── types.go, parser.go, serializer.go, request.go +├── flow.go, environment.go, folder.go, collection.go +└── parser_test.go +``` -**Testing**: End-to-end import into in-memory SQLite +**Deps**: `gopkg.in/yaml.v3`, `yamlflowsimplev2` (for flows), `mhttp`, `mfile`, `mflow` -### Phase 3: DevTools YAML Collection Format +### Phase 3: Workspace Schema + Migration -**Scope**: Define and implement DevTools' own YAML format for individual request files with round-trip serialization. +**Scope**: Add `sync_path`, `sync_format`, `sync_enabled` to workspace table and model. 
**Files**: -- `packages/server/pkg/translate/tyamlcollection/types.go` -- `packages/server/pkg/translate/tyamlcollection/parser.go` -- `packages/server/pkg/translate/tyamlcollection/serializer.go` -- `packages/server/pkg/translate/tyamlcollection/request.go` -- `packages/server/pkg/translate/tyamlcollection/environment.go` -- `packages/server/pkg/translate/tyamlcollection/folder.go` -- `packages/server/pkg/translate/tyamlcollection/collection.go` -- `packages/server/pkg/translate/tyamlcollection/parser_test.go` +- `packages/db/pkg/sqlc/schema/` — new migration SQL +- `packages/db/pkg/sqlc/queries/` — updated workspace queries +- `packages/server/pkg/model/mworkspace/` — updated model +- `packages/server/pkg/service/sworkspace/` — updated reader/writer +- Run `pnpm nx run db:generate` to regenerate sqlc -**Dependencies**: Existing models, `gopkg.in/yaml.v3` +### Phase 4: File Watcher + Sync Engine -**Testing**: Round-trip tests (parse → serialize → parse), verify equivalence - -### Phase 4: File Watcher - -**Scope**: `fsnotify`-based watcher with debouncing, filtering, self-write tracking. +**Scope**: `fsnotify` watcher, debouncer, self-write tracker, SyncCoordinator, SyncManager. **Files**: -- `packages/server/pkg/foldersync/watcher.go` -- `packages/server/pkg/foldersync/debouncer.go` -- `packages/server/pkg/foldersync/filter.go` -- `packages/server/pkg/foldersync/types.go` -- `packages/server/pkg/foldersync/watcher_test.go` - -**Dependencies**: `github.com/fsnotify/fsnotify` +``` +packages/server/pkg/foldersync/ +├── watcher.go, debouncer.go, filter.go, selftrack.go +├── sync.go, manager.go, types.go +└── watcher_test.go +``` -**Testing**: Create temp dirs, write files, verify events +**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (Open YAML), Phase 3 (workspace schema) -### Phase 5: Sync Coordinator +### Phase 5: RPC Endpoints + CLI Import -**Scope**: Bidirectional sync engine — disk↔database with eventstream integration. 
+**Scope**: TypeSpec definitions, RPC handlers for sync management, CLI `import bruno` command. **Files**: -- `packages/server/pkg/foldersync/sync.go` -- `packages/server/pkg/foldersync/autosaver.go` - -**Dependencies**: Phase 3 (YAML format), Phase 4 (watcher), services, eventstream +- `packages/spec/` — new TypeSpec definitions +- `packages/server/internal/api/` — RPC handlers +- `apps/cli/cmd/import.go` — `importBrunoCmd` -**Testing**: Full integration tests — modify files on disk, verify DB updates; modify DB, verify files written +**Deps**: Phase 1 (OpenCollection parser), Phase 4 (sync engine) -### Phase 6: RPC Endpoints + Desktop Integration +### Phase 6: Desktop Integration -**Scope**: TypeSpec definitions for folder sync management, RPC handlers, desktop UI. +**Scope**: Electron folder picker, workspace sync settings UI, status indicators. **Files**: -- `packages/spec/` (TypeSpec definitions) -- `packages/server/internal/api/` (RPC handlers) -- `packages/client/` (React hooks/services) -- `apps/desktop/` (Electron integration — folder picker, sync status) +- `packages/client/` — React hooks/services for sync +- `packages/ui/` — sync status components +- `apps/desktop/` — Electron IPC for folder picker -**Dependencies**: Phase 5 (sync coordinator) +**Deps**: Phase 5 (RPC endpoints) ### Phase 7: CLI Collection Runner -**Scope**: Run requests/flows directly from collection folders. +**Scope**: `devtools run ` command. 
**Files**: -- `apps/cli/cmd/run.go` (extend with `collection` subcommand) +- `apps/cli/cmd/run.go` -**Dependencies**: Phase 3 (YAML format), existing runner +**Deps**: Phase 2 (Open YAML format), existing runner --- ## Phase Dependency Graph ``` -Phase 1: OpenCollection Parser ──→ Phase 2: CLI Import - │ -Phase 3: DevTools YAML Format ──┬─────────┼──→ Phase 7: CLI Collection Runner - │ │ -Phase 4: File Watcher ──────────┤ │ - │ │ - └──→ Phase 5: Sync Coordinator ──→ Phase 6: RPC + Desktop +Phase 1: OpenCollection Parser ──────────────────────────┐ + │ +Phase 2: Open YAML Format ──┬────────────────────────────┤ + │ │ +Phase 3: Workspace Schema ──┤ │ + │ │ + └──→ Phase 4: Sync Engine ───┼──→ Phase 5: RPC + CLI Import + │ + └──→ Phase 6: Desktop UI +Phase 2 ─────────────────────────────────────────────────────→ Phase 7: CLI Runner ``` -Phases 1 and 3-4 can be developed **in parallel** (no dependencies between them). +**Parallel work:** +- Phase 1, 2, 3 can all be developed in parallel +- Phase 4 depends on 2+3 +- Phase 5 depends on 1+4 +- Phase 7 depends only on 2 --- ## External Dependencies -| Dependency | Purpose | Phase | +| Dependency | Purpose | Already in use? | |---|---|---| -| `gopkg.in/yaml.v3` | YAML parsing (already in use by `yamlflowsimplev2`) | 1, 3 | -| `github.com/fsnotify/fsnotify` | Cross-platform file system notifications | 4 | - ---- - -## Key Design Decisions - -### Why OpenCollection YAML only (no .bru parser)? - -1. **Bruno's direction** — OpenCollection YAML is the recommended format going forward -2. **No custom parser** — standard `gopkg.in/yaml.v3` handles everything -3. **Forward-compatible** — .bru is being phased out -4. **Less code to maintain** — no hand-written PEG parser -5. **Better tooling** — YAML linting, schema validation, IDE support - -### Why a separate DevTools YAML format? - -1. **Control** — we define the schema, we evolve it independently -2. 
**Simplicity** — OpenCollection has sections we don't use (`runtime.scripts`, `runtime.actions`) -3. **Flat structure** — our format puts `name`, `method`, `url` at top level (no `info`/`http` nesting) -4. **Consistency** — matches existing `yamlflowsimplev2` conventions -5. **Import, don't adopt** — import OpenCollection, sync in DevTools format - -### Why fsnotify instead of polling? - -1. **Efficient** — kernel-level notifications, no CPU overhead from polling -2. **Low latency** — events arrive in milliseconds vs polling interval -3. **Standard** — most Go file-watching libraries use fsnotify -4. **Exception**: WSL paths use polling (matching Bruno's approach) - -### Why autosave debounce at 500ms? - -1. **Matches Bruno** — users expect similar behavior -2. **Prevents rapid writes** — typing in URL field doesn't cause per-keystroke disk writes -3. **Balances responsiveness** — changes appear on disk within ~500ms, fast enough for git workflows +| `gopkg.in/yaml.v3` | YAML parsing | Yes (`yamlflowsimplev2`) | +| `github.com/fsnotify/fsnotify` | Filesystem notifications | No (new) | From bc54091db6a5f42235c90e61d58ba9b950316e0f Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 15:54:53 +0000 Subject: [PATCH 04/20] =?UTF-8?q?docs:=20correct=20architecture=20?= =?UTF-8?q?=E2=80=94=20folder=20is=20source=20of=20truth,=20SQLite=20is=20?= =?UTF-8?q?cache?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The previous plan had SQLite as "king" which breaks on git pull — if someone pulls new changes into the Open YAML folder, SQLite wouldn't know and could overwrite them. Corrected architecture: - Folder = source of truth (committed to git, shared, portable) - SQLite = runtime cache (fast queries, disposable, rebuildable) - Delete state.db → server reads folder → SQLite rebuilt → nothing lost - git pull → watcher detects changes → SQLite updated to match folder Added reconciliation-on-startup flow: 1. 
Walk folder, build path→content map 2. Compare with SQLite entities 3. INSERT/UPDATE/DELETE SQLite to match folder state 4. Start file watcher for live changes This matches how Bruno works — Redux store is just a runtime view of what's on disk. We do the same with SQLite instead of Redux. https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 129 ++++++++++++------ 1 file changed, 86 insertions(+), 43 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index 74818ea7..1a2afc4e 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -5,7 +5,7 @@ This document describes the plan to add: 1. **OpenCollection YAML Import** — Parse Bruno's OpenCollection YAML collections and convert them into DevTools' Open YAML format in a separate folder -2. **Open YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own "Open YAML" format (requests + flows), with SQLite as the runtime source of truth +2. **Open YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own "Open YAML" format (requests + flows), with the **folder as the source of truth** and SQLite as a runtime cache 3. **Workspace Sync Modes** — A workspace can be synced to a local folder, opened from a Bruno collection (auto-converted), or used without sync (current behavior) 4. **CLI Runner Integration** — Execute collections from Open YAML folders @@ -13,7 +13,8 @@ This document describes the plan to add: - **Open YAML** — DevTools' own YAML format for collections. Includes both HTTP requests and flows. One file per request, one file per flow, folder hierarchy maps to the file tree. - **OpenCollection YAML** — Bruno's YAML format (`opencollection.yml` root). We import FROM this format. -- **SQLite** — Remains the runtime source of truth. Folder sync mirrors SQLite ↔ filesystem bidirectionally. 
+- **Folder = Source of Truth** — The Open YAML folder is what gets committed to git, shared with teammates, and survives across machines. It is the canonical data. +- **SQLite = Runtime Cache** — SQLite is populated from the folder and provides fast indexed queries for the UI. It can be fully rebuilt from the folder at any time. - **Workspace Sync Modes** — Each workspace can optionally be linked to a folder on disk. ### Sources @@ -43,44 +44,68 @@ Single SQLite DB (state.db in userData) - All data lives exclusively in SQLite - Real-time UI sync via in-memory eventstream -### Proposed State: SQLite + Folder Sync +### Proposed State: Folder-First Architecture ``` Desktop App (React UI) ↕ Connect RPC over Unix Socket Go Server ↕ Reader/Writer services - ├── SQLite DB (state.db) ←── RUNTIME SOURCE OF TRUTH + ├── SQLite DB (state.db) ←── RUNTIME CACHE (disposable, rebuildable) │ └── workspace.sync_path = "/path/to/my-collection" │ └── SyncCoordinator (per synced workspace) ↕ bidirectional - Open YAML Folder ←── GIT-FRIENDLY PERSISTENCE + Open YAML Folder ←── SOURCE OF TRUTH (git, shared, portable) /path/to/my-collection/ ├── devtools.yaml ├── requests/ └── flows/ ``` -### Design Principle: SQLite is King +### Design Principle: Folder is the Source of Truth -SQLite remains the **single source of truth at runtime**. The folder sync is a **persistence mirror**: +The Open YAML folder is the **canonical data store**. SQLite is a **runtime cache** that can be fully rebuilt from the folder at any time. This is the same model Bruno uses — their Redux store is just a runtime view of what's on disk. 
+ +**Why the folder must be the source of truth:** +- `git pull` brings new changes → folder has the latest data → SQLite must update to match +- Teammate edits a request in their editor → saves → watcher picks it up → SQLite updates → UI reflects +- Delete `state.db` → server starts → reads folder → SQLite rebuilt → nothing lost +- The folder is what gets committed, pushed, reviewed in PRs, and shared across machines + +**Why SQLite is still valuable as a cache:** +- Fast indexed queries (no YAML parsing on every read) +- Transactions for atomic multi-entity operations +- Existing services, RPC handlers, and runner all work with SQLite +- Real-time eventstream already wired to SQLite changes +- Supports non-synced workspaces (Mode 1) that live only in SQLite | Direction | Trigger | Behavior | |-----------|---------|----------| -| **Folder → SQLite** | File watcher detects change | Parse YAML → upsert into SQLite → eventstream → UI updates | -| **SQLite → Folder** | Eventstream publishes change | Serialize from SQLite → write YAML to disk | -| **Initial load** | Workspace opened with `sync_path` | Read entire folder → populate SQLite (folder wins on first load) | -| **Conflict** | Both change simultaneously | Disk wins (matching Bruno behavior). Last-write-wins with debounce. 
| +| **Folder → SQLite** | File watcher detects change, or git pull, or startup | Parse YAML → upsert into SQLite → eventstream → UI updates | +| **SQLite → Folder** | UI edit via RPC handler | Write to SQLite → serialize to YAML → write to disk | +| **Startup** | Server starts with synced workspace | Read entire folder → populate/reconcile SQLite | +| **Git pull** | Watcher detects batch changes | Re-parse changed files → update SQLite → UI refreshes | +| **Conflict** | File changed on disk while UI was editing | **Folder wins** — disk state overwrites SQLite | +| **Rebuild** | `state.db` deleted or corrupted | Full re-read from folder → SQLite rebuilt from scratch | + +### Reconciliation on Startup -### Why SQLite Stays as Source of Truth +When a synced workspace is opened, the SyncCoordinator must reconcile SQLite with the folder: + +``` +1. Walk the Open YAML folder, build a map of path → parsed content +2. Read all entities for this workspace from SQLite +3. Compare: + a. File exists on disk but not in SQLite → INSERT (new file from git pull) + b. File exists in both, content differs → UPDATE SQLite from disk (folder wins) + c. Entity in SQLite but no file on disk → DELETE from SQLite (file was deleted/moved) + d. File and SQLite match → no-op +4. Rebuild pathToID / idToPath maps +5. Start file watcher for live changes +``` -1. **Performance** — SQLite queries are instant; parsing YAML on every read would be slow -2. **Transactions** — Atomic multi-entity updates (e.g., creating a request + headers + body in one tx) -3. **Indexing** — Fast lookups by ID, workspace, folder -4. **Existing code** — All services, RPC handlers, runner already work with SQLite -5. **Offline** — No filesystem dependency for core functionality -6. **Real-time sync** — Eventstream already works with SQLite changes +This ensures that after a `git pull`, `git merge`, `git checkout`, or any external file changes, SQLite is always in sync with the folder. 
### Workspace Schema Change @@ -127,24 +152,26 @@ Nothing changes from the current behavior. Workspaces work exactly as they do to ### Mode 2: Sync to Folder (Open YAML) ``` -User creates workspace → links to a folder → bidirectional sync +User creates workspace → links to a folder → folder becomes source of truth SyncPath = "/Users/dev/my-api-collection", SyncFormat = "open_yaml", SyncEnabled = true ``` **Two sub-scenarios:** -**A) New sync — empty folder:** -1. User creates workspace in DevTools +**A) Export to new folder (existing workspace → empty folder):** +1. User has an existing workspace in DevTools (data in SQLite) 2. User clicks "Sync to Folder" → picks/creates an empty directory 3. Server sets `sync_path` on the workspace -4. SyncCoordinator starts → exports all existing SQLite data to Open YAML files in the folder -5. File watcher starts → bidirectional sync is live +4. SyncCoordinator starts → exports all SQLite data to Open YAML files in the folder +5. File watcher starts → from now on, folder is the source of truth +6. User can `git init && git add . && git commit` to start versioning -**B) New sync — existing Open YAML folder:** +**B) Open existing folder (Open YAML folder → new workspace):** 1. User clicks "Open Folder" → picks a directory with `devtools.yaml` 2. Server creates a new workspace with `sync_path` set -3. SyncCoordinator starts → reads entire folder → populates SQLite -4. File watcher starts → bidirectional sync is live +3. SyncCoordinator starts → reads entire folder → populates SQLite cache +4. File watcher starts → folder is the source of truth +5. This is the common flow after `git clone` on a new machine ### Mode 3: Import from Bruno (OpenCollection → Open YAML) @@ -827,16 +854,18 @@ func NewSyncCoordinator(opts SyncOptions) (*SyncCoordinator, error) func (s *SyncCoordinator) Start(ctx context.Context) error func (s *SyncCoordinator) Stop() error -// InitialLoad reads the entire folder and populates SQLite. 
-// Called when a workspace is first opened with sync enabled. -func (s *SyncCoordinator) InitialLoad(ctx context.Context) error +// Reconcile reads the entire folder and reconciles SQLite cache to match. +// Called on startup, after git pull, or when opening a synced workspace. +// The folder always wins — SQLite is rebuilt to match the folder state. +func (s *SyncCoordinator) Reconcile(ctx context.Context) error // ExportAll writes all SQLite data for this workspace to the folder. -// Called when sync is first enabled on an existing workspace. +// Called ONCE when sync is first enabled on an existing (non-synced) workspace. +// After this initial export, the folder becomes the source of truth. func (s *SyncCoordinator) ExportAll(ctx context.Context) error ``` -#### Disk → SQLite Flow +#### Folder → SQLite (external changes: git pull, editor saves, etc.) ``` File change detected (watcher) @@ -846,26 +875,38 @@ File change detected (watcher) → Parse YAML → intermediate types → Look up entity by path→ID mapping → Begin transaction - → If new: INSERT into SQLite (HTTP + headers + params + body + asserts) - → If changed: UPDATE in SQLite - → If deleted: DELETE from SQLite + → If new file: INSERT into SQLite cache + → If changed file: UPDATE SQLite cache (folder wins) + → If deleted file: DELETE from SQLite cache → Commit transaction → Publish events to eventstream → UI updates in real-time ``` -#### SQLite → Disk Flow +#### SQLite → Folder (UI edits) ``` -UI edit → RPC handler → SQLite write → eventstream publishes event - → SyncCoordinator receives event via subscription - → AutoSaver debounces (500ms) - → Read full entity from SQLite (HTTP + all children) - → Serialize to YAML +UI edit → RPC handler + → Write to SQLite cache (for immediate UI responsiveness) + → Serialize entity to YAML → Mark path in self-write tracker - → Atomic write (temp file + rename) + → Atomic write to disk (temp file + rename) ← this is the canonical write → Watcher detects → self-write 
tracker suppresses → no loop ``` +**Key difference from "SQLite is king" model:** The RPC handler writes to BOTH SQLite and disk. The disk write is the one that matters — if SQLite is lost, the disk file survives. The SQLite write is for UI performance (instant queries without re-parsing YAML). + +#### Git Pull / Branch Switch (batch reconciliation) + +``` +User runs `git pull` or `git checkout` outside DevTools + → Watcher detects batch of file changes + → For each changed/added/deleted file: + → Update SQLite cache to match folder state + → Publish batch events to eventstream → UI refreshes +``` + +This is the critical flow that "SQLite is king" would break — after git pull, the folder has the truth and SQLite must catch up. + ### 3.6 AutoSaver (500ms debounce) ```go @@ -907,7 +948,9 @@ func (m *SyncManager) StopSync(workspaceID idwrap.IDWrap) error func (m *SyncManager) IsActive(workspaceID idwrap.IDWrap) bool // RestoreAll starts sync for all workspaces that have sync_enabled=true. -// Called on server startup. +// Called on server startup. For each synced workspace: +// 1. Reconcile SQLite cache from folder (folder wins) +// 2. Start file watcher func (m *SyncManager) RestoreAll(ctx context.Context) error // Shutdown stops all coordinators gracefully. 
@@ -925,7 +968,7 @@ func (m *SyncManager) Shutdown() error | Self-write suppression | 2s window to suppress watcher events from our writes | | Atomic writes | Write temp file → `os.Rename()` | | UID preservation | `pathToID` map persists during session | -| Conflict resolution | Disk wins (last-write-wins with debounce) | +| Conflict resolution | Folder always wins (it's the source of truth) | | Large file guard | Skip files >5MB | | Cross-platform | `filepath.Clean/Rel/Join`, handle `\r\n` | | Recursive watch | Walk tree on start, add subdirs on `DirCreated` | From b20b03479a008663265a49e0720801d056c8a038 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 16:19:02 +0000 Subject: [PATCH 05/20] =?UTF-8?q?docs:=20align=20plan=20with=20DevTools=20?= =?UTF-8?q?conventions=20=E2=80=94=20reuse=20yamlflowsimplev2=20types,=20r?= =?UTF-8?q?ename=20to=20OpenYAML?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Rename "Open YAML" → "OpenYAML" (no space) throughout - Rename package openyaml/ → topenyaml/ (t prefix convention) - Replace duplicate types (HeaderEntry, BodyDef, AssertionList) with yamlflowsimplev2 imports (YamlRequestDefV2, HeaderMapOrSlice, YamlBodyUnion, AssertionsOrSlice) via embedding - Update YAML examples to match YamlBodyUnion field names (raw, form_data) - Add explicit scope note: SQLite-as-cache only for synced workspaces (Mode 2 & 3), Mode 1 keeps SQLite as sole data store - Confirm only gopkg.in/yaml.v3 used (no extra YAML libs) https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 166 ++++++++++-------- 1 file changed, 92 insertions(+), 74 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index 1a2afc4e..42b5296f 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -1,19 +1,19 @@ -# OpenCollection Import & Open 
YAML Folder Sync Plan +# OpenCollection Import & OpenYAML Folder Sync Plan ## Overview This document describes the plan to add: -1. **OpenCollection YAML Import** — Parse Bruno's OpenCollection YAML collections and convert them into DevTools' Open YAML format in a separate folder -2. **Open YAML Folder Sync** — Bidirectional filesystem sync using DevTools' own "Open YAML" format (requests + flows), with the **folder as the source of truth** and SQLite as a runtime cache +1. **OpenCollection YAML Import** — Parse Bruno's OpenCollection YAML collections and convert them into DevTools' OpenYAML format in a separate folder +2. **OpenYAML Folder Sync** — Bidirectional filesystem sync using DevTools' own "OpenYAML" format (requests + flows), with the **folder as the source of truth** and SQLite as a runtime cache 3. **Workspace Sync Modes** — A workspace can be synced to a local folder, opened from a Bruno collection (auto-converted), or used without sync (current behavior) -4. **CLI Runner Integration** — Execute collections from Open YAML folders +4. **CLI Runner Integration** — Execute collections from OpenYAML folders ### Key Concepts -- **Open YAML** — DevTools' own YAML format for collections. Includes both HTTP requests and flows. One file per request, one file per flow, folder hierarchy maps to the file tree. +- **OpenYAML** — DevTools' own YAML format for collections. Includes both HTTP requests and flows. One file per request, one file per flow, folder hierarchy maps to the file tree. - **OpenCollection YAML** — Bruno's YAML format (`opencollection.yml` root). We import FROM this format. -- **Folder = Source of Truth** — The Open YAML folder is what gets committed to git, shared with teammates, and survives across machines. It is the canonical data. +- **Folder = Source of Truth** — The OpenYAML folder is what gets committed to git, shared with teammates, and survives across machines. It is the canonical data. 
- **SQLite = Runtime Cache** — SQLite is populated from the folder and provides fast indexed queries for the UI. It can be fully rebuilt from the folder at any time. - **Workspace Sync Modes** — Each workspace can optionally be linked to a folder on disk. @@ -56,7 +56,7 @@ Go Server │ └── SyncCoordinator (per synced workspace) ↕ bidirectional - Open YAML Folder ←── SOURCE OF TRUTH (git, shared, portable) + OpenYAML Folder ←── SOURCE OF TRUTH (git, shared, portable) /path/to/my-collection/ ├── devtools.yaml ├── requests/ @@ -65,7 +65,13 @@ Go Server ### Design Principle: Folder is the Source of Truth -The Open YAML folder is the **canonical data store**. SQLite is a **runtime cache** that can be fully rebuilt from the folder at any time. This is the same model Bruno uses — their Redux store is just a runtime view of what's on disk. +> **Scope:** The folder-as-source-of-truth / SQLite-as-cache behavior described in this section +> **only applies to synced workspaces** (Mode 2: Sync to Folder, Mode 3: Import from Bruno). +> Non-synced workspaces (Mode 1) continue to use SQLite as the sole data store — no folder +> is involved, and no reconciliation or file watching occurs. The SyncCoordinator is never +> created for Mode 1 workspaces. + +The OpenYAML folder is the **canonical data store**. SQLite is a **runtime cache** that can be fully rebuilt from the folder at any time. This is the same model Bruno uses — their Redux store is just a runtime view of what's on disk. **Why the folder must be the source of truth:** - `git pull` brings new changes → folder has the latest data → SQLite must update to match @@ -94,7 +100,7 @@ The Open YAML folder is the **canonical data store**. SQLite is a **runtime cach When a synced workspace is opened, the SyncCoordinator must reconcile SQLite with the folder: ``` -1. Walk the Open YAML folder, build a map of path → parsed content +1. Walk the OpenYAML folder, build a map of path → parsed content 2. 
Read all entities for this workspace from SQLite 3. Compare: a. File exists on disk but not in SQLite → INSERT (new file from git pull) @@ -147,9 +153,9 @@ User creates workspace → data lives only in SQLite SyncPath = nil, SyncEnabled = false ``` -Nothing changes from the current behavior. Workspaces work exactly as they do today. +Nothing changes from the current behavior. Workspaces work exactly as they do today. **SQLite is the sole data store** — no folder sync, no file watcher, no reconciliation. All existing services, RPC handlers, and eventstream work unchanged. -### Mode 2: Sync to Folder (Open YAML) +### Mode 2: Sync to Folder (OpenYAML) ``` User creates workspace → links to a folder → folder becomes source of truth @@ -162,21 +168,21 @@ SyncPath = "/Users/dev/my-api-collection", SyncFormat = "open_yaml", SyncEnabled 1. User has an existing workspace in DevTools (data in SQLite) 2. User clicks "Sync to Folder" → picks/creates an empty directory 3. Server sets `sync_path` on the workspace -4. SyncCoordinator starts → exports all SQLite data to Open YAML files in the folder +4. SyncCoordinator starts → exports all SQLite data to OpenYAML files in the folder 5. File watcher starts → from now on, folder is the source of truth 6. User can `git init && git add . && git commit` to start versioning -**B) Open existing folder (Open YAML folder → new workspace):** +**B) Open existing folder (OpenYAML folder → new workspace):** 1. User clicks "Open Folder" → picks a directory with `devtools.yaml` 2. Server creates a new workspace with `sync_path` set 3. SyncCoordinator starts → reads entire folder → populates SQLite cache 4. File watcher starts → folder is the source of truth 5. 
This is the common flow after `git clone` on a new machine -### Mode 3: Import from Bruno (OpenCollection → Open YAML) +### Mode 3: Import from Bruno (OpenCollection → OpenYAML) ``` -User opens Bruno collection → DevTools converts to Open YAML in a NEW folder → syncs there +User opens Bruno collection → DevTools converts to OpenYAML in a NEW folder → syncs there SyncPath = "/Users/dev/my-api-devtools/", SyncFormat = "open_yaml", SyncEnabled = true ``` @@ -185,7 +191,7 @@ SyncPath = "/Users/dev/my-api-devtools/", SyncFormat = "open_yaml", SyncEnabled 2. Server parses the OpenCollection YAML directory 3. Server creates a new workspace and populates SQLite with the converted data 4. Server creates a NEW folder (e.g., next to the Bruno folder, or user picks location) -5. SyncCoordinator exports SQLite data to Open YAML format in the new folder +5. SyncCoordinator exports SQLite data to OpenYAML format in the new folder 6. File watcher starts → bidirectional sync is live on the NEW folder 7. Original Bruno folder is NOT modified @@ -205,7 +211,7 @@ OpenCollection .yml directory → topencollection.ConvertOpenCollection() → OpenCollectionResolved (mhttp.HTTP, mfile.File, mflow.Flow, etc.) 
→ SQLite (workspace created + populated) - → SyncCoordinator exports to Open YAML folder + → SyncCoordinator exports to OpenYAML folder ``` | Layer | Location | Pattern | @@ -473,7 +479,7 @@ packages/server/pkg/translate/topencollection/ --- -## Part 2: Open YAML Format (DevTools' Own Format) +## Part 2: OpenYAML Format (DevTools' Own Format) ### 2.1 Design Goals @@ -482,7 +488,7 @@ packages/server/pkg/translate/topencollection/ - **Git-friendly** — clean diffs, merge-friendly structure - **Human-editable** — developers can edit in any text editor or IDE - **Flat top-level** — `name`, `method`, `url` at root (no `info`/`http` nesting like OpenCollection) -- **Compatible with existing `yamlflowsimplev2`** — flow files use the same format +- **Reuses existing `yamlflowsimplev2` types** — request types (`YamlRequestDefV2`, `HeaderMapOrSlice`, `YamlBodyUnion`, `AssertionsOrSlice`) and flow types (`YamlFlowFlowV2`) are imported directly, not duplicated. Only `gopkg.in/yaml.v3` for YAML parsing. ### 2.2 Directory Structure @@ -511,7 +517,7 @@ version: "1" name: My API Collection ``` -This file identifies the directory as a DevTools Open YAML collection. Its presence is how we detect the format (analogous to `opencollection.yml` for Bruno). +This file identifies the directory as a DevTools OpenYAML collection. Its presence is how we detect the format (analogous to `opencollection.yml` for Bruno). 
### 2.4 Request File Format @@ -554,7 +560,7 @@ headers: body: type: raw - content: | + raw: | { "name": "John Doe", "email": "john@example.com" @@ -572,8 +578,8 @@ url: "{{base_url}}/upload" order: 3 body: - type: form-data - fields: + type: form_data + form_data: - name: file value: "@./fixtures/test.png" description: "File to upload" @@ -600,7 +606,7 @@ steps: url: "{{base_url}}/auth/login" body: type: raw - content: '{"email": "test@example.com", "password": "test"}' + raw: '{"email": "test@example.com", "password": "test"}' - request: name: Get Profile @@ -637,10 +643,26 @@ order: 1 description: "User management endpoints" ``` -### 2.8 Open YAML Go Types +### 2.8 OpenYAML Go Types + +> **Reuse Policy:** The `topenyaml` package reuses types from `yamlflowsimplev2` wherever possible +> to avoid duplicating YAML marshaling logic. The types below reference: +> - `yamlflowsimplev2.YamlRequestDefV2` — request fields (name, method, url, headers, body, etc.) +> - `yamlflowsimplev2.HeaderMapOrSlice` — flexible header/param list (map or slice with custom marshal) +> - `yamlflowsimplev2.YamlNameValuePairV2` — individual name/value/enabled entry +> - `yamlflowsimplev2.YamlBodyUnion` — flexible body (raw string, JSON map, or structured with type) +> - `yamlflowsimplev2.AssertionsOrSlice` — assertions (string shorthand or structured) +> - `yamlflowsimplev2.YamlAssertionV2` — individual assertion entry +> - `yamlflowsimplev2.YamlFlowFlowV2` — flow definition (used directly for flow files) +> +> Only types that have no equivalent in `yamlflowsimplev2` are defined in `topenyaml`. 
```go -package openyaml +package topenyaml + +import ( + yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" +) // CollectionConfig represents devtools.yaml type CollectionConfig struct { @@ -648,43 +670,20 @@ type CollectionConfig struct { Name string `yaml:"name"` } -// RequestFile represents a single request .yaml file +// RequestFile represents a single request .yaml file. +// Embeds YamlRequestDefV2 for the core request fields (Name, Method, URL, +// Headers, QueryParams, Body, Assertions, Description) — all of which reuse +// the existing custom marshalers (HeaderMapOrSlice, YamlBodyUnion, etc.). +// Adds Order for file-tree ordering (not present in YamlRequestDefV2). type RequestFile struct { - Name string `yaml:"name"` - Method string `yaml:"method"` - URL string `yaml:"url"` - Description string `yaml:"description,omitempty"` - Order float64 `yaml:"order,omitempty"` - Headers []HeaderEntry `yaml:"headers,omitempty"` - QueryParams []HeaderEntry `yaml:"query_params,omitempty"` - Body *BodyDef `yaml:"body,omitempty"` - Assertions AssertionList `yaml:"assertions,omitempty"` -} - -type HeaderEntry struct { - Name string `yaml:"name"` - Value string `yaml:"value"` - Enabled *bool `yaml:"enabled,omitempty"` // Default: true - Description string `yaml:"description,omitempty"` -} - -type BodyDef struct { - Type string `yaml:"type"` // none|raw|form-data|urlencoded - Content string `yaml:"content,omitempty"` // For raw bodies - Fields []HeaderEntry `yaml:"fields,omitempty"` // For form-data / urlencoded -} - -// AssertionList supports both string shorthand and structured form -// - "res.status eq 200" -// - {value: "res.status eq 200", enabled: false, description: "..."} -type AssertionList []AssertionEntry - -type AssertionEntry struct { - Value string `yaml:"value,omitempty"` - Enabled *bool `yaml:"enabled,omitempty"` - Description string `yaml:"description,omitempty"` + yfs.YamlRequestDefV2 `yaml:",inline"` + Order float64 
`yaml:"order,omitempty"` } +// EnvironmentFile represents an environment .yaml file. +// NOTE: This differs from yamlflowsimplev2.YamlEnvironmentV2 which uses +// map[string]string for variables. OpenYAML environments need per-variable +// metadata (secret flag), so we define our own type here. type EnvironmentFile struct { Name string `yaml:"name"` Variables []EnvVariable `yaml:"variables"` @@ -703,10 +702,23 @@ type FolderMeta struct { } ``` +**Type Reuse Summary:** + +| OpenYAML Need | Reused From `yamlflowsimplev2` | Notes | +|---|---|---| +| Request fields | `YamlRequestDefV2` (embedded) | Name, Method, URL, Headers, QueryParams, Body, Assertions, Description | +| Headers / Query params | `HeaderMapOrSlice` → `[]YamlNameValuePairV2` | Supports both map and list YAML forms | +| Body | `*YamlBodyUnion` | Supports raw string, JSON map, form_data, urlencoded | +| Assertions | `AssertionsOrSlice` → `[]YamlAssertionV2` | Supports string shorthand and structured | +| Flow files | `YamlFlowFlowV2` (used directly) | No wrapper needed — flow .yaml files ARE `YamlFlowFlowV2` | +| Environments | **New** `EnvironmentFile` | Needs `secret` field not in `YamlEnvironmentV2` | +| Folder metadata | **New** `FolderMeta` | No equivalent in `yamlflowsimplev2` | +| Collection config | **New** `CollectionConfig` | No equivalent in `yamlflowsimplev2` | + ### 2.9 Package Structure ``` -packages/server/pkg/translate/openyaml/ +packages/server/pkg/translate/topenyaml/ ├── types.go # YAML struct definitions ├── parser.go # Read collection directory → DevTools models ├── serializer.go # DevTools models → YAML files on disk @@ -721,25 +733,31 @@ packages/server/pkg/translate/openyaml/ ### 2.10 Conversion Functions ```go -// ReadCollection reads an Open YAML directory into DevTools models. +// ReadCollection reads an OpenYAML directory into DevTools models. 
+// Uses yamlflowsimplev2 converter functions internally: +// - convertToHTTPHeaders() for HeaderMapOrSlice → []mhttp.HTTPHeader +// - convertToHTTPSearchParams() for HeaderMapOrSlice → []mhttp.HTTPSearchParam +// - convertBodyStruct() for *YamlBodyUnion → mhttp body types func ReadCollection(collectionPath string, opts ReadOptions) (*ioworkspace.WorkspaceBundle, error) -// WriteCollection exports a workspace bundle to an Open YAML directory. +// WriteCollection exports a workspace bundle to an OpenYAML directory. func WriteCollection(collectionPath string, bundle *ioworkspace.WorkspaceBundle) error -// ReadRequest parses a single request YAML file. +// ReadRequest parses a single request YAML file into a RequestFile +// (which embeds yamlflowsimplev2.YamlRequestDefV2). func ReadRequest(data []byte) (*RequestFile, error) -// WriteRequest serializes a single request to YAML. +// WriteRequest serializes DevTools models to a single request YAML. func WriteRequest(http mhttp.HTTP, headers []mhttp.HTTPHeader, params []mhttp.HTTPSearchParam, body interface{}, asserts []mhttp.HTTPAssert) ([]byte, error) // ReadFlow parses a single flow YAML file (delegates to yamlflowsimplev2). -func ReadFlow(data []byte, opts FlowReadOptions) (*FlowResolved, error) +// Flow files are yamlflowsimplev2.YamlFlowFlowV2 — no topenyaml wrapper needed. +func ReadFlow(data []byte, opts yfs.ConvertOptionsV2) (*yfs.YamlFlowDataV2, error) // WriteFlow serializes a single flow to YAML (delegates to yamlflowsimplev2 exporter). 
-func WriteFlow(flow FlowBundle) ([]byte, error) +func WriteFlow(flow yfs.YamlFlowFlowV2) ([]byte, error) ``` --- @@ -994,10 +1012,10 @@ DisableFolderSync(workspaceId) → void // Get sync status GetFolderSyncStatus(workspaceId) → SyncStatus -// Import Bruno collection → create workspace + Open YAML folder +// Import Bruno collection → create workspace + OpenYAML folder ImportBrunoCollection(brunoFolderPath, outputFolderPath) → Workspace -// Export workspace to Open YAML folder +// Export workspace to OpenYAML folder ExportToFolder(workspaceId, folderPath) → void type SyncStatus { @@ -1055,13 +1073,13 @@ packages/server/pkg/translate/topencollection/ **Deps**: `gopkg.in/yaml.v3` (existing), `mhttp`, `mfile`, `menv` -### Phase 2: Open YAML Format (Requests + Flows) +### Phase 2: OpenYAML Format (Requests + Flows) **Scope**: DevTools' own YAML format — parser + serializer with round-trip support. Flows delegate to existing `yamlflowsimplev2`. **Files**: ``` -packages/server/pkg/translate/openyaml/ +packages/server/pkg/translate/topenyaml/ ├── types.go, parser.go, serializer.go, request.go ├── flow.go, environment.go, folder.go, collection.go └── parser_test.go @@ -1092,7 +1110,7 @@ packages/server/pkg/foldersync/ └── watcher_test.go ``` -**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (Open YAML), Phase 3 (workspace schema) +**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (OpenYAML), Phase 3 (workspace schema) ### Phase 5: RPC Endpoints + CLI Import @@ -1123,7 +1141,7 @@ packages/server/pkg/foldersync/ **Files**: - `apps/cli/cmd/run.go` -**Deps**: Phase 2 (Open YAML format), existing runner +**Deps**: Phase 2 (OpenYAML format), existing runner --- @@ -1132,7 +1150,7 @@ packages/server/pkg/foldersync/ ``` Phase 1: OpenCollection Parser ──────────────────────────┐ │ -Phase 2: Open YAML Format ──┬────────────────────────────┤ +Phase 2: OpenYAML Format ──┬────────────────────────────┤ │ │ Phase 3: Workspace Schema ──┤ │ │ │ From f48d151c0866c693ea437d83bc87fad89a88c126 
Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 16:36:22 +0000 Subject: [PATCH 06/20] =?UTF-8?q?docs:=20simplify=20plan=20=E2=80=94=20no?= =?UTF-8?q?=20topenyaml=20package,=20no=20config=20files,=20skip=20non-HTT?= =?UTF-8?q?P?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove topenyaml package entirely — extend yamlflowsimplev2 with ReadDirectory()/WriteDirectory() and Order field on YamlRequestDefV2 - Remove devtools.yaml (no collection config needed, workspace tracks sync_path) - Remove _folder.yaml (directory name IS the folder name) - Remove EnvironmentFile/EnvVariable/secret — use YamlEnvironmentV2 directly - Remove CollectionConfig, FolderMeta types — zero new types needed - Add explicit skip for non-HTTP request types during OpenCollection import (graphql, websocket, grpc logged as warnings) - Phase 2 now just adds directory.go to yamlflowsimplev2 https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 228 ++++++------------ 1 file changed, 71 insertions(+), 157 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index 42b5296f..445295e2 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -58,8 +58,10 @@ Go Server ↕ bidirectional OpenYAML Folder ←── SOURCE OF TRUTH (git, shared, portable) /path/to/my-collection/ - ├── devtools.yaml - ├── requests/ + ├── environments/ + ├── users/ + │ ├── get-users.yaml + │ └── create-user.yaml └── flows/ ``` @@ -173,7 +175,7 @@ SyncPath = "/Users/dev/my-api-collection", SyncFormat = "open_yaml", SyncEnabled 6. User can `git init && git add . && git commit` to start versioning **B) Open existing folder (OpenYAML folder → new workspace):** -1. User clicks "Open Folder" → picks a directory with `devtools.yaml` +1. 
User clicks "Open Folder" → picks a directory containing `.yaml` request/flow files 2. Server creates a new workspace with `sync_path` set 3. SyncCoordinator starts → reads entire folder → populates SQLite cache 4. File watcher starts → folder is the source of truth @@ -427,6 +429,9 @@ type OpenCollectionResolved struct { EnvironmentVars []menv.Variable } +// ConvertOpenCollection walks the directory, parses each .yml file, and converts +// to DevTools models. Only info.type == "http" requests are imported. +// GraphQL, WebSocket, and gRPC types are skipped with a log warning. func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCollectionResolved, error) ``` @@ -447,6 +452,9 @@ func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCol | `http.auth.type: basic` | `mhttp.HTTPHeader` | → `Authorization: Basic ` | | `http.auth.type: apikey` | Header or SearchParam | Based on `placement` | | `runtime.assertions` | `[]mhttp.HTTPAssert` | `expr operator value` format | +| `info.type: graphql` | **Skipped** (log warning) | Not supported yet | +| `info.type: ws` | **Skipped** (log warning) | WebSocket not supported yet | +| `info.type: grpc` | **Skipped** (log warning) | gRPC not supported yet | | `runtime.scripts` | Not imported (log warning) | DevTools uses JS flow nodes | | `docs` | `mhttp.HTTP.Description` | | | Directory structure | `mfile.File` hierarchy | Nesting preserved | @@ -479,7 +487,7 @@ packages/server/pkg/translate/topencollection/ --- -## Part 2: OpenYAML Format (DevTools' Own Format) +## Part 2: OpenYAML Format — Extending `yamlflowsimplev2` ### 2.1 Design Goals @@ -487,39 +495,31 @@ packages/server/pkg/translate/topencollection/ - **One file per entity** — each request and flow is its own `.yaml` file - **Git-friendly** — clean diffs, merge-friendly structure - **Human-editable** — developers can edit in any text editor or IDE -- **Flat top-level** — `name`, `method`, `url` at root (no `info`/`http` nesting 
like OpenCollection) -- **Reuses existing `yamlflowsimplev2` types** — request types (`YamlRequestDefV2`, `HeaderMapOrSlice`, `YamlBodyUnion`, `AssertionsOrSlice`) and flow types (`YamlFlowFlowV2`) are imported directly, not duplicated. Only `gopkg.in/yaml.v3` for YAML parsing. +- **No new package** — the format IS `yamlflowsimplev2`. Individual request files are `YamlRequestDefV2`, flow files are `YamlFlowFlowV2`, environment files are `YamlEnvironmentV2`. We add `ReadDirectory()`/`WriteDirectory()` to handle the multi-file layout. +- **No config files** — no `devtools.yaml` or `_folder.yaml`. The workspace tracks `sync_path` in SQLite. Directory names are folder names. Ordering uses alphabetical/filesystem order. ### 2.2 Directory Structure ``` my-collection/ -├── devtools.yaml # Collection config ├── environments/ │ ├── dev.yaml │ └── prod.yaml -├── users/ # Folder = directory -│ ├── _folder.yaml # Optional folder metadata -│ ├── get-users.yaml # HTTP request -│ └── create-user.yaml # HTTP request +├── users/ # Folder = directory name +│ ├── get-users.yaml # HTTP request (YamlRequestDefV2) +│ └── create-user.yaml # HTTP request (YamlRequestDefV2) ├── auth/ -│ ├── _folder.yaml │ └── login.yaml └── flows/ # Flow definitions - ├── smoke-test.yaml # Flow (yamlflowsimplev2 format) + ├── smoke-test.yaml # Flow (YamlFlowFlowV2) └── ci-regression.yaml ``` -### 2.3 Collection Config (`devtools.yaml`) +No marker files needed. The workspace knows its `sync_path`. Directory names are the folder names. File names are derived from the entity name. -```yaml -version: "1" -name: My API Collection -``` +### 2.3 Request File Format -This file identifies the directory as a DevTools OpenYAML collection. Its presence is how we detect the format (analogous to `opencollection.yml` for Bruno). 
- -### 2.4 Request File Format +Request files are `YamlRequestDefV2` (with `Order` field added to the struct): ```yaml name: Get Users @@ -543,9 +543,6 @@ query_params: - name: limit value: "10" enabled: false - -body: - type: none # none | raw | form-data | urlencoded ``` ```yaml @@ -587,12 +584,11 @@ body: value: "Test upload" ``` -### 2.5 Flow File Format +### 2.4 Flow File Format -Flows use the existing `yamlflowsimplev2` format — this is already implemented and working: +Flow files are `YamlFlowFlowV2` — already implemented and working: ```yaml -# flows/smoke-test.yaml name: Smoke Test variables: - name: auth_token @@ -623,143 +619,60 @@ steps: if (response.status !== 200) throw new Error("Failed"); ``` -### 2.6 Environment File +### 2.5 Environment File Format + +Environment files use `YamlEnvironmentV2` directly: ```yaml name: Development variables: - - name: base_url - value: "http://localhost:3000" - - name: token - value: "dev-token-123" - secret: true + base_url: "http://localhost:3000" + token: "dev-token-123" ``` -### 2.7 Folder Metadata (`_folder.yaml`) +### 2.6 Changes to `yamlflowsimplev2` -```yaml -name: Users API -order: 1 -description: "User management endpoints" -``` - -### 2.8 OpenYAML Go Types - -> **Reuse Policy:** The `topenyaml` package reuses types from `yamlflowsimplev2` wherever possible -> to avoid duplicating YAML marshaling logic. The types below reference: -> - `yamlflowsimplev2.YamlRequestDefV2` — request fields (name, method, url, headers, body, etc.) 
-> - `yamlflowsimplev2.HeaderMapOrSlice` — flexible header/param list (map or slice with custom marshal) -> - `yamlflowsimplev2.YamlNameValuePairV2` — individual name/value/enabled entry -> - `yamlflowsimplev2.YamlBodyUnion` — flexible body (raw string, JSON map, or structured with type) -> - `yamlflowsimplev2.AssertionsOrSlice` — assertions (string shorthand or structured) -> - `yamlflowsimplev2.YamlAssertionV2` — individual assertion entry -> - `yamlflowsimplev2.YamlFlowFlowV2` — flow definition (used directly for flow files) -> -> Only types that have no equivalent in `yamlflowsimplev2` are defined in `topenyaml`. +**Only change to existing types** — add `Order` to `YamlRequestDefV2`: ```go -package topenyaml - -import ( - yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" -) - -// CollectionConfig represents devtools.yaml -type CollectionConfig struct { - Version string `yaml:"version"` - Name string `yaml:"name"` -} - -// RequestFile represents a single request .yaml file. -// Embeds YamlRequestDefV2 for the core request fields (Name, Method, URL, -// Headers, QueryParams, Body, Assertions, Description) — all of which reuse -// the existing custom marshalers (HeaderMapOrSlice, YamlBodyUnion, etc.). -// Adds Order for file-tree ordering (not present in YamlRequestDefV2). -type RequestFile struct { - yfs.YamlRequestDefV2 `yaml:",inline"` - Order float64 `yaml:"order,omitempty"` -} - -// EnvironmentFile represents an environment .yaml file. -// NOTE: This differs from yamlflowsimplev2.YamlEnvironmentV2 which uses -// map[string]string for variables. OpenYAML environments need per-variable -// metadata (secret flag), so we define our own type here. 
-type EnvironmentFile struct { - Name string `yaml:"name"` - Variables []EnvVariable `yaml:"variables"` -} - -type EnvVariable struct { - Name string `yaml:"name"` - Value string `yaml:"value"` - Secret bool `yaml:"secret,omitempty"` -} - -type FolderMeta struct { - Name string `yaml:"name,omitempty"` - Order float64 `yaml:"order,omitempty"` - Description string `yaml:"description,omitempty"` +type YamlRequestDefV2 struct { + Name string `yaml:"name,omitempty"` + Method string `yaml:"method,omitempty"` + URL string `yaml:"url,omitempty"` + Headers HeaderMapOrSlice `yaml:"headers,omitempty"` + QueryParams HeaderMapOrSlice `yaml:"query_params,omitempty"` + Body *YamlBodyUnion `yaml:"body,omitempty"` + Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` + Description string `yaml:"description,omitempty"` + Order float64 `yaml:"order,omitempty"` // NEW: file-tree ordering } ``` -**Type Reuse Summary:** +**New functions** for multi-file directory I/O: -| OpenYAML Need | Reused From `yamlflowsimplev2` | Notes | -|---|---|---| -| Request fields | `YamlRequestDefV2` (embedded) | Name, Method, URL, Headers, QueryParams, Body, Assertions, Description | -| Headers / Query params | `HeaderMapOrSlice` → `[]YamlNameValuePairV2` | Supports both map and list YAML forms | -| Body | `*YamlBodyUnion` | Supports raw string, JSON map, form_data, urlencoded | -| Assertions | `AssertionsOrSlice` → `[]YamlAssertionV2` | Supports string shorthand and structured | -| Flow files | `YamlFlowFlowV2` (used directly) | No wrapper needed — flow .yaml files ARE `YamlFlowFlowV2` | -| Environments | **New** `EnvironmentFile` | Needs `secret` field not in `YamlEnvironmentV2` | -| Folder metadata | **New** `FolderMeta` | No equivalent in `yamlflowsimplev2` | -| Collection config | **New** `CollectionConfig` | No equivalent in `yamlflowsimplev2` | +```go +// ReadDirectory reads an OpenYAML folder into a WorkspaceBundle. 
+// Walks the directory tree: +// - *.yaml files in root/subdirs → YamlRequestDefV2 → mhttp models +// - flows/*.yaml → YamlFlowFlowV2 → mflow models +// - environments/*.yaml → YamlEnvironmentV2 → menv models +// - Subdirectories → mfile.File (ContentTypeFolder) +func ReadDirectory(dirPath string, opts ConvertOptionsV2) (*ioworkspace.WorkspaceBundle, error) -### 2.9 Package Structure +// WriteDirectory exports a WorkspaceBundle to an OpenYAML folder. +// Creates one .yaml file per request, flow, and environment. +// Directory structure mirrors the mfile.File hierarchy. +func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error -``` -packages/server/pkg/translate/topenyaml/ -├── types.go # YAML struct definitions -├── parser.go # Read collection directory → DevTools models -├── serializer.go # DevTools models → YAML files on disk -├── request.go # Single request YAML ↔ mhttp conversion -├── flow.go # Delegates to yamlflowsimplev2 for flow parsing -├── environment.go # Environment YAML ↔ menv conversion -├── folder.go # _folder.yaml handling -├── collection.go # devtools.yaml config -└── parser_test.go # Round-trip tests -``` +// ReadSingleRequest parses one request .yaml file. +func ReadSingleRequest(data []byte) (*YamlRequestDefV2, error) -### 2.10 Conversion Functions - -```go -// ReadCollection reads an OpenYAML directory into DevTools models. -// Uses yamlflowsimplev2 converter functions internally: -// - convertToHTTPHeaders() for HeaderMapOrSlice → []mhttp.HTTPHeader -// - convertToHTTPSearchParams() for HeaderMapOrSlice → []mhttp.HTTPSearchParam -// - convertBodyStruct() for *YamlBodyUnion → mhttp body types -func ReadCollection(collectionPath string, opts ReadOptions) (*ioworkspace.WorkspaceBundle, error) - -// WriteCollection exports a workspace bundle to an OpenYAML directory. 
-func WriteCollection(collectionPath string, bundle *ioworkspace.WorkspaceBundle) error - -// ReadRequest parses a single request YAML file into a RequestFile -// (which embeds yamlflowsimplev2.YamlRequestDefV2). -func ReadRequest(data []byte) (*RequestFile, error) - -// WriteRequest serializes DevTools models to a single request YAML. -func WriteRequest(http mhttp.HTTP, headers []mhttp.HTTPHeader, - params []mhttp.HTTPSearchParam, body interface{}, - asserts []mhttp.HTTPAssert) ([]byte, error) - -// ReadFlow parses a single flow YAML file (delegates to yamlflowsimplev2). -// Flow files are yamlflowsimplev2.YamlFlowFlowV2 — no topenyaml wrapper needed. -func ReadFlow(data []byte, opts yfs.ConvertOptionsV2) (*yfs.YamlFlowDataV2, error) - -// WriteFlow serializes a single flow to YAML (delegates to yamlflowsimplev2 exporter). -func WriteFlow(flow yfs.YamlFlowFlowV2) ([]byte, error) +// WriteSingleRequest serializes one request to YAML. +func WriteSingleRequest(req YamlRequestDefV2) ([]byte, error) ``` +These functions reuse the existing converter functions internally (`convertToHTTPHeaders`, `convertToHTTPSearchParams`, `convertBodyStruct`, etc.). + --- ## Part 3: Folder Sync Engine @@ -889,7 +802,7 @@ func (s *SyncCoordinator) ExportAll(ctx context.Context) error File change detected (watcher) → Debounce (80ms) → Skip if self-write - → Classify file type (request .yaml, flow .yaml, environment, folder meta) + → Classify file type (request .yaml, flow .yaml, environment) → Parse YAML → intermediate types → Look up entity by path→ID mapping → Begin transaction @@ -1073,19 +986,20 @@ packages/server/pkg/translate/topencollection/ **Deps**: `gopkg.in/yaml.v3` (existing), `mhttp`, `mfile`, `menv` -### Phase 2: OpenYAML Format (Requests + Flows) +### Phase 2: OpenYAML Multi-File I/O (extend `yamlflowsimplev2`) -**Scope**: DevTools' own YAML format — parser + serializer with round-trip support. Flows delegate to existing `yamlflowsimplev2`. 
+**Scope**: Add `Order` field to `YamlRequestDefV2`. Add `ReadDirectory()`/`WriteDirectory()` functions for multi-file collection layout. No new package — all code lives in `yamlflowsimplev2`. **Files**: ``` -packages/server/pkg/translate/topenyaml/ -├── types.go, parser.go, serializer.go, request.go -├── flow.go, environment.go, folder.go, collection.go -└── parser_test.go +packages/server/pkg/translate/yamlflowsimplev2/ +├── types.go # Add Order to YamlRequestDefV2 +├── directory.go # NEW: ReadDirectory(), WriteDirectory() +├── directory_test.go # NEW: round-trip tests with testdata/ +└── testdata/collection/ # NEW: sample multi-file collection ``` -**Deps**: `gopkg.in/yaml.v3`, `yamlflowsimplev2` (for flows), `mhttp`, `mfile`, `mflow` +**Deps**: existing `yamlflowsimplev2` types, `mhttp`, `mfile`, `mflow`, `menv` ### Phase 3: Workspace Schema + Migration @@ -1110,7 +1024,7 @@ packages/server/pkg/foldersync/ └── watcher_test.go ``` -**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (OpenYAML), Phase 3 (workspace schema) +**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (directory I/O), Phase 3 (workspace schema) ### Phase 5: RPC Endpoints + CLI Import @@ -1141,7 +1055,7 @@ packages/server/pkg/foldersync/ **Files**: - `apps/cli/cmd/run.go` -**Deps**: Phase 2 (OpenYAML format), existing runner +**Deps**: Phase 2 (directory I/O), existing runner --- @@ -1150,7 +1064,7 @@ packages/server/pkg/foldersync/ ``` Phase 1: OpenCollection Parser ──────────────────────────┐ │ -Phase 2: OpenYAML Format ──┬────────────────────────────┤ +Phase 2: yamlflowsimplev2 Dir I/O ──┬───────────────────┤ │ │ Phase 3: Workspace Schema ──┤ │ │ │ From 0c34cfea3046d850ddb769c64d3610d650d76e99 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 16:39:16 +0000 Subject: [PATCH 07/20] docs: wrapper package for directory I/O, clean Bruno separation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Don't clutter yamlflowsimplev2 — only add Order 
field there - Create thin openyaml wrapper package for directory read/write (imports yamlflowsimplev2 types, no new YAML types) - openyaml has no dependency on topencollection (Bruno import) - topencollection stays fully isolated — can be removed cleanly - Update Phase 2 and dependency graph accordingly https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 80 ++++++++++++------- 1 file changed, 53 insertions(+), 27 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index 445295e2..ebdec83c 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -487,7 +487,7 @@ packages/server/pkg/translate/topencollection/ --- -## Part 2: OpenYAML Format — Extending `yamlflowsimplev2` +## Part 2: OpenYAML Format — Wrapper over `yamlflowsimplev2` ### 2.1 Design Goals @@ -495,8 +495,9 @@ packages/server/pkg/translate/topencollection/ - **One file per entity** — each request and flow is its own `.yaml` file - **Git-friendly** — clean diffs, merge-friendly structure - **Human-editable** — developers can edit in any text editor or IDE -- **No new package** — the format IS `yamlflowsimplev2`. Individual request files are `YamlRequestDefV2`, flow files are `YamlFlowFlowV2`, environment files are `YamlEnvironmentV2`. We add `ReadDirectory()`/`WriteDirectory()` to handle the multi-file layout. +- **Thin wrapper, not modification** — the format IS `yamlflowsimplev2` types. A separate small package wraps them with directory I/O (`ReadDirectory()`/`WriteDirectory()`). `yamlflowsimplev2` stays untouched except adding `Order` to `YamlRequestDefV2`. - **No config files** — no `devtools.yaml` or `_folder.yaml`. The workspace tracks `sync_path` in SQLite. Directory names are folder names. Ordering uses alphabetical/filesystem order. 
+- **Clean separation** — `topencollection/` (Bruno import) is fully isolated and can be removed without affecting the rest. The directory I/O wrapper has no dependency on Bruno types. ### 2.2 Directory Structure @@ -632,32 +633,36 @@ variables: ### 2.6 Changes to `yamlflowsimplev2` -**Only change to existing types** — add `Order` to `YamlRequestDefV2`: +**Only change** — add `Order` to `YamlRequestDefV2`: ```go type YamlRequestDefV2 struct { - Name string `yaml:"name,omitempty"` - Method string `yaml:"method,omitempty"` - URL string `yaml:"url,omitempty"` - Headers HeaderMapOrSlice `yaml:"headers,omitempty"` - QueryParams HeaderMapOrSlice `yaml:"query_params,omitempty"` - Body *YamlBodyUnion `yaml:"body,omitempty"` - Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` - Description string `yaml:"description,omitempty"` + // ... existing fields unchanged ... Order float64 `yaml:"order,omitempty"` // NEW: file-tree ordering } ``` -**New functions** for multi-file directory I/O: +No other changes to `yamlflowsimplev2`. No new functions added there. + +### 2.7 Directory I/O Wrapper (`packages/server/pkg/openyaml/`) + +A thin wrapper package that imports `yamlflowsimplev2` types and handles multi-file directory layout. No new YAML types — just I/O logic. ```go +package openyaml + +import ( + yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" +) + // ReadDirectory reads an OpenYAML folder into a WorkspaceBundle. 
// Walks the directory tree: -// - *.yaml files in root/subdirs → YamlRequestDefV2 → mhttp models -// - flows/*.yaml → YamlFlowFlowV2 → mflow models -// - environments/*.yaml → YamlEnvironmentV2 → menv models +// - *.yaml files in root/subdirs → yfs.YamlRequestDefV2 → mhttp models +// - flows/*.yaml → yfs.YamlFlowFlowV2 → mflow models +// - environments/*.yaml → yfs.YamlEnvironmentV2 → menv models // - Subdirectories → mfile.File (ContentTypeFolder) -func ReadDirectory(dirPath string, opts ConvertOptionsV2) (*ioworkspace.WorkspaceBundle, error) +func ReadDirectory(dirPath string, opts ReadOptions) (*ioworkspace.WorkspaceBundle, error) // WriteDirectory exports a WorkspaceBundle to an OpenYAML folder. // Creates one .yaml file per request, flow, and environment. @@ -665,13 +670,29 @@ func ReadDirectory(dirPath string, opts ConvertOptionsV2) (*ioworkspace.Workspac func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error // ReadSingleRequest parses one request .yaml file. -func ReadSingleRequest(data []byte) (*YamlRequestDefV2, error) +func ReadSingleRequest(data []byte) (*yfs.YamlRequestDefV2, error) // WriteSingleRequest serializes one request to YAML. -func WriteSingleRequest(req YamlRequestDefV2) ([]byte, error) +func WriteSingleRequest(req yfs.YamlRequestDefV2) ([]byte, error) + +// ReadSingleFlow parses one flow .yaml file. +func ReadSingleFlow(data []byte) (*yfs.YamlFlowFlowV2, error) + +// WriteSingleFlow serializes one flow to YAML. 
+func WriteSingleFlow(flow yfs.YamlFlowFlowV2) ([]byte, error) +``` + +``` +packages/server/pkg/openyaml/ +├── directory.go # ReadDirectory(), WriteDirectory() +├── request.go # ReadSingleRequest(), WriteSingleRequest() +├── flow.go # ReadSingleFlow(), WriteSingleFlow() +├── environment.go # environment file read/write +├── directory_test.go # Round-trip tests +└── testdata/collection/ # Sample multi-file collection ``` -These functions reuse the existing converter functions internally (`convertToHTTPHeaders`, `convertToHTTPSearchParams`, `convertBodyStruct`, etc.). +This package uses `yamlflowsimplev2` converter functions internally (`convertToHTTPHeaders`, `convertToHTTPSearchParams`, `convertBodyStruct`, etc.). It has no dependency on `topencollection` (Bruno import). --- @@ -986,20 +1007,25 @@ packages/server/pkg/translate/topencollection/ **Deps**: `gopkg.in/yaml.v3` (existing), `mhttp`, `mfile`, `menv` -### Phase 2: OpenYAML Multi-File I/O (extend `yamlflowsimplev2`) +### Phase 2: OpenYAML Directory I/O Wrapper -**Scope**: Add `Order` field to `YamlRequestDefV2`. Add `ReadDirectory()`/`WriteDirectory()` functions for multi-file collection layout. No new package — all code lives in `yamlflowsimplev2`. +**Scope**: Add `Order` field to `YamlRequestDefV2` (only change to `yamlflowsimplev2`). Create thin `openyaml` wrapper package for multi-file directory read/write. No Bruno dependency. 
**Files**: ``` packages/server/pkg/translate/yamlflowsimplev2/ -├── types.go # Add Order to YamlRequestDefV2 -├── directory.go # NEW: ReadDirectory(), WriteDirectory() -├── directory_test.go # NEW: round-trip tests with testdata/ -└── testdata/collection/ # NEW: sample multi-file collection +└── types.go # Add Order to YamlRequestDefV2 + +packages/server/pkg/openyaml/ # NEW: directory I/O wrapper +├── directory.go # ReadDirectory(), WriteDirectory() +├── request.go # ReadSingleRequest(), WriteSingleRequest() +├── flow.go # ReadSingleFlow(), WriteSingleFlow() +├── environment.go # environment file read/write +├── directory_test.go # Round-trip tests +└── testdata/collection/ # Sample multi-file collection ``` -**Deps**: existing `yamlflowsimplev2` types, `mhttp`, `mfile`, `mflow`, `menv` +**Deps**: `yamlflowsimplev2` (types only), `mhttp`, `mfile`, `mflow`, `menv` ### Phase 3: Workspace Schema + Migration @@ -1064,7 +1090,7 @@ packages/server/pkg/foldersync/ ``` Phase 1: OpenCollection Parser ──────────────────────────┐ │ -Phase 2: yamlflowsimplev2 Dir I/O ──┬───────────────────┤ +Phase 2: openyaml Dir I/O ──────┬───────────────────────┤ │ │ Phase 3: Workspace Schema ──┤ │ │ │ From b9088ab46762a15b2bb8ea07fc7a71a2ac2e07e4 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 16:43:17 +0000 Subject: [PATCH 08/20] =?UTF-8?q?docs:=20rename=20openyaml=20references=20?= =?UTF-8?q?=E2=80=94=20it's=20the=20format,=20not=20just=20I/O?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index ebdec83c..03e8189d 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -487,7 +487,7 @@ 
packages/server/pkg/translate/topencollection/ --- -## Part 2: OpenYAML Format — Wrapper over `yamlflowsimplev2` +## Part 2: OpenYAML Format (`openyaml` package) ### 2.1 Design Goals @@ -495,7 +495,7 @@ packages/server/pkg/translate/topencollection/ - **One file per entity** — each request and flow is its own `.yaml` file - **Git-friendly** — clean diffs, merge-friendly structure - **Human-editable** — developers can edit in any text editor or IDE -- **Thin wrapper, not modification** — the format IS `yamlflowsimplev2` types. A separate small package wraps them with directory I/O (`ReadDirectory()`/`WriteDirectory()`). `yamlflowsimplev2` stays untouched except adding `Order` to `YamlRequestDefV2`. +- **Own package, reuses types** — `openyaml` is the OpenYAML format package. It reuses `yamlflowsimplev2` types (`YamlRequestDefV2`, `YamlFlowFlowV2`, `YamlEnvironmentV2`) and adds multi-file collection support. `yamlflowsimplev2` stays untouched except adding `Order` to `YamlRequestDefV2`. - **No config files** — no `devtools.yaml` or `_folder.yaml`. The workspace tracks `sync_path` in SQLite. Directory names are folder names. Ordering uses alphabetical/filesystem order. - **Clean separation** — `topencollection/` (Bruno import) is fully isolated and can be removed without affecting the rest. The directory I/O wrapper has no dependency on Bruno types. @@ -644,9 +644,9 @@ type YamlRequestDefV2 struct { No other changes to `yamlflowsimplev2`. No new functions added there. -### 2.7 Directory I/O Wrapper (`packages/server/pkg/openyaml/`) +### 2.7 OpenYAML Package (`packages/server/pkg/openyaml/`) -A thin wrapper package that imports `yamlflowsimplev2` types and handles multi-file directory layout. No new YAML types — just I/O logic. +The OpenYAML format package. Reuses `yamlflowsimplev2` types for individual files and adds multi-file collection support. 
```go package openyaml @@ -692,7 +692,7 @@ packages/server/pkg/openyaml/ └── testdata/collection/ # Sample multi-file collection ``` -This package uses `yamlflowsimplev2` converter functions internally (`convertToHTTPHeaders`, `convertToHTTPSearchParams`, `convertBodyStruct`, etc.). It has no dependency on `topencollection` (Bruno import). +Uses `yamlflowsimplev2` converter functions internally. No dependency on `topencollection` (Bruno import). --- @@ -1007,16 +1007,16 @@ packages/server/pkg/translate/topencollection/ **Deps**: `gopkg.in/yaml.v3` (existing), `mhttp`, `mfile`, `menv` -### Phase 2: OpenYAML Directory I/O Wrapper +### Phase 2: OpenYAML Format (`openyaml` package) -**Scope**: Add `Order` field to `YamlRequestDefV2` (only change to `yamlflowsimplev2`). Create thin `openyaml` wrapper package for multi-file directory read/write. No Bruno dependency. +**Scope**: Add `Order` field to `YamlRequestDefV2` (only change to `yamlflowsimplev2`). Create `openyaml` package — the OpenYAML format with multi-file collection support. No Bruno dependency. 
**Files**: ``` packages/server/pkg/translate/yamlflowsimplev2/ └── types.go # Add Order to YamlRequestDefV2 -packages/server/pkg/openyaml/ # NEW: directory I/O wrapper +packages/server/pkg/openyaml/ # NEW: OpenYAML format ├── directory.go # ReadDirectory(), WriteDirectory() ├── request.go # ReadSingleRequest(), WriteSingleRequest() ├── flow.go # ReadSingleFlow(), WriteSingleFlow() @@ -1050,7 +1050,7 @@ packages/server/pkg/foldersync/ └── watcher_test.go ``` -**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (directory I/O), Phase 3 (workspace schema) +**Deps**: `github.com/fsnotify/fsnotify`, Phase 2 (openyaml), Phase 3 (workspace schema) ### Phase 5: RPC Endpoints + CLI Import @@ -1081,7 +1081,7 @@ packages/server/pkg/foldersync/ **Files**: - `apps/cli/cmd/run.go` -**Deps**: Phase 2 (directory I/O), existing runner +**Deps**: Phase 2 (openyaml), existing runner --- @@ -1090,7 +1090,7 @@ packages/server/pkg/foldersync/ ``` Phase 1: OpenCollection Parser ──────────────────────────┐ │ -Phase 2: openyaml Dir I/O ──────┬───────────────────────┤ +Phase 2: openyaml format ───────┬───────────────────────┤ │ │ Phase 3: Workspace Schema ──┤ │ │ │ From d2f7fa01456122a4f0ebc1871a67c2f88cf6c23a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 17:42:50 +0000 Subject: [PATCH 09/20] feat: implement Bruno import, OpenYAML format, and workspace sync schema MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 1 — topencollection package (Bruno OpenCollection YAML import): - Converter that walks Bruno collection directories and produces WorkspaceBundle-compatible models (HTTP requests, headers, params, bodies, environments, assertions) - Auth conversion (bearer, basic, API key with header/query placement) - Body conversion (JSON/XML/text as raw, form-urlencoded, multipart) - Skips non-HTTP requests (graphql, ws, grpc) with log warnings - Full test coverage (6 tests) Phase 2 — openyaml package (DevTools' own multi-file YAML 
format): - ReadDirectory / WriteDirectory for full collection round-trips - Single-file read/write for requests, flows, and environments - Reuses existing yamlflowsimplev2 types (adds Order field) - Recursive directory walking with folder hierarchy preservation - Full test coverage including round-trip tests (4 tests) Phase 3 — workspace sync schema and migration: - Add sync_path, sync_format, sync_enabled columns to workspaces table - SQL schema, queries, generated Go code, and model all updated - New GetSyncedWorkspaces and UpdateWorkspaceSync queries - WorkspaceReader.GetSynced and WorkspaceWriter.UpdateSync service methods - Database migration (01KGZ57RM) with ALTER TABLE + validation https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- packages/db/pkg/sqlc/gen/db.go | 20 + packages/db/pkg/sqlc/gen/models.go | 3 + packages/db/pkg/sqlc/gen/workspaces.sql.go | 143 +++- packages/db/pkg/sqlc/queries/workspaces.sql | 65 +- packages/db/pkg/sqlc/schema/01_workspaces.sql | 5 +- .../01KGZ57RM_add_workspace_sync.go | 64 ++ .../server/pkg/model/mworkspace/mworkspace.go | 4 + packages/server/pkg/openyaml/directory.go | 763 ++++++++++++++++++ .../server/pkg/openyaml/directory_test.go | 271 +++++++ packages/server/pkg/openyaml/environment.go | 21 + packages/server/pkg/openyaml/flow.go | 21 + packages/server/pkg/openyaml/request.go | 24 + .../testdata/collection/environments/dev.yaml | 4 + .../testdata/collection/flows/smoke-test.yaml | 9 + .../collection/users/create-user.yaml | 15 + .../testdata/collection/users/get-users.yaml | 15 + .../service/sworkspace/workspace_mapper.go | 6 + .../service/sworkspace/workspace_reader.go | 8 + .../service/sworkspace/workspace_writer.go | 14 + .../pkg/translate/topencollection/auth.go | 65 ++ .../pkg/translate/topencollection/body.go | 155 ++++ .../translate/topencollection/converter.go | 365 +++++++++ .../topencollection/converter_test.go | 294 +++++++ .../translate/topencollection/environment.go | 36 + 
.../testdata/basic-collection/auth/login.yml | 24 + .../basic-collection/environments/dev.yml | 6 + .../basic-collection/environments/prod.yml | 7 + .../basic-collection/opencollection.yml | 5 + .../basic-collection/users/create-user.yml | 32 + .../basic-collection/users/get-users.yml | 26 + .../topencollection/testhelper_test.go | 15 + .../pkg/translate/topencollection/types.go | 146 ++++ .../pkg/translate/yamlflowsimplev2/types.go | 1 + 33 files changed, 2634 insertions(+), 18 deletions(-) create mode 100644 packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go create mode 100644 packages/server/pkg/openyaml/directory.go create mode 100644 packages/server/pkg/openyaml/directory_test.go create mode 100644 packages/server/pkg/openyaml/environment.go create mode 100644 packages/server/pkg/openyaml/flow.go create mode 100644 packages/server/pkg/openyaml/request.go create mode 100644 packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml create mode 100644 packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml create mode 100644 packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml create mode 100644 packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml create mode 100644 packages/server/pkg/translate/topencollection/auth.go create mode 100644 packages/server/pkg/translate/topencollection/body.go create mode 100644 packages/server/pkg/translate/topencollection/converter.go create mode 100644 packages/server/pkg/translate/topencollection/converter_test.go create mode 100644 packages/server/pkg/translate/topencollection/environment.go create mode 100644 packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml create mode 100644 packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml create mode 100644 packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml create mode 100644 
packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml create mode 100644 packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml create mode 100644 packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml create mode 100644 packages/server/pkg/translate/topencollection/testhelper_test.go create mode 100644 packages/server/pkg/translate/topencollection/types.go diff --git a/packages/db/pkg/sqlc/gen/db.go b/packages/db/pkg/sqlc/gen/db.go index 93601b6e..80ebb7a7 100644 --- a/packages/db/pkg/sqlc/gen/db.go +++ b/packages/db/pkg/sqlc/gen/db.go @@ -648,6 +648,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.getVariablesByEnvironmentIDOrderedStmt, err = db.PrepareContext(ctx, getVariablesByEnvironmentIDOrdered); err != nil { return nil, fmt.Errorf("error preparing query GetVariablesByEnvironmentIDOrdered: %w", err) } + if q.getSyncedWorkspacesStmt, err = db.PrepareContext(ctx, getSyncedWorkspaces); err != nil { + return nil, fmt.Errorf("error preparing query GetSyncedWorkspaces: %w", err) + } if q.getWorkspaceStmt, err = db.PrepareContext(ctx, getWorkspace); err != nil { return nil, fmt.Errorf("error preparing query GetWorkspace: %w", err) } @@ -834,6 +837,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.updateWorkspaceStmt, err = db.PrepareContext(ctx, updateWorkspace); err != nil { return nil, fmt.Errorf("error preparing query UpdateWorkspace: %w", err) } + if q.updateWorkspaceSyncStmt, err = db.PrepareContext(ctx, updateWorkspaceSync); err != nil { + return nil, fmt.Errorf("error preparing query UpdateWorkspaceSync: %w", err) + } if q.updateWorkspaceUpdatedTimeStmt, err = db.PrepareContext(ctx, updateWorkspaceUpdatedTime); err != nil { return nil, fmt.Errorf("error preparing query UpdateWorkspaceUpdatedTime: %w", err) } @@ -1891,6 +1897,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing 
getVariablesByEnvironmentIDOrderedStmt: %w", cerr) } } + if q.getSyncedWorkspacesStmt != nil { + if cerr := q.getSyncedWorkspacesStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getSyncedWorkspacesStmt: %w", cerr) + } + } if q.getWorkspaceStmt != nil { if cerr := q.getWorkspaceStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getWorkspaceStmt: %w", cerr) @@ -2201,6 +2212,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing updateWorkspaceStmt: %w", cerr) } } + if q.updateWorkspaceSyncStmt != nil { + if cerr := q.updateWorkspaceSyncStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateWorkspaceSyncStmt: %w", cerr) + } + } if q.updateWorkspaceUpdatedTimeStmt != nil { if cerr := q.updateWorkspaceUpdatedTimeStmt.Close(); cerr != nil { err = fmt.Errorf("error closing updateWorkspaceUpdatedTimeStmt: %w", cerr) @@ -2468,6 +2484,7 @@ type Queries struct { getVariableStmt *sql.Stmt getVariablesByEnvironmentIDStmt *sql.Stmt getVariablesByEnvironmentIDOrderedStmt *sql.Stmt + getSyncedWorkspacesStmt *sql.Stmt getWorkspaceStmt *sql.Stmt getWorkspaceByUserIDStmt *sql.Stmt getWorkspaceByUserIDandWorkspaceIDStmt *sql.Stmt @@ -2530,6 +2547,7 @@ type Queries struct { updateUserStmt *sql.Stmt updateVariableStmt *sql.Stmt updateWorkspaceStmt *sql.Stmt + updateWorkspaceSyncStmt *sql.Stmt updateWorkspaceUpdatedTimeStmt *sql.Stmt updateWorkspaceUserStmt *sql.Stmt upsertNodeExecutionStmt *sql.Stmt @@ -2748,6 +2766,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getVariableStmt: q.getVariableStmt, getVariablesByEnvironmentIDStmt: q.getVariablesByEnvironmentIDStmt, getVariablesByEnvironmentIDOrderedStmt: q.getVariablesByEnvironmentIDOrderedStmt, + getSyncedWorkspacesStmt: q.getSyncedWorkspacesStmt, getWorkspaceStmt: q.getWorkspaceStmt, getWorkspaceByUserIDStmt: q.getWorkspaceByUserIDStmt, getWorkspaceByUserIDandWorkspaceIDStmt: q.getWorkspaceByUserIDandWorkspaceIDStmt, @@ -2810,6 +2829,7 @@ func (q *Queries) WithTx(tx *sql.Tx) 
*Queries { updateUserStmt: q.updateUserStmt, updateVariableStmt: q.updateVariableStmt, updateWorkspaceStmt: q.updateWorkspaceStmt, + updateWorkspaceSyncStmt: q.updateWorkspaceSyncStmt, updateWorkspaceUpdatedTimeStmt: q.updateWorkspaceUpdatedTimeStmt, updateWorkspaceUserStmt: q.updateWorkspaceUserStmt, upsertNodeExecutionStmt: q.upsertNodeExecutionStmt, diff --git a/packages/db/pkg/sqlc/gen/models.go b/packages/db/pkg/sqlc/gen/models.go index 7eb307f3..2c6621d5 100644 --- a/packages/db/pkg/sqlc/gen/models.go +++ b/packages/db/pkg/sqlc/gen/models.go @@ -377,6 +377,9 @@ type Workspace struct { ActiveEnv idwrap.IDWrap GlobalEnv idwrap.IDWrap DisplayOrder float64 + SyncPath *string + SyncFormat *string + SyncEnabled bool } type WorkspacesUser struct { diff --git a/packages/db/pkg/sqlc/gen/workspaces.sql.go b/packages/db/pkg/sqlc/gen/workspaces.sql.go index 81133fb9..f08e3dfe 100644 --- a/packages/db/pkg/sqlc/gen/workspaces.sql.go +++ b/packages/db/pkg/sqlc/gen/workspaces.sql.go @@ -43,9 +43,9 @@ func (q *Queries) CheckIFWorkspaceUserExists(ctx context.Context, arg CheckIFWor const createWorkspace = `-- name: CreateWorkspace :exec INSERT INTO - workspaces (id, name, updated, collection_count, flow_count, active_env, global_env, display_order) + workspaces (id, name, updated, collection_count, flow_count, active_env, global_env, display_order, sync_path, sync_format, sync_enabled) VALUES - (?, ?, ?, ?, ?, ?, ?, ?) + (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
` type CreateWorkspaceParams struct { @@ -57,6 +57,9 @@ type CreateWorkspaceParams struct { ActiveEnv idwrap.IDWrap GlobalEnv idwrap.IDWrap DisplayOrder float64 + SyncPath *string + SyncFormat *string + SyncEnabled bool } func (q *Queries) CreateWorkspace(ctx context.Context, arg CreateWorkspaceParams) error { @@ -69,6 +72,9 @@ func (q *Queries) CreateWorkspace(ctx context.Context, arg CreateWorkspaceParams arg.ActiveEnv, arg.GlobalEnv, arg.DisplayOrder, + arg.SyncPath, + arg.SyncFormat, + arg.SyncEnabled, ) return err } @@ -128,7 +134,10 @@ SELECT w.flow_count, w.active_env, w.global_env, - w.display_order + w.display_order, + w.sync_path, + w.sync_format, + w.sync_enabled FROM workspaces w INNER JOIN workspaces_users wu ON w.id = wu.workspace_id @@ -157,6 +166,9 @@ func (q *Queries) GetAllWorkspacesByUserID(ctx context.Context, userID idwrap.ID &i.ActiveEnv, &i.GlobalEnv, &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, ); err != nil { return nil, err } @@ -180,7 +192,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -202,6 +217,9 @@ func (q *Queries) GetWorkspace(ctx context.Context, id idwrap.IDWrap) (Workspace &i.ActiveEnv, &i.GlobalEnv, &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, ) return i, err } @@ -215,7 +233,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -245,6 +266,9 @@ func (q *Queries) GetWorkspaceByUserID(ctx context.Context, userID idwrap.IDWrap &i.ActiveEnv, &i.GlobalEnv, &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, ) return i, err } @@ -258,7 +282,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -294,6 +321,9 @@ func (q *Queries) GetWorkspaceByUserIDandWorkspaceID(ctx context.Context, arg 
Ge &i.ActiveEnv, &i.GlobalEnv, &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, ) return i, err } @@ -445,7 +475,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -477,6 +510,9 @@ func (q *Queries) GetWorkspacesByUserID(ctx context.Context, userID idwrap.IDWra &i.ActiveEnv, &i.GlobalEnv, &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, ); err != nil { return nil, err } @@ -500,7 +536,10 @@ SELECT w.flow_count, w.active_env, w.global_env, - w.display_order + w.display_order, + w.sync_path, + w.sync_format, + w.sync_enabled FROM workspaces w INNER JOIN workspaces_users wu ON w.id = wu.workspace_id @@ -528,6 +567,9 @@ func (q *Queries) GetWorkspacesByUserIDOrdered(ctx context.Context, userID idwra &i.ActiveEnv, &i.GlobalEnv, &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, ); err != nil { return nil, err } @@ -578,6 +620,91 @@ func (q *Queries) UpdateWorkspace(ctx context.Context, arg UpdateWorkspaceParams return err } +const getSyncedWorkspaces = `-- name: GetSyncedWorkspaces :many +SELECT + id, + name, + updated, + collection_count, + flow_count, + active_env, + global_env, + display_order, + sync_path, + sync_format, + sync_enabled +FROM + workspaces +WHERE + sync_enabled = 1 +` + +// Returns all workspaces with sync enabled +func (q *Queries) GetSyncedWorkspaces(ctx context.Context) ([]Workspace, error) { + rows, err := q.query(ctx, q.getSyncedWorkspacesStmt, getSyncedWorkspaces) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Workspace{} + for rows.Next() { + var i Workspace + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Updated, + &i.CollectionCount, + &i.FlowCount, + &i.ActiveEnv, + &i.GlobalEnv, + &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return 
nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateWorkspaceSync = `-- name: UpdateWorkspaceSync :exec +UPDATE workspaces +SET + sync_path = ?, + sync_format = ?, + sync_enabled = ?, + updated = ? +WHERE + id = ? +` + +type UpdateWorkspaceSyncParams struct { + SyncPath *string + SyncFormat *string + SyncEnabled bool + Updated int64 + ID idwrap.IDWrap +} + +func (q *Queries) UpdateWorkspaceSync(ctx context.Context, arg UpdateWorkspaceSyncParams) error { + _, err := q.exec(ctx, q.updateWorkspaceSyncStmt, updateWorkspaceSync, + arg.SyncPath, + arg.SyncFormat, + arg.SyncEnabled, + arg.Updated, + arg.ID, + ) + return err +} + const updateWorkspaceUpdatedTime = `-- name: UpdateWorkspaceUpdatedTime :exec UPDATE workspaces SET diff --git a/packages/db/pkg/sqlc/queries/workspaces.sql b/packages/db/pkg/sqlc/queries/workspaces.sql index 9551316b..40f6ecb9 100644 --- a/packages/db/pkg/sqlc/queries/workspaces.sql +++ b/packages/db/pkg/sqlc/queries/workspaces.sql @@ -10,7 +10,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -27,7 +30,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -53,7 +59,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -75,7 +84,10 @@ SELECT flow_count, active_env, global_env, - display_order + display_order, + sync_path, + sync_format, + sync_enabled FROM workspaces WHERE @@ -95,9 +107,9 @@ LIMIT -- name: CreateWorkspace :exec INSERT INTO - workspaces (id, name, updated, collection_count, flow_count, active_env, global_env, display_order) + workspaces (id, name, updated, collection_count, flow_count, active_env, global_env, display_order, sync_path, sync_format, sync_enabled) VALUES - (?, ?, ?, ?, ?, 
?, ?, ?); + (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); -- name: UpdateWorkspace :exec UPDATE workspaces @@ -118,6 +130,16 @@ SET WHERE id = ?; +-- name: UpdateWorkspaceSync :exec +UPDATE workspaces +SET + sync_path = ?, + sync_format = ?, + sync_enabled = ?, + updated = ? +WHERE + id = ?; + -- name: DeleteWorkspace :exec DELETE FROM workspaces WHERE @@ -132,7 +154,10 @@ SELECT w.flow_count, w.active_env, w.global_env, - w.display_order + w.display_order, + w.sync_path, + w.sync_format, + w.sync_enabled FROM workspaces w INNER JOIN workspaces_users wu ON w.id = wu.workspace_id @@ -151,7 +176,10 @@ SELECT w.flow_count, w.active_env, w.global_env, - w.display_order + w.display_order, + w.sync_path, + w.sync_format, + w.sync_enabled FROM workspaces w INNER JOIN workspaces_users wu ON w.id = wu.workspace_id @@ -160,6 +188,25 @@ WHERE ORDER BY w.updated DESC; +-- name: GetSyncedWorkspaces :many +-- Returns all workspaces with sync enabled +SELECT + id, + name, + updated, + collection_count, + flow_count, + active_env, + global_env, + display_order, + sync_path, + sync_format, + sync_enabled +FROM + workspaces +WHERE + sync_enabled = 1; + -- -- WorkspaceUsers -- @@ -246,4 +293,4 @@ WHERE -- name: DeleteWorkspaceUser :exec DELETE FROM workspaces_users WHERE - id = ?; \ No newline at end of file + id = ?; diff --git a/packages/db/pkg/sqlc/schema/01_workspaces.sql b/packages/db/pkg/sqlc/schema/01_workspaces.sql index 4f0e39a3..5db88c3d 100644 --- a/packages/db/pkg/sqlc/schema/01_workspaces.sql +++ b/packages/db/pkg/sqlc/schema/01_workspaces.sql @@ -7,7 +7,10 @@ CREATE TABLE workspaces ( flow_count INT NOT NULL DEFAULT 0, active_env BLOB, global_env BLOB, - display_order REAL NOT NULL DEFAULT 0 + display_order REAL NOT NULL DEFAULT 0, + sync_path TEXT, + sync_format TEXT, + sync_enabled BOOLEAN NOT NULL DEFAULT 0 ); CREATE INDEX workspaces_idx1 ON workspaces ( diff --git a/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go 
b/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go new file mode 100644 index 00000000..1414e143 --- /dev/null +++ b/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go @@ -0,0 +1,64 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/migrate" +) + +// MigrationAddWorkspaceSyncID is the ULID for the workspace sync migration. +const MigrationAddWorkspaceSyncID = "01KGZ57RM25ANJQA21JQGJ6D2M" + +// MigrationAddWorkspaceSyncChecksum is a stable hash of this migration. +const MigrationAddWorkspaceSyncChecksum = "sha256:add-workspace-sync-v1" + +func init() { + if err := migrate.Register(migrate.Migration{ + ID: MigrationAddWorkspaceSyncID, + Checksum: MigrationAddWorkspaceSyncChecksum, + Description: "Add sync_path, sync_format, sync_enabled columns to workspaces table", + Apply: applyWorkspaceSync, + Validate: validateWorkspaceSync, + RequiresBackup: false, + }); err != nil { + panic("failed to register workspace sync migration: " + err.Error()) + } +} + +// applyWorkspaceSync adds folder sync columns to the workspaces table. +func applyWorkspaceSync(ctx context.Context, tx *sql.Tx) error { + if _, err := tx.ExecContext(ctx, `ALTER TABLE workspaces ADD COLUMN sync_path TEXT`); err != nil { + return fmt.Errorf("add sync_path column: %w", err) + } + + if _, err := tx.ExecContext(ctx, `ALTER TABLE workspaces ADD COLUMN sync_format TEXT`); err != nil { + return fmt.Errorf("add sync_format column: %w", err) + } + + if _, err := tx.ExecContext(ctx, `ALTER TABLE workspaces ADD COLUMN sync_enabled BOOLEAN NOT NULL DEFAULT 0`); err != nil { + return fmt.Errorf("add sync_enabled column: %w", err) + } + + return nil +} + +// validateWorkspaceSync verifies that sync columns exist on the workspaces table. 
+func validateWorkspaceSync(ctx context.Context, db *sql.DB) error { + columns := []string{"sync_path", "sync_format", "sync_enabled"} + for _, col := range columns { + var cid int + var name, ctype string + var notnull int + var dfltValue *string + var pk int + err := db.QueryRowContext(ctx, + fmt.Sprintf(`SELECT cid, name, type, "notnull", dflt_value, pk FROM pragma_table_info('workspaces') WHERE name = '%s'`, col), + ).Scan(&cid, &name, &ctype, &notnull, &dfltValue, &pk) + if err != nil { + return fmt.Errorf("column %s not found in workspaces table: %w", col, err) + } + } + return nil +} diff --git a/packages/server/pkg/model/mworkspace/mworkspace.go b/packages/server/pkg/model/mworkspace/mworkspace.go index ce8f30f5..bb86b6ce 100644 --- a/packages/server/pkg/model/mworkspace/mworkspace.go +++ b/packages/server/pkg/model/mworkspace/mworkspace.go @@ -15,6 +15,10 @@ type Workspace struct { GlobalEnv idwrap.IDWrap ID idwrap.IDWrap Order float64 + // Folder sync fields + SyncPath *string // nil = no sync, else absolute path to folder + SyncFormat *string // "open_yaml" or nil + SyncEnabled bool // Whether sync is currently active } func (w Workspace) GetCreatedTime() time.Time { diff --git a/packages/server/pkg/openyaml/directory.go b/packages/server/pkg/openyaml/directory.go new file mode 100644 index 00000000..0d2835c4 --- /dev/null +++ b/packages/server/pkg/openyaml/directory.go @@ -0,0 +1,763 @@ +package openyaml + +import ( + "fmt" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "gopkg.in/yaml.v3" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" + 
"github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" + yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" +) + +const ( + flowsDir = "flows" + environmentsDir = "environments" + yamlExt = ".yaml" +) + +// ReadOptions configures directory reading. +type ReadOptions struct { + WorkspaceID idwrap.IDWrap +} + +// ReadDirectory reads an OpenYAML folder into a WorkspaceBundle. +// Directory structure: +// - *.yaml files in root/subdirs -> YamlRequestDefV2 -> mhttp models +// - flows/*.yaml -> YamlFlowFlowV2 -> mflow models +// - environments/*.yaml -> YamlEnvironmentV2 -> menv models +// - Subdirectories -> mfile.File (ContentTypeFolder) +func ReadDirectory(dirPath string, opts ReadOptions) (*ioworkspace.WorkspaceBundle, error) { + bundle := &ioworkspace.WorkspaceBundle{ + Workspace: mworkspace.Workspace{ + ID: opts.WorkspaceID, + Name: filepath.Base(dirPath), + }, + } + + now := time.Now() + + // Read environments + envDir := filepath.Join(dirPath, environmentsDir) + if info, err := os.Stat(envDir); err == nil && info.IsDir() { + if err := readEnvironments(envDir, opts.WorkspaceID, bundle); err != nil { + return nil, fmt.Errorf("read environments: %w", err) + } + } + + // Read flows + flowDir := filepath.Join(dirPath, flowsDir) + if info, err := os.Stat(flowDir); err == nil && info.IsDir() { + if err := readFlows(flowDir, opts.WorkspaceID, bundle); err != nil { + return nil, fmt.Errorf("read flows: %w", err) + } + } + + // Read requests recursively (excluding flows/ and environments/ dirs) + if err := readRequestsRecursive(dirPath, dirPath, nil, opts.WorkspaceID, now, bundle); err != nil { + return nil, fmt.Errorf("read requests: %w", err) + } + + return bundle, nil +} + +// WriteDirectory exports a WorkspaceBundle to an OpenYAML folder. +// Creates one .yaml file per request, flow, and environment. +// Directory structure mirrors the mfile.File hierarchy. 
+func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { + if err := os.MkdirAll(dirPath, 0o755); err != nil { + return fmt.Errorf("create directory: %w", err) + } + + // Build lookup maps + fileByID := make(map[idwrap.IDWrap]mfile.File) + for _, f := range bundle.Files { + fileByID[f.ID] = f + } + + httpByID := make(map[idwrap.IDWrap]mhttp.HTTP) + for _, h := range bundle.HTTPRequests { + httpByID[h.ID] = h + } + + headersByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPHeader) + for _, h := range bundle.HTTPHeaders { + headersByHTTP[h.HttpID] = append(headersByHTTP[h.HttpID], h) + } + + paramsByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPSearchParam) + for _, p := range bundle.HTTPSearchParams { + paramsByHTTP[p.HttpID] = append(paramsByHTTP[p.HttpID], p) + } + + bodyRawByHTTP := make(map[idwrap.IDWrap]mhttp.HTTPBodyRaw) + for _, b := range bundle.HTTPBodyRaw { + bodyRawByHTTP[b.HttpID] = b + } + + bodyFormByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPBodyForm) + for _, f := range bundle.HTTPBodyForms { + bodyFormByHTTP[f.HttpID] = append(bodyFormByHTTP[f.HttpID], f) + } + + bodyUrlByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded) + for _, u := range bundle.HTTPBodyUrlencoded { + bodyUrlByHTTP[u.HttpID] = append(bodyUrlByHTTP[u.HttpID], u) + } + + assertsByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPAssert) + for _, a := range bundle.HTTPAsserts { + assertsByHTTP[a.HttpID] = append(assertsByHTTP[a.HttpID], a) + } + + // Write environments + if len(bundle.Environments) > 0 { + envDir := filepath.Join(dirPath, environmentsDir) + if err := os.MkdirAll(envDir, 0o755); err != nil { + return fmt.Errorf("create environments dir: %w", err) + } + + envVarsByEnv := make(map[idwrap.IDWrap][]menv.Variable) + for _, v := range bundle.EnvironmentVars { + envVarsByEnv[v.EnvID] = append(envVarsByEnv[v.EnvID], v) + } + + for _, env := range bundle.Environments { + yamlEnv := yfs.YamlEnvironmentV2{ + Name: env.Name, + Description: env.Description, + 
Variables: make(map[string]string), + } + + vars := envVarsByEnv[env.ID] + sort.Slice(vars, func(i, j int) bool { return vars[i].Order < vars[j].Order }) + for _, v := range vars { + yamlEnv.Variables[v.VarKey] = v.Value + } + + data, err := WriteSingleEnvironment(yamlEnv) + if err != nil { + return fmt.Errorf("marshal environment %q: %w", env.Name, err) + } + + filename := sanitizeFilename(env.Name) + yamlExt + if err := atomicWrite(filepath.Join(envDir, filename), data); err != nil { + return fmt.Errorf("write environment %q: %w", env.Name, err) + } + } + } + + // Write flows + if len(bundle.Flows) > 0 { + flowDir := filepath.Join(dirPath, flowsDir) + if err := os.MkdirAll(flowDir, 0o755); err != nil { + return fmt.Errorf("create flows dir: %w", err) + } + + for _, flow := range bundle.Flows { + yamlFlow := exportFlow(flow, bundle) + data, err := WriteSingleFlow(yamlFlow) + if err != nil { + return fmt.Errorf("marshal flow %q: %w", flow.Name, err) + } + + filename := sanitizeFilename(flow.Name) + yamlExt + if err := atomicWrite(filepath.Join(flowDir, filename), data); err != nil { + return fmt.Errorf("write flow %q: %w", flow.Name, err) + } + } + } + + // Write requests organized by file hierarchy + // Build parent->children map + childrenByParent := make(map[string][]mfile.File) // parentID string -> children + for _, f := range bundle.Files { + parentKey := "" + if f.ParentID != nil { + parentKey = f.ParentID.String() + } + childrenByParent[parentKey] = append(childrenByParent[parentKey], f) + } + + // Write recursively starting from root files + if err := writeFilesRecursive(dirPath, "", childrenByParent, httpByID, headersByHTTP, paramsByHTTP, bodyRawByHTTP, bodyFormByHTTP, bodyUrlByHTTP, assertsByHTTP); err != nil { + return err + } + + return nil +} + +func readEnvironments(envDir string, workspaceID idwrap.IDWrap, bundle *ioworkspace.WorkspaceBundle) error { + entries, err := os.ReadDir(envDir) + if err != nil { + return err + } + + for _, entry := range 
entries { + if entry.IsDir() || !isYAMLFile(entry.Name()) { + continue + } + + data, err := os.ReadFile(filepath.Join(envDir, entry.Name())) + if err != nil { + return fmt.Errorf("read %s: %w", entry.Name(), err) + } + + yamlEnv, err := ReadSingleEnvironment(data) + if err != nil { + return fmt.Errorf("parse %s: %w", entry.Name(), err) + } + + envID := idwrap.NewNow() + env := menv.Env{ + ID: envID, + WorkspaceID: workspaceID, + Type: menv.EnvNormal, + Name: yamlEnv.Name, + Description: yamlEnv.Description, + } + bundle.Environments = append(bundle.Environments, env) + + // Sort keys for deterministic ordering + var keys []string + for k := range yamlEnv.Variables { + keys = append(keys, k) + } + sort.Strings(keys) + + for i, k := range keys { + bundle.EnvironmentVars = append(bundle.EnvironmentVars, menv.Variable{ + ID: idwrap.NewNow(), + EnvID: envID, + VarKey: k, + Value: yamlEnv.Variables[k], + Enabled: true, + Order: float64(i + 1), + }) + } + } + + return nil +} + +func readFlows(flowDir string, workspaceID idwrap.IDWrap, bundle *ioworkspace.WorkspaceBundle) error { + entries, err := os.ReadDir(flowDir) + if err != nil { + return err + } + + for _, entry := range entries { + if entry.IsDir() || !isYAMLFile(entry.Name()) { + continue + } + + data, err := os.ReadFile(filepath.Join(flowDir, entry.Name())) + if err != nil { + return fmt.Errorf("read %s: %w", entry.Name(), err) + } + + yamlFlow, err := ReadSingleFlow(data) + if err != nil { + return fmt.Errorf("parse %s: %w", entry.Name(), err) + } + + flowID := idwrap.NewNow() + flow := mflow.Flow{ + ID: flowID, + WorkspaceID: workspaceID, + Name: yamlFlow.Name, + } + bundle.Flows = append(bundle.Flows, flow) + + // Create file entry for the flow + contentID := flowID + bundle.Files = append(bundle.Files, mfile.File{ + ID: idwrap.NewNow(), + WorkspaceID: workspaceID, + ContentID: &contentID, + ContentType: mfile.ContentTypeFlow, + Name: yamlFlow.Name, + }) + + // Convert flow variables + for _, v := range 
yamlFlow.Variables { + bundle.FlowVariables = append(bundle.FlowVariables, mflow.FlowVariable{ + ID: idwrap.NewNow(), + FlowID: flowID, + Name: v.Name, + Value: v.Value, + }) + } + } + + return nil +} + +func readRequestsRecursive( + rootDir string, + dirPath string, + parentID *idwrap.IDWrap, + workspaceID idwrap.IDWrap, + now time.Time, + bundle *ioworkspace.WorkspaceBundle, +) error { + entries, err := os.ReadDir(dirPath) + if err != nil { + return err + } + + order := float64(1) + for _, entry := range entries { + name := entry.Name() + + // Skip special directories and hidden files + if strings.HasPrefix(name, ".") { + continue + } + + if entry.IsDir() { + // Skip reserved directories + rel, _ := filepath.Rel(rootDir, filepath.Join(dirPath, name)) + if rel == flowsDir || rel == environmentsDir { + continue + } + + // Create folder file entry + folderID := idwrap.NewNow() + folderContentID := folderID + bundle.Files = append(bundle.Files, mfile.File{ + ID: folderID, + WorkspaceID: workspaceID, + ParentID: parentID, + ContentID: &folderContentID, + ContentType: mfile.ContentTypeFolder, + Name: name, + Order: order, + UpdatedAt: now, + }) + + if err := readRequestsRecursive(rootDir, filepath.Join(dirPath, name), &folderID, workspaceID, now, bundle); err != nil { + return err + } + order++ + continue + } + + if !isYAMLFile(name) { + continue + } + + data, err := os.ReadFile(filepath.Join(dirPath, name)) + if err != nil { + return fmt.Errorf("read %s: %w", name, err) + } + + yamlReq, err := ReadSingleRequest(data) + if err != nil { + return fmt.Errorf("parse %s: %w", name, err) + } + + fileOrder := order + if yamlReq.Order > 0 { + fileOrder = yamlReq.Order + } + + httpID := idwrap.NewNow() + nowMs := now.UnixMilli() + + // Determine body kind + bodyKind := mhttp.HttpBodyKindNone + if yamlReq.Body != nil { + switch strings.ToLower(yamlReq.Body.Type) { + case "form_data", "form-data": + bodyKind = mhttp.HttpBodyKindFormData + case "urlencoded": + bodyKind = 
mhttp.HttpBodyKindUrlEncoded + case "raw", "json", "xml", "text": + bodyKind = mhttp.HttpBodyKindRaw + } + } + + httpReq := mhttp.HTTP{ + ID: httpID, + WorkspaceID: workspaceID, + Name: yamlReq.Name, + Url: yamlReq.URL, + Method: strings.ToUpper(yamlReq.Method), + Description: yamlReq.Description, + BodyKind: bodyKind, + CreatedAt: nowMs, + UpdatedAt: nowMs, + } + if httpReq.Method == "" { + httpReq.Method = "GET" + } + bundle.HTTPRequests = append(bundle.HTTPRequests, httpReq) + + // Create file entry + contentID := httpID + bundle.Files = append(bundle.Files, mfile.File{ + ID: idwrap.NewNow(), + WorkspaceID: workspaceID, + ParentID: parentID, + ContentID: &contentID, + ContentType: mfile.ContentTypeHTTP, + Name: yamlReq.Name, + Order: fileOrder, + UpdatedAt: now, + }) + + // Convert headers + for i, h := range yamlReq.Headers { + bundle.HTTPHeaders = append(bundle.HTTPHeaders, mhttp.HTTPHeader{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: h.Name, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + DisplayOrder: float32(i + 1), + CreatedAt: nowMs, + UpdatedAt: nowMs, + }) + } + + // Convert query params + for i, p := range yamlReq.QueryParams { + bundle.HTTPSearchParams = append(bundle.HTTPSearchParams, mhttp.HTTPSearchParam{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: p.Name, + Value: p.Value, + Enabled: p.Enabled, + Description: p.Description, + DisplayOrder: float64(i + 1), + CreatedAt: nowMs, + UpdatedAt: nowMs, + }) + } + + // Convert body + if yamlReq.Body != nil { + convertYAMLBody(yamlReq.Body, httpID, nowMs, bundle) + } + + // Convert assertions + for i, a := range yamlReq.Assertions { + bundle.HTTPAsserts = append(bundle.HTTPAsserts, mhttp.HTTPAssert{ + ID: idwrap.NewNow(), + HttpID: httpID, + Value: a.Expression, + Enabled: a.Enabled, + DisplayOrder: float32(i + 1), + CreatedAt: nowMs, + UpdatedAt: nowMs, + }) + } + + order++ + } + + return nil +} + +func convertYAMLBody(body *yfs.YamlBodyUnion, httpID idwrap.IDWrap, nowMs 
int64, bundle *ioworkspace.WorkspaceBundle) { + switch strings.ToLower(body.Type) { + case "form_data", "form-data": + for i, f := range body.Form { + bundle.HTTPBodyForms = append(bundle.HTTPBodyForms, mhttp.HTTPBodyForm{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: f.Name, + Value: f.Value, + Enabled: f.Enabled, + Description: f.Description, + DisplayOrder: float32(i + 1), + CreatedAt: nowMs, + UpdatedAt: nowMs, + }) + } + case "urlencoded": + for i, u := range body.UrlEncoded { + bundle.HTTPBodyUrlencoded = append(bundle.HTTPBodyUrlencoded, mhttp.HTTPBodyUrlencoded{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: u.Name, + Value: u.Value, + Enabled: u.Enabled, + Description: u.Description, + DisplayOrder: float32(i + 1), + CreatedAt: nowMs, + UpdatedAt: nowMs, + }) + } + default: + // Raw body (json, xml, text, raw) + rawData := body.Raw + if rawData == "" && body.JSON != nil { + // Marshal JSON map back to string + b, _ := yaml.Marshal(body.JSON) + rawData = string(b) + } + if rawData != "" { + bundle.HTTPBodyRaw = append(bundle.HTTPBodyRaw, mhttp.HTTPBodyRaw{ + ID: idwrap.NewNow(), + HttpID: httpID, + RawData: []byte(rawData), + CreatedAt: nowMs, + UpdatedAt: nowMs, + }) + } + } +} + +func writeFilesRecursive( + currentDir string, + parentIDStr string, + childrenByParent map[string][]mfile.File, + httpByID map[idwrap.IDWrap]mhttp.HTTP, + headersByHTTP map[idwrap.IDWrap][]mhttp.HTTPHeader, + paramsByHTTP map[idwrap.IDWrap][]mhttp.HTTPSearchParam, + bodyRawByHTTP map[idwrap.IDWrap]mhttp.HTTPBodyRaw, + bodyFormByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyForm, + bodyUrlByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded, + assertsByHTTP map[idwrap.IDWrap][]mhttp.HTTPAssert, +) error { + children := childrenByParent[parentIDStr] + sort.Slice(children, func(i, j int) bool { return children[i].Order < children[j].Order }) + + for _, f := range children { + switch f.ContentType { + case mfile.ContentTypeFolder: + subDir := filepath.Join(currentDir, 
sanitizeFilename(f.Name)) + if err := os.MkdirAll(subDir, 0o755); err != nil { + return fmt.Errorf("create dir %q: %w", f.Name, err) + } + if err := writeFilesRecursive(subDir, f.ID.String(), childrenByParent, httpByID, headersByHTTP, paramsByHTTP, bodyRawByHTTP, bodyFormByHTTP, bodyUrlByHTTP, assertsByHTTP); err != nil { + return err + } + + case mfile.ContentTypeHTTP: + if f.ContentID == nil { + continue + } + httpReq, ok := httpByID[*f.ContentID] + if !ok { + continue + } + + yamlReq := exportHTTPRequest(httpReq, f.Order, headersByHTTP, paramsByHTTP, bodyRawByHTTP, bodyFormByHTTP, bodyUrlByHTTP, assertsByHTTP) + data, err := WriteSingleRequest(yamlReq) + if err != nil { + return fmt.Errorf("marshal request %q: %w", httpReq.Name, err) + } + + filename := sanitizeFilename(httpReq.Name) + yamlExt + if err := atomicWrite(filepath.Join(currentDir, filename), data); err != nil { + return fmt.Errorf("write request %q: %w", httpReq.Name, err) + } + } + } + + return nil +} + +func exportHTTPRequest( + httpReq mhttp.HTTP, + order float64, + headersByHTTP map[idwrap.IDWrap][]mhttp.HTTPHeader, + paramsByHTTP map[idwrap.IDWrap][]mhttp.HTTPSearchParam, + bodyRawByHTTP map[idwrap.IDWrap]mhttp.HTTPBodyRaw, + bodyFormByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyForm, + bodyUrlByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded, + assertsByHTTP map[idwrap.IDWrap][]mhttp.HTTPAssert, +) yfs.YamlRequestDefV2 { + req := yfs.YamlRequestDefV2{ + Name: httpReq.Name, + Method: httpReq.Method, + URL: httpReq.Url, + Description: httpReq.Description, + Order: order, + } + + // Headers + headers := headersByHTTP[httpReq.ID] + if len(headers) > 0 { + var pairs []yfs.YamlNameValuePairV2 + for _, h := range headers { + pairs = append(pairs, yfs.YamlNameValuePairV2{ + Name: h.Key, + Value: h.Value, + Description: h.Description, + Enabled: h.Enabled, + }) + } + req.Headers = yfs.HeaderMapOrSlice(pairs) + } + + // Query params + params := paramsByHTTP[httpReq.ID] + if len(params) > 0 { + var pairs 
[]yfs.YamlNameValuePairV2 + for _, p := range params { + pairs = append(pairs, yfs.YamlNameValuePairV2{ + Name: p.Key, + Value: p.Value, + Description: p.Description, + Enabled: p.Enabled, + }) + } + req.QueryParams = yfs.HeaderMapOrSlice(pairs) + } + + // Body + switch httpReq.BodyKind { + case mhttp.HttpBodyKindFormData: + forms := bodyFormByHTTP[httpReq.ID] + if len(forms) > 0 { + var pairs []yfs.YamlNameValuePairV2 + for _, f := range forms { + pairs = append(pairs, yfs.YamlNameValuePairV2{ + Name: f.Key, + Value: f.Value, + Description: f.Description, + Enabled: f.Enabled, + }) + } + req.Body = &yfs.YamlBodyUnion{ + Type: "form_data", + Form: yfs.HeaderMapOrSlice(pairs), + } + } + case mhttp.HttpBodyKindUrlEncoded: + urls := bodyUrlByHTTP[httpReq.ID] + if len(urls) > 0 { + var pairs []yfs.YamlNameValuePairV2 + for _, u := range urls { + pairs = append(pairs, yfs.YamlNameValuePairV2{ + Name: u.Key, + Value: u.Value, + Description: u.Description, + Enabled: u.Enabled, + }) + } + req.Body = &yfs.YamlBodyUnion{ + Type: "urlencoded", + UrlEncoded: yfs.HeaderMapOrSlice(pairs), + } + } + case mhttp.HttpBodyKindRaw: + if raw, ok := bodyRawByHTTP[httpReq.ID]; ok && len(raw.RawData) > 0 { + req.Body = &yfs.YamlBodyUnion{ + Type: "raw", + Raw: string(raw.RawData), + } + } + } + + // Assertions + asserts := assertsByHTTP[httpReq.ID] + if len(asserts) > 0 { + var yamlAsserts []yfs.YamlAssertionV2 + for _, a := range asserts { + yamlAsserts = append(yamlAsserts, yfs.YamlAssertionV2{ + Expression: a.Value, + Enabled: a.Enabled, + }) + } + req.Assertions = yfs.AssertionsOrSlice(yamlAsserts) + } + + return req +} + +func exportFlow(flow mflow.Flow, bundle *ioworkspace.WorkspaceBundle) yfs.YamlFlowFlowV2 { + yamlFlow := yfs.YamlFlowFlowV2{ + Name: flow.Name, + } + + // Variables + for _, v := range bundle.FlowVariables { + if v.FlowID.Compare(flow.ID) == 0 { + yamlFlow.Variables = append(yamlFlow.Variables, yfs.YamlFlowVariableV2{ + Name: v.Name, + Value: v.Value, + }) + } + } 
+ + return yamlFlow +} + +// atomicWrite writes data to a temp file then renames for safety. +func atomicWrite(path string, data []byte) error { + dir := filepath.Dir(path) + tmp, err := os.CreateTemp(dir, ".openyaml-*") + if err != nil { + return err + } + tmpName := tmp.Name() + + if _, err := tmp.Write(data); err != nil { + tmp.Close() + os.Remove(tmpName) + return err + } + if err := tmp.Close(); err != nil { + os.Remove(tmpName) + return err + } + + return os.Rename(tmpName, path) +} + +// sanitizeFilename makes a string safe for use as a filename. +func sanitizeFilename(name string) string { + if name == "" { + return "untitled" + } + + // Replace characters that are problematic in filenames + replacer := strings.NewReplacer( + "/", "_", + "\\", "_", + ":", "_", + "*", "_", + "?", "_", + "\"", "_", + "<", "_", + ">", "_", + "|", "_", + ) + name = replacer.Replace(name) + name = strings.TrimSpace(name) + + // Convert to lowercase kebab-case for consistency + name = strings.ToLower(name) + name = strings.ReplaceAll(name, " ", "-") + + if name == "" { + return "untitled" + } + + if len(name) > 255 { + name = name[:255] + } + + return name +} + +func isYAMLFile(name string) bool { + ext := strings.ToLower(filepath.Ext(name)) + return ext == ".yaml" || ext == ".yml" +} diff --git a/packages/server/pkg/openyaml/directory_test.go b/packages/server/pkg/openyaml/directory_test.go new file mode 100644 index 00000000..6cc6e73b --- /dev/null +++ b/packages/server/pkg/openyaml/directory_test.go @@ -0,0 +1,271 @@ +package openyaml + +import ( + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" +) + +func testdataPath(name string) string { + _, filename, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(filename), "testdata", name) +} + +func 
TestReadDirectory(t *testing.T) { + dirPath := testdataPath("collection") + opts := ReadOptions{ + WorkspaceID: idwrap.NewNow(), + } + + bundle, err := ReadDirectory(dirPath, opts) + if err != nil { + t.Fatalf("ReadDirectory failed: %v", err) + } + + // Verify workspace name + if bundle.Workspace.Name != "collection" { + t.Errorf("expected workspace name 'collection', got %q", bundle.Workspace.Name) + } + + // Verify HTTP requests: Get Users, Create User = 2 + if len(bundle.HTTPRequests) != 2 { + t.Fatalf("expected 2 HTTP requests, got %d", len(bundle.HTTPRequests)) + } + + reqByName := make(map[string]mhttp.HTTP) + for _, r := range bundle.HTTPRequests { + reqByName[r.Name] = r + } + + getUsers, ok := reqByName["Get Users"] + if !ok { + t.Fatal("missing 'Get Users' request") + } + if getUsers.Method != "GET" { + t.Errorf("Get Users method: expected GET, got %s", getUsers.Method) + } + + createUser, ok := reqByName["Create User"] + if !ok { + t.Fatal("missing 'Create User' request") + } + if createUser.Method != "POST" { + t.Errorf("Create User method: expected POST, got %s", createUser.Method) + } + + // Verify headers + if len(bundle.HTTPHeaders) == 0 { + t.Error("expected some headers, got none") + } + + // Verify query params for Get Users + var getUsersParams int + for _, p := range bundle.HTTPSearchParams { + if p.HttpID == getUsers.ID { + getUsersParams++ + } + } + if getUsersParams != 2 { + t.Errorf("expected 2 search params for Get Users, got %d", getUsersParams) + } + + // Verify body raw for Create User + var createUserBodyRaw int + for _, b := range bundle.HTTPBodyRaw { + if b.HttpID == createUser.ID { + createUserBodyRaw++ + } + } + if createUserBodyRaw != 1 { + t.Errorf("expected 1 body raw for Create User, got %d", createUserBodyRaw) + } + + // Verify assertions + if len(bundle.HTTPAsserts) == 0 { + t.Error("expected some assertions, got none") + } + + // Verify files: should have "users" folder + 2 requests + 1 flow file + var folderCount int + for 
_, f := range bundle.Files { + if f.ContentType == mfile.ContentTypeFolder { + folderCount++ + } + } + if folderCount != 1 { + t.Errorf("expected 1 folder (users), got %d", folderCount) + } + + // Verify environments + if len(bundle.Environments) != 1 { + t.Errorf("expected 1 environment, got %d", len(bundle.Environments)) + } + + // Verify flows + if len(bundle.Flows) != 1 { + t.Errorf("expected 1 flow, got %d", len(bundle.Flows)) + } + if len(bundle.Flows) > 0 && bundle.Flows[0].Name != "Smoke Test" { + t.Errorf("expected flow name 'Smoke Test', got %q", bundle.Flows[0].Name) + } +} + +func TestRoundTrip(t *testing.T) { + // Read a directory + srcPath := testdataPath("collection") + opts := ReadOptions{ + WorkspaceID: idwrap.NewNow(), + } + + bundle, err := ReadDirectory(srcPath, opts) + if err != nil { + t.Fatalf("ReadDirectory failed: %v", err) + } + + // Write to a temp directory + outDir := filepath.Join(t.TempDir(), "output") + if err := WriteDirectory(outDir, bundle); err != nil { + t.Fatalf("WriteDirectory failed: %v", err) + } + + // Re-read the written directory + opts2 := ReadOptions{ + WorkspaceID: idwrap.NewNow(), + } + + bundle2, err := ReadDirectory(outDir, opts2) + if err != nil { + t.Fatalf("ReadDirectory (round-trip) failed: %v", err) + } + + // Compare counts + if len(bundle.HTTPRequests) != len(bundle2.HTTPRequests) { + t.Errorf("request count mismatch: %d vs %d", len(bundle.HTTPRequests), len(bundle2.HTTPRequests)) + } + if len(bundle.Environments) != len(bundle2.Environments) { + t.Errorf("environment count mismatch: %d vs %d", len(bundle.Environments), len(bundle2.Environments)) + } + if len(bundle.Flows) != len(bundle2.Flows) { + t.Errorf("flow count mismatch: %d vs %d", len(bundle.Flows), len(bundle2.Flows)) + } + + // Verify the written directory has correct structure + entries, err := os.ReadDir(outDir) + if err != nil { + t.Fatalf("failed to read output dir: %v", err) + } + + var hasFolders, hasEnvs, hasFlows bool + for _, e := range 
entries { + switch e.Name() { + case "environments": + hasEnvs = true + case "flows": + hasFlows = true + case "users": + hasFolders = true + } + } + + if !hasEnvs { + t.Error("missing environments/ directory in output") + } + if !hasFlows { + t.Error("missing flows/ directory in output") + } + if !hasFolders { + t.Error("missing users/ directory in output") + } +} + +func TestReadWriteSingleRequest(t *testing.T) { + yamlData := []byte(` +name: Test Request +method: POST +url: "https://api.example.com/test" +order: 5 +headers: + Content-Type: application/json +body: + type: raw + raw: '{"key": "value"}' +assertions: + - "res.status eq 200" +`) + + req, err := ReadSingleRequest(yamlData) + if err != nil { + t.Fatalf("ReadSingleRequest failed: %v", err) + } + + if req.Name != "Test Request" { + t.Errorf("expected name 'Test Request', got %q", req.Name) + } + if req.Method != "POST" { + t.Errorf("expected method POST, got %s", req.Method) + } + if req.Order != 5 { + t.Errorf("expected order 5, got %f", req.Order) + } + + // Round-trip + data, err := WriteSingleRequest(*req) + if err != nil { + t.Fatalf("WriteSingleRequest failed: %v", err) + } + + req2, err := ReadSingleRequest(data) + if err != nil { + t.Fatalf("ReadSingleRequest (round-trip) failed: %v", err) + } + + if req2.Name != req.Name || req2.Method != req.Method || req2.URL != req.URL { + t.Error("round-trip mismatch") + } +} + +func TestReadWriteSingleFlow(t *testing.T) { + yamlData := []byte(` +name: Test Flow +variables: + - name: token + value: "" +steps: + - request: + name: Login + method: POST + url: "https://api.example.com/login" +`) + + flow, err := ReadSingleFlow(yamlData) + if err != nil { + t.Fatalf("ReadSingleFlow failed: %v", err) + } + + if flow.Name != "Test Flow" { + t.Errorf("expected name 'Test Flow', got %q", flow.Name) + } + if len(flow.Variables) != 1 { + t.Errorf("expected 1 variable, got %d", len(flow.Variables)) + } + + // Round-trip + data, err := WriteSingleFlow(*flow) + if err != 
nil { + t.Fatalf("WriteSingleFlow failed: %v", err) + } + + flow2, err := ReadSingleFlow(data) + if err != nil { + t.Fatalf("ReadSingleFlow (round-trip) failed: %v", err) + } + + if flow2.Name != flow.Name { + t.Error("round-trip mismatch") + } +} diff --git a/packages/server/pkg/openyaml/environment.go b/packages/server/pkg/openyaml/environment.go new file mode 100644 index 00000000..d4aece8a --- /dev/null +++ b/packages/server/pkg/openyaml/environment.go @@ -0,0 +1,21 @@ +package openyaml + +import ( + "gopkg.in/yaml.v3" + + yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" +) + +// ReadSingleEnvironment parses one environment .yaml file. +func ReadSingleEnvironment(data []byte) (*yfs.YamlEnvironmentV2, error) { + var env yfs.YamlEnvironmentV2 + if err := yaml.Unmarshal(data, &env); err != nil { + return nil, err + } + return &env, nil +} + +// WriteSingleEnvironment serializes one environment to YAML. +func WriteSingleEnvironment(env yfs.YamlEnvironmentV2) ([]byte, error) { + return yaml.Marshal(env) +} diff --git a/packages/server/pkg/openyaml/flow.go b/packages/server/pkg/openyaml/flow.go new file mode 100644 index 00000000..6109a9ca --- /dev/null +++ b/packages/server/pkg/openyaml/flow.go @@ -0,0 +1,21 @@ +package openyaml + +import ( + "gopkg.in/yaml.v3" + + yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" +) + +// ReadSingleFlow parses one flow .yaml file. +func ReadSingleFlow(data []byte) (*yfs.YamlFlowFlowV2, error) { + var flow yfs.YamlFlowFlowV2 + if err := yaml.Unmarshal(data, &flow); err != nil { + return nil, err + } + return &flow, nil +} + +// WriteSingleFlow serializes one flow to YAML. 
+func WriteSingleFlow(flow yfs.YamlFlowFlowV2) ([]byte, error) { + return yaml.Marshal(flow) +} diff --git a/packages/server/pkg/openyaml/request.go b/packages/server/pkg/openyaml/request.go new file mode 100644 index 00000000..978c1227 --- /dev/null +++ b/packages/server/pkg/openyaml/request.go @@ -0,0 +1,24 @@ +// Package openyaml implements the OpenYAML format for multi-file collections. +// It reuses yamlflowsimplev2 types for individual files and provides +// directory-level read/write operations. No dependency on topencollection. +package openyaml + +import ( + "gopkg.in/yaml.v3" + + yfs "github.com/the-dev-tools/dev-tools/packages/server/pkg/translate/yamlflowsimplev2" +) + +// ReadSingleRequest parses one request .yaml file. +func ReadSingleRequest(data []byte) (*yfs.YamlRequestDefV2, error) { + var req yfs.YamlRequestDefV2 + if err := yaml.Unmarshal(data, &req); err != nil { + return nil, err + } + return &req, nil +} + +// WriteSingleRequest serializes one request to YAML. +func WriteSingleRequest(req yfs.YamlRequestDefV2) ([]byte, error) { + return yaml.Marshal(req) +} diff --git a/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml b/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml new file mode 100644 index 00000000..8ac570a5 --- /dev/null +++ b/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml @@ -0,0 +1,4 @@ +name: Development +variables: + base_url: "http://localhost:3000" + token: "dev-token-123" diff --git a/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml b/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml new file mode 100644 index 00000000..5c6549b2 --- /dev/null +++ b/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml @@ -0,0 +1,9 @@ +name: Smoke Test +variables: + - name: auth_token + value: "" +steps: + - request: + name: Get Users + method: GET + url: "{{base_url}}/users" diff --git 
a/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml b/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml new file mode 100644 index 00000000..542eab4b --- /dev/null +++ b/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml @@ -0,0 +1,15 @@ +name: Create User +method: POST +url: "{{base_url}}/users" +order: 2 +headers: + Content-Type: application/json +body: + type: raw + raw: | + { + "name": "John Doe", + "email": "john@example.com" + } +assertions: + - "res.status eq 201" diff --git a/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml b/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml new file mode 100644 index 00000000..047ed9d6 --- /dev/null +++ b/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml @@ -0,0 +1,15 @@ +name: Get Users +method: GET +url: "{{base_url}}/users" +description: Fetch all users +order: 1 +headers: + Accept: application/json +query_params: + - name: page + value: "1" + - name: limit + value: "10" + enabled: false +assertions: + - "res.status eq 200" diff --git a/packages/server/pkg/service/sworkspace/workspace_mapper.go b/packages/server/pkg/service/sworkspace/workspace_mapper.go index a7fec589..d1a53a44 100644 --- a/packages/server/pkg/service/sworkspace/workspace_mapper.go +++ b/packages/server/pkg/service/sworkspace/workspace_mapper.go @@ -17,6 +17,9 @@ func ConvertToDBWorkspace(workspace mworkspace.Workspace) gen.Workspace { ActiveEnv: workspace.ActiveEnv, GlobalEnv: workspace.GlobalEnv, DisplayOrder: workspace.Order, + SyncPath: workspace.SyncPath, + SyncFormat: workspace.SyncFormat, + SyncEnabled: workspace.SyncEnabled, } } @@ -30,5 +33,8 @@ func ConvertToModelWorkspace(workspace gen.Workspace) mworkspace.Workspace { ActiveEnv: workspace.ActiveEnv, GlobalEnv: workspace.GlobalEnv, Order: workspace.DisplayOrder, + SyncPath: workspace.SyncPath, + SyncFormat: workspace.SyncFormat, + SyncEnabled: workspace.SyncEnabled, } 
} diff --git a/packages/server/pkg/service/sworkspace/workspace_reader.go b/packages/server/pkg/service/sworkspace/workspace_reader.go index 6d153009..486aa590 100644 --- a/packages/server/pkg/service/sworkspace/workspace_reader.go +++ b/packages/server/pkg/service/sworkspace/workspace_reader.go @@ -75,3 +75,11 @@ func (r *WorkspaceReader) GetWorkspacesByUserIDOrdered(ctx context.Context, user } return tgeneric.MassConvert(rawWorkspaces, ConvertToModelWorkspace), nil } + +func (r *WorkspaceReader) GetSynced(ctx context.Context) ([]mworkspace.Workspace, error) { + rawWorkspaces, err := r.queries.GetSyncedWorkspaces(ctx) + if err != nil { + return nil, err + } + return tgeneric.MassConvert(rawWorkspaces, ConvertToModelWorkspace), nil +} diff --git a/packages/server/pkg/service/sworkspace/workspace_writer.go b/packages/server/pkg/service/sworkspace/workspace_writer.go index 8c5b35f8..88c2ab8a 100644 --- a/packages/server/pkg/service/sworkspace/workspace_writer.go +++ b/packages/server/pkg/service/sworkspace/workspace_writer.go @@ -57,6 +57,20 @@ func (w *WorkspaceWriter) UpdateUpdatedTime(ctx context.Context, org *mworkspace return err } +func (w *WorkspaceWriter) UpdateSync(ctx context.Context, ws *mworkspace.Workspace) error { + err := w.queries.UpdateWorkspaceSync(ctx, gen.UpdateWorkspaceSyncParams{ + ID: ws.ID, + SyncPath: ws.SyncPath, + SyncFormat: ws.SyncFormat, + SyncEnabled: ws.SyncEnabled, + Updated: ws.Updated.Unix(), + }) + if errors.Is(err, sql.ErrNoRows) { + return ErrNoWorkspaceFound + } + return err +} + func (w *WorkspaceWriter) Delete(ctx context.Context, id idwrap.IDWrap) error { err := w.queries.DeleteWorkspace(ctx, id) if errors.Is(err, sql.ErrNoRows) { diff --git a/packages/server/pkg/translate/topencollection/auth.go b/packages/server/pkg/translate/topencollection/auth.go new file mode 100644 index 00000000..46ef4443 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/auth.go @@ -0,0 +1,65 @@ +package topencollection + +import ( + 
"encoding/base64" + "fmt" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" +) + +// convertAuth converts OpenCollection auth config into HTTP headers or search params. +// Returns additional headers and search params to append. +func convertAuth(auth *OCAuth, httpID idwrap.IDWrap) ([]mhttp.HTTPHeader, []mhttp.HTTPSearchParam) { + if auth == nil { + return nil, nil + } + + switch auth.Type { + case "bearer": + return []mhttp.HTTPHeader{{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: "Authorization", + Value: fmt.Sprintf("Bearer %s", auth.Token), + Enabled: true, + }}, nil + + case "basic": + encoded := base64.StdEncoding.EncodeToString( + []byte(fmt.Sprintf("%s:%s", auth.Username, auth.Password)), + ) + return []mhttp.HTTPHeader{{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: "Authorization", + Value: fmt.Sprintf("Basic %s", encoded), + Enabled: true, + }}, nil + + case "apikey": + if auth.Placement == "query" { + return nil, []mhttp.HTTPSearchParam{{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: auth.Key, + Value: auth.Value, + Enabled: true, + }} + } + // Default placement is header + return []mhttp.HTTPHeader{{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: auth.Key, + Value: auth.Value, + Enabled: true, + }}, nil + + case "none", "inherit", "": + return nil, nil + + default: + return nil, nil + } +} diff --git a/packages/server/pkg/translate/topencollection/body.go b/packages/server/pkg/translate/topencollection/body.go new file mode 100644 index 00000000..75b0a7d7 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/body.go @@ -0,0 +1,155 @@ +package topencollection + +import ( + "encoding/json" + "fmt" + + "gopkg.in/yaml.v3" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" +) + +// convertBody converts an OpenCollection body into DevTools body models. 
+func convertBody(body *OCBody, httpID idwrap.IDWrap) ( + bodyKind mhttp.HttpBodyKind, + bodyRaw *mhttp.HTTPBodyRaw, + bodyForms []mhttp.HTTPBodyForm, + bodyUrlencoded []mhttp.HTTPBodyUrlencoded, +) { + if body == nil || body.Type == "none" || body.Type == "" { + return mhttp.HttpBodyKindNone, nil, nil, nil + } + + switch body.Type { + case "json", "xml", "text": + rawData := extractRawData(body.Data) + return mhttp.HttpBodyKindRaw, &mhttp.HTTPBodyRaw{ + ID: idwrap.NewNow(), + HttpID: httpID, + RawData: []byte(rawData), + }, nil, nil + + case "form-urlencoded": + fields := extractFormFields(body.Data) + var items []mhttp.HTTPBodyUrlencoded + for i, f := range fields { + items = append(items, mhttp.HTTPBodyUrlencoded{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: f.Name, + Value: f.Value, + Enabled: !f.Disabled, + DisplayOrder: float32(i + 1), + }) + } + return mhttp.HttpBodyKindUrlEncoded, nil, nil, items + + case "multipart-form": + fields := extractFormFields(body.Data) + var items []mhttp.HTTPBodyForm + for i, f := range fields { + items = append(items, mhttp.HTTPBodyForm{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: f.Name, + Value: f.Value, + Description: f.ContentType, + Enabled: !f.Disabled, + DisplayOrder: float32(i + 1), + }) + } + return mhttp.HttpBodyKindFormData, nil, items, nil + + default: + // Unknown body type — treat as raw + rawData := extractRawData(body.Data) + return mhttp.HttpBodyKindRaw, &mhttp.HTTPBodyRaw{ + ID: idwrap.NewNow(), + HttpID: httpID, + RawData: []byte(rawData), + }, nil, nil + } +} + +// extractRawData converts the body data field to a string. 
+func extractRawData(data interface{}) string { + if data == nil { + return "" + } + + switch v := data.(type) { + case string: + return v + case map[string]interface{}: + // JSON object — marshal it + b, err := json.MarshalIndent(v, "", " ") + if err != nil { + return fmt.Sprintf("%v", v) + } + return string(b) + default: + return fmt.Sprintf("%v", v) + } +} + +// extractFormFields extracts form fields from the body data. +// Handles both []OCFormField (structured) and []interface{} (YAML decoded). +func extractFormFields(data interface{}) []OCFormField { + if data == nil { + return nil + } + + // If it's already []OCFormField (unlikely from YAML), return as is + if fields, ok := data.([]OCFormField); ok { + return fields + } + + // YAML decodes arrays as []interface{} + rawList, ok := data.([]interface{}) + if !ok { + return nil + } + + var fields []OCFormField + for _, item := range rawList { + m, ok := item.(map[string]interface{}) + if !ok { + // Try via YAML re-marshal + b, err := yaml.Marshal(item) + if err != nil { + continue + } + var f OCFormField + if err := yaml.Unmarshal(b, &f); err != nil { + continue + } + fields = append(fields, f) + continue + } + + f := OCFormField{ + Name: stringFromMap(m, "name"), + Value: stringFromMap(m, "value"), + } + if v, ok := m["disabled"].(bool); ok { + f.Disabled = v + } + if v, ok := m["contentType"].(string); ok { + f.ContentType = v + } + fields = append(fields, f) + } + + return fields +} + +func stringFromMap(m map[string]interface{}, key string) string { + if v, ok := m[key]; ok { + if s, ok := v.(string); ok { + return s + } + return fmt.Sprintf("%v", v) + } + return "" +} diff --git a/packages/server/pkg/translate/topencollection/converter.go b/packages/server/pkg/translate/topencollection/converter.go new file mode 100644 index 00000000..8010837b --- /dev/null +++ b/packages/server/pkg/translate/topencollection/converter.go @@ -0,0 +1,365 @@ +package topencollection + +import ( + "fmt" + "log/slog" + "os" + 
"path/filepath" + "strings" + "time" + + "gopkg.in/yaml.v3" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" +) + +// ConvertOptions configures the OpenCollection import. +type ConvertOptions struct { + WorkspaceID idwrap.IDWrap + Logger *slog.Logger +} + +// OpenCollectionResolved contains all converted DevTools models. +type OpenCollectionResolved struct { + CollectionName string + + HTTPRequests []mhttp.HTTP + HTTPHeaders []mhttp.HTTPHeader + HTTPSearchParams []mhttp.HTTPSearchParam + HTTPBodyForms []mhttp.HTTPBodyForm + HTTPBodyUrlencoded []mhttp.HTTPBodyUrlencoded + HTTPBodyRaw []mhttp.HTTPBodyRaw + HTTPAsserts []mhttp.HTTPAssert + Files []mfile.File + Environments []menv.Env + EnvironmentVars []menv.Variable +} + +// ConvertOpenCollection walks the given directory, parses each .yml file, and converts +// to DevTools models. Only info.type == "http" requests are imported. +// GraphQL, WebSocket, and gRPC types are skipped with a log warning. 
+func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCollectionResolved, error) { + logger := opts.Logger + if logger == nil { + logger = slog.Default() + } + + // Parse collection root + rootPath := filepath.Join(collectionPath, "opencollection.yml") + rootData, err := os.ReadFile(rootPath) + if err != nil { + return nil, fmt.Errorf("failed to read opencollection.yml: %w", err) + } + + var root OpenCollectionRoot + if err := yaml.Unmarshal(rootData, &root); err != nil { + return nil, fmt.Errorf("failed to parse opencollection.yml: %w", err) + } + + result := &OpenCollectionResolved{ + CollectionName: root.Info.Name, + } + + now := time.Now().UnixMilli() + + // Walk directory tree recursively + if err := walkCollection(collectionPath, collectionPath, nil, opts.WorkspaceID, now, result, logger); err != nil { + return nil, fmt.Errorf("failed to walk collection: %w", err) + } + + // Parse environments + envDir := filepath.Join(collectionPath, "environments") + if info, err := os.Stat(envDir); err == nil && info.IsDir() { + entries, err := os.ReadDir(envDir) + if err != nil { + return nil, fmt.Errorf("failed to read environments directory: %w", err) + } + + for _, entry := range entries { + if entry.IsDir() || !isYAMLFile(entry.Name()) { + continue + } + + envData, err := os.ReadFile(filepath.Join(envDir, entry.Name())) + if err != nil { + logger.Warn("failed to read environment file", "file", entry.Name(), "error", err) + continue + } + + var ocEnv OCEnvironment + if err := yaml.Unmarshal(envData, &ocEnv); err != nil { + logger.Warn("failed to parse environment file", "file", entry.Name(), "error", err) + continue + } + + env, vars := convertEnvironment(ocEnv, opts.WorkspaceID) + result.Environments = append(result.Environments, env) + result.EnvironmentVars = append(result.EnvironmentVars, vars...) 
+ } + } + + return result, nil +} + +// walkCollection recursively walks a directory in the collection, creating +// file entries for folders and converting request files. +func walkCollection( + rootPath string, + dirPath string, + parentID *idwrap.IDWrap, + workspaceID idwrap.IDWrap, + now int64, + result *OpenCollectionResolved, + logger *slog.Logger, +) error { + entries, err := os.ReadDir(dirPath) + if err != nil { + return fmt.Errorf("failed to read directory %s: %w", dirPath, err) + } + + // Separate folders and files, sort by name for consistent ordering + var dirs []os.DirEntry + var files []os.DirEntry + for _, entry := range entries { + if entry.IsDir() { + // Skip environments dir at root level (handled separately) + relPath, _ := filepath.Rel(rootPath, filepath.Join(dirPath, entry.Name())) + if relPath == "environments" { + continue + } + // Skip hidden directories + if strings.HasPrefix(entry.Name(), ".") { + continue + } + dirs = append(dirs, entry) + } else if isYAMLFile(entry.Name()) { + // Skip opencollection.yml and folder.yml + if entry.Name() == "opencollection.yml" || entry.Name() == "folder.yml" { + continue + } + files = append(files, entry) + } + } + + // Try to read folder.yml for folder metadata + folderSeqMap := make(map[string]int) + folderYMLPath := filepath.Join(dirPath, "folder.yml") + if _, err := os.Stat(folderYMLPath); err == nil { + // folder.yml exists but we don't need it for folder name — directory name is used + } + + // Process request files first + order := float64(1) + for _, fileEntry := range files { + filePath := filepath.Join(dirPath, fileEntry.Name()) + data, err := os.ReadFile(filePath) + if err != nil { + logger.Warn("failed to read file", "file", filePath, "error", err) + continue + } + + var ocReq OCRequest + if err := yaml.Unmarshal(data, &ocReq); err != nil { + logger.Warn("failed to parse request file", "file", filePath, "error", err) + continue + } + + // Check request type — only import HTTP + switch 
strings.ToLower(ocReq.Info.Type) { + case "http": + // Supported — continue + case "graphql": + logger.Warn("skipping graphql request (not supported)", "name", ocReq.Info.Name, "file", filePath) + continue + case "ws": + logger.Warn("skipping websocket request (not supported)", "name", ocReq.Info.Name, "file", filePath) + continue + case "grpc": + logger.Warn("skipping grpc request (not supported)", "name", ocReq.Info.Name, "file", filePath) + continue + default: + logger.Warn("skipping unknown request type", "type", ocReq.Info.Type, "name", ocReq.Info.Name, "file", filePath) + continue + } + + // Use seq for ordering if available + fileOrder := order + if ocReq.Info.Seq > 0 { + fileOrder = float64(ocReq.Info.Seq) + } + if seq, ok := folderSeqMap[fileEntry.Name()]; ok { + fileOrder = float64(seq) + } + + convertRequest(ocReq, workspaceID, parentID, fileOrder, now, result) + order++ + } + + // Process subdirectories + for _, dirEntry := range dirs { + subDirPath := filepath.Join(dirPath, dirEntry.Name()) + + // Create a folder file entry + folderID := idwrap.NewNow() + folderContentID := folderID + folderFile := mfile.File{ + ID: folderID, + WorkspaceID: workspaceID, + ParentID: parentID, + ContentID: &folderContentID, + ContentType: mfile.ContentTypeFolder, + Name: dirEntry.Name(), + Order: order, + UpdatedAt: time.UnixMilli(now), + } + result.Files = append(result.Files, folderFile) + + // Recurse into subdirectory + if err := walkCollection(rootPath, subDirPath, &folderID, workspaceID, now, result, logger); err != nil { + return err + } + order++ + } + + return nil +} + +// convertRequest converts a single OpenCollection request into DevTools models. 
+func convertRequest( + ocReq OCRequest, + workspaceID idwrap.IDWrap, + parentID *idwrap.IDWrap, + order float64, + now int64, + result *OpenCollectionResolved, +) { + httpID := idwrap.NewNow() + + method := "GET" + url := "" + if ocReq.HTTP != nil { + method = strings.ToUpper(ocReq.HTTP.Method) + if method == "" { + method = "GET" + } + url = ocReq.HTTP.URL + } + + // Determine body kind + bodyKind := mhttp.HttpBodyKindNone + if ocReq.HTTP != nil && ocReq.HTTP.Body != nil { + bodyKind, _, _, _ = convertBody(ocReq.HTTP.Body, httpID) + } + + // Create HTTP request + httpReq := mhttp.HTTP{ + ID: httpID, + WorkspaceID: workspaceID, + Name: ocReq.Info.Name, + Url: url, + Method: method, + Description: ocReq.Docs, + BodyKind: bodyKind, + CreatedAt: now, + UpdatedAt: now, + } + result.HTTPRequests = append(result.HTTPRequests, httpReq) + + // Create file entry for this request + contentID := httpID + fileID := idwrap.NewNow() + file := mfile.File{ + ID: fileID, + WorkspaceID: workspaceID, + ParentID: parentID, + ContentID: &contentID, + ContentType: mfile.ContentTypeHTTP, + Name: ocReq.Info.Name, + Order: order, + UpdatedAt: time.UnixMilli(now), + } + result.Files = append(result.Files, file) + + if ocReq.HTTP == nil { + return + } + + // Convert headers + for i, h := range ocReq.HTTP.Headers { + result.HTTPHeaders = append(result.HTTPHeaders, mhttp.HTTPHeader{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: h.Name, + Value: h.Value, + Enabled: !h.Disabled, + DisplayOrder: float32(i + 1), + CreatedAt: now, + UpdatedAt: now, + }) + } + + // Convert params + for i, p := range ocReq.HTTP.Params { + if strings.ToLower(p.Type) == "query" || p.Type == "" { + result.HTTPSearchParams = append(result.HTTPSearchParams, mhttp.HTTPSearchParam{ + ID: idwrap.NewNow(), + HttpID: httpID, + Key: p.Name, + Value: p.Value, + Enabled: !p.Disabled, + DisplayOrder: float64(i + 1), + CreatedAt: now, + UpdatedAt: now, + }) + } + // Path params are embedded in the URL — no separate model + } + + 
// Convert auth → headers/params
+	authHeaders, authParams := convertAuth(ocReq.HTTP.Auth, httpID)
+	result.HTTPHeaders = append(result.HTTPHeaders, authHeaders...)
+	result.HTTPSearchParams = append(result.HTTPSearchParams, authParams...)
+
+	// Convert body
+	_, bodyRaw, bodyForms, bodyUrlencoded := convertBody(ocReq.HTTP.Body, httpID)
+	if bodyRaw != nil {
+		bodyRaw.CreatedAt = now
+		bodyRaw.UpdatedAt = now
+		result.HTTPBodyRaw = append(result.HTTPBodyRaw, *bodyRaw)
+	}
+	result.HTTPBodyForms = append(result.HTTPBodyForms, bodyForms...)
+	result.HTTPBodyUrlencoded = append(result.HTTPBodyUrlencoded, bodyUrlencoded...)
+
+	// Convert assertions
+	if ocReq.Runtime != nil {
+		for i, a := range ocReq.Runtime.Assertions {
+			expr := a.Expression
+			if a.Operator != "" {
+				expr = fmt.Sprintf("%s %s %s", a.Expression, a.Operator, a.Value)
+			}
+			result.HTTPAsserts = append(result.HTTPAsserts, mhttp.HTTPAssert{
+				ID:           idwrap.NewNow(),
+				HttpID:       httpID,
+				Value:        strings.TrimSpace(expr),
+				Enabled:      true,
+				DisplayOrder: float32(i + 1),
+				CreatedAt:    now,
+				UpdatedAt:    now,
+			})
+		}
+
+		// Scripts are intentionally skipped (not imported; no warning is emitted)
+		if len(ocReq.Runtime.Scripts) > 0 {
+			// Scripts are intentionally not imported — DevTools uses JS flow nodes instead
+		}
+	}
+}
+
+// isYAMLFile checks if a filename has a YAML extension.
+func isYAMLFile(name string) bool { + ext := strings.ToLower(filepath.Ext(name)) + return ext == ".yml" || ext == ".yaml" +} diff --git a/packages/server/pkg/translate/topencollection/converter_test.go b/packages/server/pkg/translate/topencollection/converter_test.go new file mode 100644 index 00000000..4f258b68 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/converter_test.go @@ -0,0 +1,294 @@ +package topencollection + +import ( + "path/filepath" + "runtime" + "testing" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" +) + +func testdataPath(name string) string { + _, filename, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(filename), "testdata", name) +} + +func TestConvertOpenCollection_BasicCollection(t *testing.T) { + collectionPath := testdataPath("basic-collection") + opts := ConvertOptions{ + WorkspaceID: idwrap.NewNow(), + } + + result, err := ConvertOpenCollection(collectionPath, opts) + if err != nil { + t.Fatalf("ConvertOpenCollection failed: %v", err) + } + + // Verify collection name + if result.CollectionName != "Test API Collection" { + t.Errorf("expected collection name 'Test API Collection', got %q", result.CollectionName) + } + + // Verify HTTP requests: Get Users, Create User, Login = 3 + if len(result.HTTPRequests) != 3 { + t.Fatalf("expected 3 HTTP requests, got %d", len(result.HTTPRequests)) + } + + // Build lookup by name + reqByName := make(map[string]mhttp.HTTP) + for _, r := range result.HTTPRequests { + reqByName[r.Name] = r + } + + // Verify Get Users + getUsers, ok := reqByName["Get Users"] + if !ok { + t.Fatal("missing 'Get Users' request") + } + if getUsers.Method != "GET" { + t.Errorf("Get Users method: expected GET, got %s", getUsers.Method) + } + if getUsers.Url != "{{base_url}}/users" { + t.Errorf("Get Users URL: expected 
{{base_url}}/users, got %s", getUsers.Url) + } + if getUsers.Description != "Fetch all users with pagination" { + t.Errorf("Get Users description mismatch: got %q", getUsers.Description) + } + + // Verify Create User + createUser, ok := reqByName["Create User"] + if !ok { + t.Fatal("missing 'Create User' request") + } + if createUser.Method != "POST" { + t.Errorf("Create User method: expected POST, got %s", createUser.Method) + } + if createUser.BodyKind != mhttp.HttpBodyKindRaw { + t.Errorf("Create User body kind: expected Raw (%d), got %d", mhttp.HttpBodyKindRaw, createUser.BodyKind) + } + + // Verify Login + login, ok := reqByName["Login"] + if !ok { + t.Fatal("missing 'Login' request") + } + if login.Method != "POST" { + t.Errorf("Login method: expected POST, got %s", login.Method) + } + + // Verify headers exist + if len(result.HTTPHeaders) == 0 { + t.Error("expected some headers, got none") + } + + // Check for bearer auth header on Create User + var foundBearerAuth bool + for _, h := range result.HTTPHeaders { + if h.HttpID == createUser.ID && h.Key == "Authorization" { + foundBearerAuth = true + if h.Value != "Bearer {{token}}" { + t.Errorf("expected 'Bearer {{token}}', got %q", h.Value) + } + } + } + if !foundBearerAuth { + t.Error("missing bearer auth header for Create User") + } + + // Verify search params (Get Users has page + limit) + var getUsersParams int + for _, p := range result.HTTPSearchParams { + if p.HttpID == getUsers.ID { + getUsersParams++ + } + } + if getUsersParams != 2 { + t.Errorf("expected 2 search params for Get Users, got %d", getUsersParams) + } + + // Verify body raw exists for Create User + var createUserBodyRaw int + for _, b := range result.HTTPBodyRaw { + if b.HttpID == createUser.ID { + createUserBodyRaw++ + } + } + if createUserBodyRaw != 1 { + t.Errorf("expected 1 body raw for Create User, got %d", createUserBodyRaw) + } + + // Verify assertions + if len(result.HTTPAsserts) == 0 { + t.Error("expected some assertions, got 
none") + } + + // Verify files + if len(result.Files) == 0 { + t.Error("expected some files, got none") + } + + // Verify folder structure: should have "users" and "auth" folders + var folderCount int + for _, f := range result.Files { + if f.ContentType == mfile.ContentTypeFolder { + folderCount++ + } + } + if folderCount != 2 { + t.Errorf("expected 2 folders (users, auth), got %d", folderCount) + } + + // Verify environments + if len(result.Environments) != 2 { + t.Errorf("expected 2 environments, got %d", len(result.Environments)) + } + + // Verify environment variables + if len(result.EnvironmentVars) == 0 { + t.Error("expected some environment variables, got none") + } +} + +func TestConvertOpenCollection_SkipsNonHTTP(t *testing.T) { + // Create a temp directory with a graphql request + dir := t.TempDir() + + // Write opencollection.yml + writeYAML(t, filepath.Join(dir, "opencollection.yml"), map[string]interface{}{ + "opencollection": "1.0.0", + "info": map[string]interface{}{ + "name": "Test", + }, + }) + + // Write a graphql request + writeYAML(t, filepath.Join(dir, "graphql-query.yml"), map[string]interface{}{ + "info": map[string]interface{}{ + "name": "GraphQL Query", + "type": "graphql", + }, + }) + + // Write an HTTP request + writeYAML(t, filepath.Join(dir, "http-request.yml"), map[string]interface{}{ + "info": map[string]interface{}{ + "name": "HTTP Request", + "type": "http", + }, + "http": map[string]interface{}{ + "method": "GET", + "url": "https://example.com", + }, + }) + + opts := ConvertOptions{ + WorkspaceID: idwrap.NewNow(), + } + + result, err := ConvertOpenCollection(dir, opts) + if err != nil { + t.Fatalf("ConvertOpenCollection failed: %v", err) + } + + // Should only have 1 HTTP request (graphql skipped) + if len(result.HTTPRequests) != 1 { + t.Errorf("expected 1 HTTP request (graphql skipped), got %d", len(result.HTTPRequests)) + } + + if result.HTTPRequests[0].Name != "HTTP Request" { + t.Errorf("expected 'HTTP Request', got %q", 
result.HTTPRequests[0].Name) + } +} + +func TestConvertAuth_Bearer(t *testing.T) { + httpID := idwrap.NewNow() + auth := &OCAuth{Type: "bearer", Token: "my-token"} + + headers, params := convertAuth(auth, httpID) + + if len(headers) != 1 { + t.Fatalf("expected 1 header, got %d", len(headers)) + } + if headers[0].Key != "Authorization" || headers[0].Value != "Bearer my-token" { + t.Errorf("unexpected header: %s: %s", headers[0].Key, headers[0].Value) + } + if len(params) != 0 { + t.Errorf("expected 0 params, got %d", len(params)) + } +} + +func TestConvertAuth_APIKey_Query(t *testing.T) { + httpID := idwrap.NewNow() + auth := &OCAuth{Type: "apikey", Key: "api_key", Value: "secret123", Placement: "query"} + + headers, params := convertAuth(auth, httpID) + + if len(headers) != 0 { + t.Errorf("expected 0 headers, got %d", len(headers)) + } + if len(params) != 1 { + t.Fatalf("expected 1 param, got %d", len(params)) + } + if params[0].Key != "api_key" || params[0].Value != "secret123" { + t.Errorf("unexpected param: %s=%s", params[0].Key, params[0].Value) + } +} + +func TestConvertBody_JSON(t *testing.T) { + httpID := idwrap.NewNow() + body := &OCBody{Type: "json", Data: `{"key": "value"}`} + + kind, raw, forms, urlencoded := convertBody(body, httpID) + + if kind != mhttp.HttpBodyKindRaw { + t.Errorf("expected Raw body kind, got %d", kind) + } + if raw == nil { + t.Fatal("expected non-nil raw body") + } + if string(raw.RawData) != `{"key": "value"}` { + t.Errorf("unexpected raw data: %s", string(raw.RawData)) + } + if len(forms) != 0 { + t.Errorf("expected 0 forms, got %d", len(forms)) + } + if len(urlencoded) != 0 { + t.Errorf("expected 0 urlencoded, got %d", len(urlencoded)) + } +} + +func TestConvertBody_None(t *testing.T) { + httpID := idwrap.NewNow() + kind, raw, forms, urlencoded := convertBody(nil, httpID) + + if kind != mhttp.HttpBodyKindNone { + t.Errorf("expected None body kind, got %d", kind) + } + if raw != nil { + t.Error("expected nil raw body") + } + if 
len(forms) != 0 { + t.Errorf("expected 0 forms, got %d", len(forms)) + } + if len(urlencoded) != 0 { + t.Errorf("expected 0 urlencoded, got %d", len(urlencoded)) + } +} + +// writeYAML is a test helper to write YAML files. +func writeYAML(t *testing.T, path string, data interface{}) { + t.Helper() + yamlData, err := yamlMarshal(data) + if err != nil { + t.Fatalf("failed to marshal yaml: %v", err) + } + if err := writeFile(path, yamlData); err != nil { + t.Fatalf("failed to write file %s: %v", path, err) + } +} + +func yamlMarshal(v interface{}) ([]byte, error) { + return yamlMarshalImpl(v) +} diff --git a/packages/server/pkg/translate/topencollection/environment.go b/packages/server/pkg/translate/topencollection/environment.go new file mode 100644 index 00000000..55130dd4 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/environment.go @@ -0,0 +1,36 @@ +package topencollection + +import ( + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" +) + +// convertEnvironment converts an OpenCollection environment to DevTools models. 
+func convertEnvironment(ocEnv OCEnvironment, workspaceID idwrap.IDWrap) (menv.Env, []menv.Variable) { + envID := idwrap.NewNow() + env := menv.Env{ + ID: envID, + WorkspaceID: workspaceID, + Type: menv.EnvNormal, + Name: ocEnv.Name, + } + + var vars []menv.Variable + for i, v := range ocEnv.Variables { + enabled := true + if v.Enabled != nil { + enabled = *v.Enabled + } + + vars = append(vars, menv.Variable{ + ID: idwrap.NewNow(), + EnvID: envID, + VarKey: v.Name, + Value: v.Value, + Enabled: enabled, + Order: float64(i + 1), + }) + } + + return env, vars +} diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml new file mode 100644 index 00000000..aaa53be2 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml @@ -0,0 +1,24 @@ +info: + name: Login + type: http + seq: 1 + +http: + method: POST + url: "{{base_url}}/auth/login" + headers: + - name: Content-Type + value: application/json + body: + type: json + data: |- + { + "email": "test@example.com", + "password": "test123" + } + +runtime: + assertions: + - expression: res.status + operator: eq + value: "200" diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml new file mode 100644 index 00000000..eeb167d4 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml @@ -0,0 +1,6 @@ +name: Development +variables: + - name: base_url + value: "http://localhost:3000" + - name: token + value: "dev-token-123" diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml new file mode 100644 index 
00000000..7471da38 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml @@ -0,0 +1,7 @@ +name: Production +variables: + - name: base_url + value: "https://api.example.com" + - name: token + value: "prod-token-456" + enabled: false diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml new file mode 100644 index 00000000..41e0ddb3 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml @@ -0,0 +1,5 @@ +opencollection: "1.0.0" +info: + name: "Test API Collection" + summary: "A test collection for unit tests" + version: "1.0.0" diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml new file mode 100644 index 00000000..dde47f9c --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml @@ -0,0 +1,32 @@ +info: + name: Create User + type: http + seq: 2 + +http: + method: POST + url: "{{base_url}}/users" + headers: + - name: Content-Type + value: application/json + body: + type: json + data: |- + { + "name": "John Doe", + "email": "john@example.com" + } + auth: + type: bearer + token: "{{token}}" + +runtime: + assertions: + - expression: res.status + operator: eq + value: "201" + - expression: res.body.id + operator: neq + value: "" + +docs: "Creates a new user account" diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml new file mode 100644 index 00000000..e58e09ac --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml 
@@ -0,0 +1,26 @@ +info: + name: Get Users + type: http + seq: 1 + +http: + method: GET + url: "{{base_url}}/users" + headers: + - name: Accept + value: application/json + params: + - name: page + value: "1" + type: query + - name: limit + value: "10" + type: query + +runtime: + assertions: + - expression: res.status + operator: eq + value: "200" + +docs: "Fetch all users with pagination" diff --git a/packages/server/pkg/translate/topencollection/testhelper_test.go b/packages/server/pkg/translate/topencollection/testhelper_test.go new file mode 100644 index 00000000..10d896b9 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/testhelper_test.go @@ -0,0 +1,15 @@ +package topencollection + +import ( + "os" + + "gopkg.in/yaml.v3" +) + +func yamlMarshalImpl(v interface{}) ([]byte, error) { + return yaml.Marshal(v) +} + +func writeFile(path string, data []byte) error { + return os.WriteFile(path, data, 0o644) +} diff --git a/packages/server/pkg/translate/topencollection/types.go b/packages/server/pkg/translate/topencollection/types.go new file mode 100644 index 00000000..299b39b3 --- /dev/null +++ b/packages/server/pkg/translate/topencollection/types.go @@ -0,0 +1,146 @@ +// Package topencollection parses Bruno's OpenCollection YAML format and converts +// it into DevTools internal models. This package is isolated from the rest of +// DevTools and can be removed without affecting other functionality. +package topencollection + +// OpenCollectionRoot represents the top-level opencollection.yml file. +type OpenCollectionRoot struct { + OpenCollection string `yaml:"opencollection"` + Info OpenCollectionInfo `yaml:"info"` +} + +// OpenCollectionInfo contains collection metadata. +type OpenCollectionInfo struct { + Name string `yaml:"name"` + Summary string `yaml:"summary,omitempty"` + Version string `yaml:"version,omitempty"` + Authors []OpenCollectionAuthor `yaml:"authors,omitempty"` +} + +// OpenCollectionAuthor represents a collection author. 
+type OpenCollectionAuthor struct { + Name string `yaml:"name"` + Email string `yaml:"email,omitempty"` +} + +// OCRequest represents a single request file in the OpenCollection format. +type OCRequest struct { + Info OCRequestInfo `yaml:"info"` + HTTP *OCHTTPBlock `yaml:"http,omitempty"` + Runtime *OCRuntime `yaml:"runtime,omitempty"` + Settings *OCSettings `yaml:"settings,omitempty"` + Docs string `yaml:"docs,omitempty"` +} + +// OCRequestInfo contains request metadata. +type OCRequestInfo struct { + Name string `yaml:"name"` + Type string `yaml:"type"` // http, graphql, ws, grpc + Seq int `yaml:"seq,omitempty"` + Tags []string `yaml:"tags,omitempty"` +} + +// OCHTTPBlock contains the HTTP request definition. +type OCHTTPBlock struct { + Method string `yaml:"method"` + URL string `yaml:"url"` + Headers []OCHeader `yaml:"headers,omitempty"` + Params []OCParam `yaml:"params,omitempty"` + Body *OCBody `yaml:"body,omitempty"` + Auth *OCAuth `yaml:"auth,omitempty"` +} + +// OCHeader represents an HTTP header. +type OCHeader struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Disabled bool `yaml:"disabled,omitempty"` +} + +// OCParam represents a request parameter (query or path). +type OCParam struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Type string `yaml:"type"` // query, path + Disabled bool `yaml:"disabled,omitempty"` +} + +// OCBody represents the request body. +type OCBody struct { + Type string `yaml:"type"` // json, xml, text, form-urlencoded, multipart-form, graphql, none + Data interface{} `yaml:"data"` // string for raw, []OCFormField for forms +} + +// OCFormField represents a form field in multipart or urlencoded bodies. +type OCFormField struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Disabled bool `yaml:"disabled,omitempty"` + ContentType string `yaml:"contentType,omitempty"` +} + +// OCAuth represents authentication configuration. 
+type OCAuth struct { + Type string `yaml:"type"` // none, inherit, basic, bearer, apikey + Token string `yaml:"token,omitempty"` // bearer + Username string `yaml:"username,omitempty"` // basic + Password string `yaml:"password,omitempty"` // basic + Key string `yaml:"key,omitempty"` // apikey + Value string `yaml:"value,omitempty"` // apikey + Placement string `yaml:"placement,omitempty"` // apikey: header, query +} + +// OCRuntime contains runtime configuration (scripts, assertions, actions). +type OCRuntime struct { + Scripts []OCScript `yaml:"scripts,omitempty"` + Assertions []OCAssertion `yaml:"assertions,omitempty"` + Actions []OCAction `yaml:"actions,omitempty"` +} + +// OCScript represents a pre/post request script. +type OCScript struct { + Type string `yaml:"type"` // pre-request, post-response + Code string `yaml:"code"` +} + +// OCAssertion represents a test assertion. +type OCAssertion struct { + Expression string `yaml:"expression"` + Operator string `yaml:"operator"` + Value string `yaml:"value,omitempty"` +} + +// OCAction represents a runtime action. +type OCAction struct { + Type string `yaml:"type"` + Key string `yaml:"key,omitempty"` + Value string `yaml:"value,omitempty"` +} + +// OCSettings contains request-level settings. +type OCSettings struct { + EncodeUrl *bool `yaml:"encodeUrl,omitempty"` + Timeout *int `yaml:"timeout,omitempty"` + FollowRedirects *bool `yaml:"followRedirects,omitempty"` + MaxRedirects *int `yaml:"maxRedirects,omitempty"` +} + +// OCEnvironment represents an environment file. +type OCEnvironment struct { + Name string `yaml:"name"` + Variables []OCEnvVariable `yaml:"variables"` +} + +// OCEnvVariable represents an environment variable. +type OCEnvVariable struct { + Name string `yaml:"name"` + Value string `yaml:"value"` + Enabled *bool `yaml:"enabled,omitempty"` + Secret *bool `yaml:"secret,omitempty"` +} + +// OCFolder represents a folder.yml file in a directory. 
+type OCFolder struct { + Name string `yaml:"name,omitempty"` + Seq int `yaml:"seq,omitempty"` +} diff --git a/packages/server/pkg/translate/yamlflowsimplev2/types.go b/packages/server/pkg/translate/yamlflowsimplev2/types.go index 26e88308..9dfbd9ed 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/types.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/types.go @@ -49,6 +49,7 @@ type YamlRequestDefV2 struct { Body *YamlBodyUnion `yaml:"body,omitempty"` Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` Description string `yaml:"description,omitempty"` + Order float64 `yaml:"order,omitempty"` // File-tree ordering for multi-file collections } // YamlFlowFlowV2 represents a flow in the modern YAML format From 8038b38e72cd7fc8b42491942704c3c09a490d12 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 17:59:07 +0000 Subject: [PATCH 10/20] test: add migration and workspace sync service tests - TestWorkspaceSyncColumnsCreated: verifies migration adds sync_path, sync_format, sync_enabled to workspaces table with correct constraints - TestWorkspaceSyncUpdateAndGet: tests full CRUD for sync fields via WorkspaceService.UpdateSync/GetSynced (enable, read back, disable) - Make migration idempotent (skip columns that already exist in fresh schema) - Add UpdateSync/GetSynced delegate methods to WorkspaceService https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../01KGZ57RM_add_workspace_sync.go | 41 +++++++-- .../internal/migrations/migrations_test.go | 60 +++++++++++++ .../pkg/service/sworkspace/sworkspace_test.go | 89 +++++++++++++++++++ .../pkg/service/sworkspace/workspace.go | 8 ++ 4 files changed, 189 insertions(+), 9 deletions(-) diff --git a/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go b/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go index 1414e143..0c7bae61 100644 --- a/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go +++ 
b/packages/server/internal/migrations/01KGZ57RM_add_workspace_sync.go @@ -12,7 +12,7 @@ import ( const MigrationAddWorkspaceSyncID = "01KGZ57RM25ANJQA21JQGJ6D2M" // MigrationAddWorkspaceSyncChecksum is a stable hash of this migration. -const MigrationAddWorkspaceSyncChecksum = "sha256:add-workspace-sync-v1" +const MigrationAddWorkspaceSyncChecksum = "sha256:add-workspace-sync-v2" func init() { if err := migrate.Register(migrate.Migration{ @@ -27,18 +27,41 @@ func init() { } } -// applyWorkspaceSync adds folder sync columns to the workspaces table. -func applyWorkspaceSync(ctx context.Context, tx *sql.Tx) error { - if _, err := tx.ExecContext(ctx, `ALTER TABLE workspaces ADD COLUMN sync_path TEXT`); err != nil { - return fmt.Errorf("add sync_path column: %w", err) +// columnExists checks if a column exists on a table using pragma_table_info. +func columnExists(ctx context.Context, tx *sql.Tx, table, column string) (bool, error) { + var count int + err := tx.QueryRowContext(ctx, + fmt.Sprintf(`SELECT COUNT(*) FROM pragma_table_info('%s') WHERE name = '%s'`, table, column), + ).Scan(&count) + if err != nil { + return false, err } + return count > 0, nil +} - if _, err := tx.ExecContext(ctx, `ALTER TABLE workspaces ADD COLUMN sync_format TEXT`); err != nil { - return fmt.Errorf("add sync_format column: %w", err) +// applyWorkspaceSync adds folder sync columns to the workspaces table. +// Idempotent: skips columns that already exist (e.g. from fresh schema). 
+func applyWorkspaceSync(ctx context.Context, tx *sql.Tx) error { + columns := []struct { + name string + ddl string + }{ + {"sync_path", `ALTER TABLE workspaces ADD COLUMN sync_path TEXT`}, + {"sync_format", `ALTER TABLE workspaces ADD COLUMN sync_format TEXT`}, + {"sync_enabled", `ALTER TABLE workspaces ADD COLUMN sync_enabled BOOLEAN NOT NULL DEFAULT 0`}, } - if _, err := tx.ExecContext(ctx, `ALTER TABLE workspaces ADD COLUMN sync_enabled BOOLEAN NOT NULL DEFAULT 0`); err != nil { - return fmt.Errorf("add sync_enabled column: %w", err) + for _, col := range columns { + exists, err := columnExists(ctx, tx, "workspaces", col.name) + if err != nil { + return fmt.Errorf("check %s column: %w", col.name, err) + } + if exists { + continue + } + if _, err := tx.ExecContext(ctx, col.ddl); err != nil { + return fmt.Errorf("add %s column: %w", col.name, err) + } } return nil diff --git a/packages/server/internal/migrations/migrations_test.go b/packages/server/internal/migrations/migrations_test.go index c9948a0c..d3ac44e7 100644 --- a/packages/server/internal/migrations/migrations_test.go +++ b/packages/server/internal/migrations/migrations_test.go @@ -226,6 +226,66 @@ func TestFilesTableConstraintUpdated(t *testing.T) { } } +func TestWorkspaceSyncColumnsCreated(t *testing.T) { + ctx := context.Background() + + db, cleanup, err := sqlitemem.NewSQLiteMem(ctx) + if err != nil { + t.Fatalf("failed to create test db: %v", err) + } + t.Cleanup(cleanup) + + cfg := Config{ + DatabasePath: ":memory:", + DataDir: t.TempDir(), + } + if err := Run(ctx, db, cfg); err != nil { + t.Fatalf("failed to run migrations: %v", err) + } + + // Verify workspace sync migration is registered + migrations := migrate.List() + found := false + for _, m := range migrations { + if m.ID == MigrationAddWorkspaceSyncID { + found = true + break + } + } + if !found { + t.Fatalf("MigrationAddWorkspaceSyncID not found in registered migrations") + } + + // Verify sync columns exist on workspaces table + 
columns := []string{"sync_path", "sync_format", "sync_enabled"} + for _, col := range columns { + var cid int + var name, ctype string + var notnull int + var dfltValue *string + var pk int + err := db.QueryRowContext(ctx, + "SELECT cid, name, type, \"notnull\", dflt_value, pk FROM pragma_table_info('workspaces') WHERE name = '"+col+"'", + ).Scan(&cid, &name, &ctype, ¬null, &dfltValue, &pk) + if err != nil { + t.Errorf("column %s not found in workspaces table: %v", col, err) + } + } + + // Verify sync_enabled has a default of 0 + var notnull int + var dfltValue *string + err = db.QueryRowContext(ctx, + "SELECT \"notnull\", dflt_value FROM pragma_table_info('workspaces') WHERE name = 'sync_enabled'", + ).Scan(¬null, &dfltValue) + if err != nil { + t.Fatalf("failed to check sync_enabled column: %v", err) + } + if notnull != 1 { + t.Errorf("sync_enabled should be NOT NULL, got notnull=%d", notnull) + } +} + func contains(s, substr string) bool { return len(s) >= len(substr) && (s == substr || len(s) > 0 && containsHelper(s, substr)) } diff --git a/packages/server/pkg/service/sworkspace/sworkspace_test.go b/packages/server/pkg/service/sworkspace/sworkspace_test.go index fbb63103..0d5a22af 100644 --- a/packages/server/pkg/service/sworkspace/sworkspace_test.go +++ b/packages/server/pkg/service/sworkspace/sworkspace_test.go @@ -89,3 +89,92 @@ func TestWorkspaceDeletion(t *testing.T) { require.Equal(t, 0, listAfter[0].ID.Compare(ws1), "Expected first workspace to be WS1") require.Equal(t, 0, listAfter[1].ID.Compare(ws3), "Expected second workspace to be WS3") } + +func TestWorkspaceSyncUpdateAndGet(t *testing.T) { + ctx := context.Background() + db, cleanup, err := sqlitemem.NewSQLiteMem(ctx) + require.NoError(t, err, "failed to create db") + defer cleanup() + + queries, err := gen.Prepare(ctx, db) + require.NoError(t, err, "failed to prepare queries") + + wsService := sworkspace.NewWorkspaceService(queries) + + // Create two workspaces + ws1ID := idwrap.NewNow() + ws1 := 
&mworkspace.Workspace{ + ID: ws1ID, + Name: "Synced Workspace", + Updated: dbtime.DBNow(), + Order: 0, + } + err = wsService.Create(ctx, ws1) + require.NoError(t, err, "create ws1") + + ws2ID := idwrap.NewNow() + ws2 := &mworkspace.Workspace{ + ID: ws2ID, + Name: "Not Synced", + Updated: dbtime.DBNow(), + Order: 1, + } + err = wsService.Create(ctx, ws2) + require.NoError(t, err, "create ws2") + + // Initially, GetSynced should return empty + synced, err := wsService.GetSynced(ctx) + require.NoError(t, err, "get synced (initial)") + require.Len(t, synced, 0, "no workspaces should be synced initially") + + // Enable sync on ws1 + syncPath := "/home/user/collections/my-api" + syncFormat := "open_yaml" + ws1.SyncPath = &syncPath + ws1.SyncFormat = &syncFormat + ws1.SyncEnabled = true + ws1.Updated = dbtime.DBNow() + err = wsService.UpdateSync(ctx, ws1) + require.NoError(t, err, "update sync") + + // GetSynced should now return ws1 + synced, err = wsService.GetSynced(ctx) + require.NoError(t, err, "get synced (after enable)") + require.Len(t, synced, 1, "one workspace should be synced") + require.Equal(t, 0, synced[0].ID.Compare(ws1ID), "synced workspace should be ws1") + require.Equal(t, &syncPath, synced[0].SyncPath) + require.Equal(t, &syncFormat, synced[0].SyncFormat) + require.True(t, synced[0].SyncEnabled) + + // Verify Get also returns sync fields + fetched, err := wsService.Get(ctx, ws1ID) + require.NoError(t, err, "get ws1") + require.Equal(t, &syncPath, fetched.SyncPath) + require.Equal(t, &syncFormat, fetched.SyncFormat) + require.True(t, fetched.SyncEnabled) + + // ws2 should NOT have sync fields + fetched2, err := wsService.Get(ctx, ws2ID) + require.NoError(t, err, "get ws2") + require.Nil(t, fetched2.SyncPath) + require.Nil(t, fetched2.SyncFormat) + require.False(t, fetched2.SyncEnabled) + + // Disable sync on ws1 + ws1.SyncEnabled = false + ws1.Updated = dbtime.DBNow() + err = wsService.UpdateSync(ctx, ws1) + require.NoError(t, err, "disable sync") + + // 
GetSynced should return empty again + synced, err = wsService.GetSynced(ctx) + require.NoError(t, err, "get synced (after disable)") + require.Len(t, synced, 0, "no workspaces should be synced after disable") + + // But the path/format should still be set + fetched, err = wsService.Get(ctx, ws1ID) + require.NoError(t, err, "get ws1 after disable") + require.Equal(t, &syncPath, fetched.SyncPath, "sync_path should persist after disable") + require.Equal(t, &syncFormat, fetched.SyncFormat, "sync_format should persist after disable") + require.False(t, fetched.SyncEnabled, "sync_enabled should be false") +} diff --git a/packages/server/pkg/service/sworkspace/workspace.go b/packages/server/pkg/service/sworkspace/workspace.go index 7d1c0a34..596fdf75 100644 --- a/packages/server/pkg/service/sworkspace/workspace.go +++ b/packages/server/pkg/service/sworkspace/workspace.go @@ -82,4 +82,12 @@ func (ws WorkspaceService) GetWorkspacesByUserIDOrdered(ctx context.Context, use return ws.reader.GetWorkspacesByUserIDOrdered(ctx, userID) } +func (ws WorkspaceService) UpdateSync(ctx context.Context, w *mworkspace.Workspace) error { + return NewWorkspaceWriterFromQueries(ws.queries).UpdateSync(ctx, w) +} + +func (ws WorkspaceService) GetSynced(ctx context.Context) ([]mworkspace.Workspace, error) { + return ws.reader.GetSynced(ctx) +} + func (ws WorkspaceService) Reader() *WorkspaceReader { return ws.reader } From 7508f08add2c164dc8c4acb4c6231aa91b7318c8 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 17:59:51 +0000 Subject: [PATCH 11/20] chore: update go.work.sum after test dependency resolution https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- go.work.sum | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/go.work.sum b/go.work.sum index f11f3fea..45c7f837 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1514,10 +1514,14 @@ modernc.org/lex v1.1.1 h1:prSCNTLw1R4rn7M/RzwsuMtAuOytfyR3cnyM07P+Pas= modernc.org/lex v1.1.1/go.mod 
h1:6r8o8DLJkAnOsQaGi8fMoi+Vt6LTbDaCrkUK729D8xM= modernc.org/lexer v1.0.4 h1:hU7xVbZsqwPphyzChc7nMSGrsuaD2PDNOmzrzkS5AlE= modernc.org/lexer v1.0.4/go.mod h1:tOajb8S4sdfOYitzCgXDFmbVJ/LE0v1fNJ7annTw36U= +modernc.org/libc v1.67.4 h1:Yx/6qRlojCVP+kDBZAHgBlALwsVuV9H2oe2McLZ1jkI= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= modernc.org/parser v1.1.0 h1:XoClYpoz2xHEDIteSQ7tICOTFcNwBI7XRCeghUS6SNI= modernc.org/parser v1.1.0/go.mod h1:CXl3OTJRZij8FeMpzI3Id/bjupHf0u9HSrCUP4Z9pbA= modernc.org/scannertest v1.0.2 h1:JPtfxcVdbRvzmRf2YUvsDibJsQRw8vKA/3jb31y7cy0= modernc.org/scannertest v1.0.2/go.mod h1:RzTm5RwglF/6shsKoEivo8N91nQIoWtcWI7ns+zPyGA= +modernc.org/sqlite v1.43.0 h1:m9ptJKtfxloX/f/snVgxgcH6H66r3ScyJIEHEUesEC8= modernc.org/y v1.1.0 h1:JdIvLry+rKeSsVNRCdr6YWYimwwNm0GXtzxid77VfWc= modernc.org/y v1.1.0/go.mod h1:Iz3BmyIS4OwAbwGaUS7cqRrLsSsfp2sFWtpzX+P4CsE= nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= From 63d75e0c46d1d00edba6d8fd93caeed580da08a3 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 18:50:54 +0000 Subject: [PATCH 12/20] refactor: return WorkspaceBundle from topencollection, extract httpLookup struct - topencollection: return *ioworkspace.WorkspaceBundle instead of custom OpenCollectionResolved struct (aligns with openyaml's interface) - topencollection: remove dead folderSeqMap code that was never populated - topencollection: compute and set PathHash on all mfile.File entries - openyaml: extract 7 map parameters from writeFilesRecursive/exportHTTPRequest into httpLookup struct with buildHTTPLookup constructor - openyaml: compute and set PathHash on all mfile.File entries (folders + requests) Both packages now share the same return type and PathHash deduplication strategy. All 19 tests pass. 
https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- packages/server/pkg/openyaml/directory.go | 146 +++++++++--------- .../translate/topencollection/converter.go | 96 +++++------- .../topencollection/converter_test.go | 4 +- 3 files changed, 116 insertions(+), 130 deletions(-) diff --git a/packages/server/pkg/openyaml/directory.go b/packages/server/pkg/openyaml/directory.go index 0d2835c4..588c96a8 100644 --- a/packages/server/pkg/openyaml/directory.go +++ b/packages/server/pkg/openyaml/directory.go @@ -1,6 +1,8 @@ package openyaml import ( + "crypto/sha256" + "encoding/hex" "fmt" "os" "path/filepath" @@ -31,6 +33,51 @@ type ReadOptions struct { WorkspaceID idwrap.IDWrap } +// httpLookup holds pre-built index maps for writing HTTP requests to disk. +type httpLookup struct { + HTTP map[idwrap.IDWrap]mhttp.HTTP + Headers map[idwrap.IDWrap][]mhttp.HTTPHeader + Params map[idwrap.IDWrap][]mhttp.HTTPSearchParam + BodyRaw map[idwrap.IDWrap]mhttp.HTTPBodyRaw + BodyForm map[idwrap.IDWrap][]mhttp.HTTPBodyForm + BodyURL map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded + Assertions map[idwrap.IDWrap][]mhttp.HTTPAssert +} + +func buildHTTPLookup(bundle *ioworkspace.WorkspaceBundle) *httpLookup { + lk := &httpLookup{ + HTTP: make(map[idwrap.IDWrap]mhttp.HTTP, len(bundle.HTTPRequests)), + Headers: make(map[idwrap.IDWrap][]mhttp.HTTPHeader), + Params: make(map[idwrap.IDWrap][]mhttp.HTTPSearchParam), + BodyRaw: make(map[idwrap.IDWrap]mhttp.HTTPBodyRaw), + BodyForm: make(map[idwrap.IDWrap][]mhttp.HTTPBodyForm), + BodyURL: make(map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded), + Assertions: make(map[idwrap.IDWrap][]mhttp.HTTPAssert), + } + for _, h := range bundle.HTTPRequests { + lk.HTTP[h.ID] = h + } + for _, h := range bundle.HTTPHeaders { + lk.Headers[h.HttpID] = append(lk.Headers[h.HttpID], h) + } + for _, p := range bundle.HTTPSearchParams { + lk.Params[p.HttpID] = append(lk.Params[p.HttpID], p) + } + for _, b := range bundle.HTTPBodyRaw { + lk.BodyRaw[b.HttpID] = b + } + for 
_, f := range bundle.HTTPBodyForms { + lk.BodyForm[f.HttpID] = append(lk.BodyForm[f.HttpID], f) + } + for _, u := range bundle.HTTPBodyUrlencoded { + lk.BodyURL[u.HttpID] = append(lk.BodyURL[u.HttpID], u) + } + for _, a := range bundle.HTTPAsserts { + lk.Assertions[a.HttpID] = append(lk.Assertions[a.HttpID], a) + } + return lk +} + // ReadDirectory reads an OpenYAML folder into a WorkspaceBundle. // Directory structure: // - *.yaml files in root/subdirs -> YamlRequestDefV2 -> mhttp models @@ -79,46 +126,7 @@ func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { return fmt.Errorf("create directory: %w", err) } - // Build lookup maps - fileByID := make(map[idwrap.IDWrap]mfile.File) - for _, f := range bundle.Files { - fileByID[f.ID] = f - } - - httpByID := make(map[idwrap.IDWrap]mhttp.HTTP) - for _, h := range bundle.HTTPRequests { - httpByID[h.ID] = h - } - - headersByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPHeader) - for _, h := range bundle.HTTPHeaders { - headersByHTTP[h.HttpID] = append(headersByHTTP[h.HttpID], h) - } - - paramsByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPSearchParam) - for _, p := range bundle.HTTPSearchParams { - paramsByHTTP[p.HttpID] = append(paramsByHTTP[p.HttpID], p) - } - - bodyRawByHTTP := make(map[idwrap.IDWrap]mhttp.HTTPBodyRaw) - for _, b := range bundle.HTTPBodyRaw { - bodyRawByHTTP[b.HttpID] = b - } - - bodyFormByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPBodyForm) - for _, f := range bundle.HTTPBodyForms { - bodyFormByHTTP[f.HttpID] = append(bodyFormByHTTP[f.HttpID], f) - } - - bodyUrlByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded) - for _, u := range bundle.HTTPBodyUrlencoded { - bodyUrlByHTTP[u.HttpID] = append(bodyUrlByHTTP[u.HttpID], u) - } - - assertsByHTTP := make(map[idwrap.IDWrap][]mhttp.HTTPAssert) - for _, a := range bundle.HTTPAsserts { - assertsByHTTP[a.HttpID] = append(assertsByHTTP[a.HttpID], a) - } + lk := buildHTTPLookup(bundle) // Write environments if 
len(bundle.Environments) > 0 { @@ -179,8 +187,7 @@ func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { } // Write requests organized by file hierarchy - // Build parent->children map - childrenByParent := make(map[string][]mfile.File) // parentID string -> children + childrenByParent := make(map[string][]mfile.File) for _, f := range bundle.Files { parentKey := "" if f.ParentID != nil { @@ -189,12 +196,7 @@ func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { childrenByParent[parentKey] = append(childrenByParent[parentKey], f) } - // Write recursively starting from root files - if err := writeFilesRecursive(dirPath, "", childrenByParent, httpByID, headersByHTTP, paramsByHTTP, bodyRawByHTTP, bodyFormByHTTP, bodyUrlByHTTP, assertsByHTTP); err != nil { - return err - } - - return nil + return writeFilesRecursive(dirPath, "", childrenByParent, lk) } func readEnvironments(envDir string, workspaceID idwrap.IDWrap, bundle *ioworkspace.WorkspaceBundle) error { @@ -335,6 +337,7 @@ func readRequestsRecursive( // Create folder file entry folderID := idwrap.NewNow() folderContentID := folderID + pathHash := computePathHash(rel) bundle.Files = append(bundle.Files, mfile.File{ ID: folderID, WorkspaceID: workspaceID, @@ -343,6 +346,7 @@ func readRequestsRecursive( ContentType: mfile.ContentTypeFolder, Name: name, Order: order, + PathHash: &pathHash, UpdatedAt: now, }) @@ -406,6 +410,8 @@ func readRequestsRecursive( // Create file entry contentID := httpID + relPath, _ := filepath.Rel(rootDir, filepath.Join(dirPath, name)) + pathHash := computePathHash(relPath) bundle.Files = append(bundle.Files, mfile.File{ ID: idwrap.NewNow(), WorkspaceID: workspaceID, @@ -414,6 +420,7 @@ func readRequestsRecursive( ContentType: mfile.ContentTypeHTTP, Name: yamlReq.Name, Order: fileOrder, + PathHash: &pathHash, UpdatedAt: now, }) @@ -525,13 +532,7 @@ func writeFilesRecursive( currentDir string, parentIDStr string, childrenByParent 
map[string][]mfile.File, - httpByID map[idwrap.IDWrap]mhttp.HTTP, - headersByHTTP map[idwrap.IDWrap][]mhttp.HTTPHeader, - paramsByHTTP map[idwrap.IDWrap][]mhttp.HTTPSearchParam, - bodyRawByHTTP map[idwrap.IDWrap]mhttp.HTTPBodyRaw, - bodyFormByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyForm, - bodyUrlByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded, - assertsByHTTP map[idwrap.IDWrap][]mhttp.HTTPAssert, + lk *httpLookup, ) error { children := childrenByParent[parentIDStr] sort.Slice(children, func(i, j int) bool { return children[i].Order < children[j].Order }) @@ -543,7 +544,7 @@ func writeFilesRecursive( if err := os.MkdirAll(subDir, 0o755); err != nil { return fmt.Errorf("create dir %q: %w", f.Name, err) } - if err := writeFilesRecursive(subDir, f.ID.String(), childrenByParent, httpByID, headersByHTTP, paramsByHTTP, bodyRawByHTTP, bodyFormByHTTP, bodyUrlByHTTP, assertsByHTTP); err != nil { + if err := writeFilesRecursive(subDir, f.ID.String(), childrenByParent, lk); err != nil { return err } @@ -551,12 +552,12 @@ func writeFilesRecursive( if f.ContentID == nil { continue } - httpReq, ok := httpByID[*f.ContentID] + httpReq, ok := lk.HTTP[*f.ContentID] if !ok { continue } - yamlReq := exportHTTPRequest(httpReq, f.Order, headersByHTTP, paramsByHTTP, bodyRawByHTTP, bodyFormByHTTP, bodyUrlByHTTP, assertsByHTTP) + yamlReq := exportHTTPRequest(httpReq, f.Order, lk) data, err := WriteSingleRequest(yamlReq) if err != nil { return fmt.Errorf("marshal request %q: %w", httpReq.Name, err) @@ -572,16 +573,7 @@ func writeFilesRecursive( return nil } -func exportHTTPRequest( - httpReq mhttp.HTTP, - order float64, - headersByHTTP map[idwrap.IDWrap][]mhttp.HTTPHeader, - paramsByHTTP map[idwrap.IDWrap][]mhttp.HTTPSearchParam, - bodyRawByHTTP map[idwrap.IDWrap]mhttp.HTTPBodyRaw, - bodyFormByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyForm, - bodyUrlByHTTP map[idwrap.IDWrap][]mhttp.HTTPBodyUrlencoded, - assertsByHTTP map[idwrap.IDWrap][]mhttp.HTTPAssert, -) yfs.YamlRequestDefV2 { +func 
exportHTTPRequest(httpReq mhttp.HTTP, order float64, lk *httpLookup) yfs.YamlRequestDefV2 { req := yfs.YamlRequestDefV2{ Name: httpReq.Name, Method: httpReq.Method, @@ -591,7 +583,7 @@ func exportHTTPRequest( } // Headers - headers := headersByHTTP[httpReq.ID] + headers := lk.Headers[httpReq.ID] if len(headers) > 0 { var pairs []yfs.YamlNameValuePairV2 for _, h := range headers { @@ -606,7 +598,7 @@ func exportHTTPRequest( } // Query params - params := paramsByHTTP[httpReq.ID] + params := lk.Params[httpReq.ID] if len(params) > 0 { var pairs []yfs.YamlNameValuePairV2 for _, p := range params { @@ -623,7 +615,7 @@ func exportHTTPRequest( // Body switch httpReq.BodyKind { case mhttp.HttpBodyKindFormData: - forms := bodyFormByHTTP[httpReq.ID] + forms := lk.BodyForm[httpReq.ID] if len(forms) > 0 { var pairs []yfs.YamlNameValuePairV2 for _, f := range forms { @@ -640,7 +632,7 @@ func exportHTTPRequest( } } case mhttp.HttpBodyKindUrlEncoded: - urls := bodyUrlByHTTP[httpReq.ID] + urls := lk.BodyURL[httpReq.ID] if len(urls) > 0 { var pairs []yfs.YamlNameValuePairV2 for _, u := range urls { @@ -657,7 +649,7 @@ func exportHTTPRequest( } } case mhttp.HttpBodyKindRaw: - if raw, ok := bodyRawByHTTP[httpReq.ID]; ok && len(raw.RawData) > 0 { + if raw, ok := lk.BodyRaw[httpReq.ID]; ok && len(raw.RawData) > 0 { req.Body = &yfs.YamlBodyUnion{ Type: "raw", Raw: string(raw.RawData), @@ -666,7 +658,7 @@ func exportHTTPRequest( } // Assertions - asserts := assertsByHTTP[httpReq.ID] + asserts := lk.Assertions[httpReq.ID] if len(asserts) > 0 { var yamlAsserts []yfs.YamlAssertionV2 for _, a := range asserts { @@ -699,6 +691,12 @@ func exportFlow(flow mflow.Flow, bundle *ioworkspace.WorkspaceBundle) yfs.YamlFl return yamlFlow } +// computePathHash returns a SHA-256 hash of the given path for deduplication. 
+func computePathHash(relPath string) string { + h := sha256.Sum256([]byte(relPath)) + return hex.EncodeToString(h[:]) +} + // atomicWrite writes data to a temp file then renames for safety. func atomicWrite(path string, data []byte) error { dir := filepath.Dir(path) diff --git a/packages/server/pkg/translate/topencollection/converter.go b/packages/server/pkg/translate/topencollection/converter.go index 8010837b..134112ef 100644 --- a/packages/server/pkg/translate/topencollection/converter.go +++ b/packages/server/pkg/translate/topencollection/converter.go @@ -1,6 +1,8 @@ package topencollection import ( + "crypto/sha256" + "encoding/hex" "fmt" "log/slog" "os" @@ -11,9 +13,10 @@ import ( "gopkg.in/yaml.v3" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" - "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" ) // ConvertOptions configures the OpenCollection import. @@ -22,26 +25,10 @@ type ConvertOptions struct { Logger *slog.Logger } -// OpenCollectionResolved contains all converted DevTools models. -type OpenCollectionResolved struct { - CollectionName string - - HTTPRequests []mhttp.HTTP - HTTPHeaders []mhttp.HTTPHeader - HTTPSearchParams []mhttp.HTTPSearchParam - HTTPBodyForms []mhttp.HTTPBodyForm - HTTPBodyUrlencoded []mhttp.HTTPBodyUrlencoded - HTTPBodyRaw []mhttp.HTTPBodyRaw - HTTPAsserts []mhttp.HTTPAssert - Files []mfile.File - Environments []menv.Env - EnvironmentVars []menv.Variable -} - // ConvertOpenCollection walks the given directory, parses each .yml file, and converts // to DevTools models. Only info.type == "http" requests are imported. // GraphQL, WebSocket, and gRPC types are skipped with a log warning. 
-func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCollectionResolved, error) { +func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*ioworkspace.WorkspaceBundle, error) { logger := opts.Logger if logger == nil { logger = slog.Default() @@ -59,14 +46,17 @@ func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCol return nil, fmt.Errorf("failed to parse opencollection.yml: %w", err) } - result := &OpenCollectionResolved{ - CollectionName: root.Info.Name, + bundle := &ioworkspace.WorkspaceBundle{ + Workspace: mworkspace.Workspace{ + ID: opts.WorkspaceID, + Name: root.Info.Name, + }, } now := time.Now().UnixMilli() // Walk directory tree recursively - if err := walkCollection(collectionPath, collectionPath, nil, opts.WorkspaceID, now, result, logger); err != nil { + if err := walkCollection(collectionPath, collectionPath, nil, opts.WorkspaceID, now, bundle, logger); err != nil { return nil, fmt.Errorf("failed to walk collection: %w", err) } @@ -96,12 +86,12 @@ func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCol } env, vars := convertEnvironment(ocEnv, opts.WorkspaceID) - result.Environments = append(result.Environments, env) - result.EnvironmentVars = append(result.EnvironmentVars, vars...) + bundle.Environments = append(bundle.Environments, env) + bundle.EnvironmentVars = append(bundle.EnvironmentVars, vars...) 
} } - return result, nil + return bundle, nil } // walkCollection recursively walks a directory in the collection, creating @@ -112,7 +102,7 @@ func walkCollection( parentID *idwrap.IDWrap, workspaceID idwrap.IDWrap, now int64, - result *OpenCollectionResolved, + bundle *ioworkspace.WorkspaceBundle, logger *slog.Logger, ) error { entries, err := os.ReadDir(dirPath) @@ -144,13 +134,6 @@ func walkCollection( } } - // Try to read folder.yml for folder metadata - folderSeqMap := make(map[string]int) - folderYMLPath := filepath.Join(dirPath, "folder.yml") - if _, err := os.Stat(folderYMLPath); err == nil { - // folder.yml exists but we don't need it for folder name — directory name is used - } - // Process request files first order := float64(1) for _, fileEntry := range files { @@ -190,17 +173,17 @@ func walkCollection( if ocReq.Info.Seq > 0 { fileOrder = float64(ocReq.Info.Seq) } - if seq, ok := folderSeqMap[fileEntry.Name()]; ok { - fileOrder = float64(seq) - } - convertRequest(ocReq, workspaceID, parentID, fileOrder, now, result) + relPath, _ := filepath.Rel(rootPath, filePath) + convertRequest(ocReq, workspaceID, parentID, fileOrder, now, relPath, bundle) order++ } // Process subdirectories for _, dirEntry := range dirs { subDirPath := filepath.Join(dirPath, dirEntry.Name()) + relPath, _ := filepath.Rel(rootPath, subDirPath) + pathHash := computePathHash(relPath) // Create a folder file entry folderID := idwrap.NewNow() @@ -213,12 +196,13 @@ func walkCollection( ContentType: mfile.ContentTypeFolder, Name: dirEntry.Name(), Order: order, + PathHash: &pathHash, UpdatedAt: time.UnixMilli(now), } - result.Files = append(result.Files, folderFile) + bundle.Files = append(bundle.Files, folderFile) // Recurse into subdirectory - if err := walkCollection(rootPath, subDirPath, &folderID, workspaceID, now, result, logger); err != nil { + if err := walkCollection(rootPath, subDirPath, &folderID, workspaceID, now, bundle, logger); err != nil { return err } order++ @@ -234,7 
+218,8 @@ func convertRequest( parentID *idwrap.IDWrap, order float64, now int64, - result *OpenCollectionResolved, + relPath string, + bundle *ioworkspace.WorkspaceBundle, ) { httpID := idwrap.NewNow() @@ -266,11 +251,12 @@ func convertRequest( CreatedAt: now, UpdatedAt: now, } - result.HTTPRequests = append(result.HTTPRequests, httpReq) + bundle.HTTPRequests = append(bundle.HTTPRequests, httpReq) // Create file entry for this request contentID := httpID fileID := idwrap.NewNow() + pathHash := computePathHash(relPath) file := mfile.File{ ID: fileID, WorkspaceID: workspaceID, @@ -279,9 +265,10 @@ func convertRequest( ContentType: mfile.ContentTypeHTTP, Name: ocReq.Info.Name, Order: order, + PathHash: &pathHash, UpdatedAt: time.UnixMilli(now), } - result.Files = append(result.Files, file) + bundle.Files = append(bundle.Files, file) if ocReq.HTTP == nil { return @@ -289,7 +276,7 @@ func convertRequest( // Convert headers for i, h := range ocReq.HTTP.Headers { - result.HTTPHeaders = append(result.HTTPHeaders, mhttp.HTTPHeader{ + bundle.HTTPHeaders = append(bundle.HTTPHeaders, mhttp.HTTPHeader{ ID: idwrap.NewNow(), HttpID: httpID, Key: h.Name, @@ -304,7 +291,7 @@ func convertRequest( // Convert params for i, p := range ocReq.HTTP.Params { if strings.ToLower(p.Type) == "query" || p.Type == "" { - result.HTTPSearchParams = append(result.HTTPSearchParams, mhttp.HTTPSearchParam{ + bundle.HTTPSearchParams = append(bundle.HTTPSearchParams, mhttp.HTTPSearchParam{ ID: idwrap.NewNow(), HttpID: httpID, Key: p.Name, @@ -320,18 +307,18 @@ func convertRequest( // Convert auth → headers/params authHeaders, authParams := convertAuth(ocReq.HTTP.Auth, httpID) - result.HTTPHeaders = append(result.HTTPHeaders, authHeaders...) - result.HTTPSearchParams = append(result.HTTPSearchParams, authParams...) + bundle.HTTPHeaders = append(bundle.HTTPHeaders, authHeaders...) + bundle.HTTPSearchParams = append(bundle.HTTPSearchParams, authParams...) 
// Convert body _, bodyRaw, bodyForms, bodyUrlencoded := convertBody(ocReq.HTTP.Body, httpID) if bodyRaw != nil { bodyRaw.CreatedAt = now bodyRaw.UpdatedAt = now - result.HTTPBodyRaw = append(result.HTTPBodyRaw, *bodyRaw) + bundle.HTTPBodyRaw = append(bundle.HTTPBodyRaw, *bodyRaw) } - result.HTTPBodyForms = append(result.HTTPBodyForms, bodyForms...) - result.HTTPBodyUrlencoded = append(result.HTTPBodyUrlencoded, bodyUrlencoded...) + bundle.HTTPBodyForms = append(bundle.HTTPBodyForms, bodyForms...) + bundle.HTTPBodyUrlencoded = append(bundle.HTTPBodyUrlencoded, bodyUrlencoded...) // Convert assertions if ocReq.Runtime != nil { @@ -340,7 +327,7 @@ func convertRequest( if a.Operator != "" { expr = fmt.Sprintf("%s %s %s", a.Expression, a.Operator, a.Value) } - result.HTTPAsserts = append(result.HTTPAsserts, mhttp.HTTPAssert{ + bundle.HTTPAsserts = append(bundle.HTTPAsserts, mhttp.HTTPAssert{ ID: idwrap.NewNow(), HttpID: httpID, Value: strings.TrimSpace(expr), @@ -350,14 +337,15 @@ func convertRequest( UpdatedAt: now, }) } - - // Log warning for scripts (not imported) - if len(ocReq.Runtime.Scripts) > 0 { - // Scripts are intentionally not imported — DevTools uses JS flow nodes instead - } } } +// computePathHash returns a SHA-256 hash of the given path for deduplication. +func computePathHash(relPath string) string { + h := sha256.Sum256([]byte(relPath)) + return hex.EncodeToString(h[:]) +} + // isYAMLFile checks if a filename has a YAML extension. 
func isYAMLFile(name string) bool { ext := strings.ToLower(filepath.Ext(name)) diff --git a/packages/server/pkg/translate/topencollection/converter_test.go b/packages/server/pkg/translate/topencollection/converter_test.go index 4f258b68..d217e802 100644 --- a/packages/server/pkg/translate/topencollection/converter_test.go +++ b/packages/server/pkg/translate/topencollection/converter_test.go @@ -27,8 +27,8 @@ func TestConvertOpenCollection_BasicCollection(t *testing.T) { } // Verify collection name - if result.CollectionName != "Test API Collection" { - t.Errorf("expected collection name 'Test API Collection', got %q", result.CollectionName) + if result.Workspace.Name != "Test API Collection" { + t.Errorf("expected collection name 'Test API Collection', got %q", result.Workspace.Name) } // Verify HTTP requests: Get Users, Create User, Login = 3 From c7904a6d273f71e5ddbc032cfa3871bcf147bdfd Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 19:46:10 +0000 Subject: [PATCH 13/20] feat: add sync fields to workspace TypeSpec and RPC handler - TypeSpec: add syncPath?, syncFormat?, syncEnabled to Workspace model (auto-generates Insert/Update/Sync proto types via TanStackDB emitter) - RPC: wire sync fields through toAPIWorkspace, WorkspaceInsert, WorkspaceUpdate, and WorkspaceSync event broadcasting - Requires `pnpm nx run spec:build` to regenerate proto types before the RPC handler will compile https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../internal/api/rworkspace/rworkspace.go | 39 ++++++++++++++++--- packages/spec/api/workspace.tsp | 3 ++ 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/packages/server/internal/api/rworkspace/rworkspace.go b/packages/server/internal/api/rworkspace/rworkspace.go index 884eb7d2..f2878df8 100644 --- a/packages/server/internal/api/rworkspace/rworkspace.go +++ b/packages/server/internal/api/rworkspace/rworkspace.go @@ -156,6 +156,8 @@ func stringPtr(s string) *string { return &s } func float32Ptr(f 
float32) *float32 { return &f } +func boolPtr(b bool) *bool { return &b } + func workspaceUpdatedUnion(ts *timestamppb.Timestamp) *apiv1.WorkspaceSyncUpdate_UpdatedUnion { if ts == nil { return nil @@ -172,10 +174,17 @@ func toAPIWorkspace(ws mworkspace.Workspace) *apiv1.Workspace { SelectedEnvironmentId: ws.ActiveEnv.Bytes(), Name: ws.Name, Order: float32(ws.Order), + SyncEnabled: ws.SyncEnabled, } if !ws.Updated.IsZero() { apiWorkspace.Updated = timestamppb.New(ws.Updated) } + if ws.SyncPath != nil { + apiWorkspace.SyncPath = ws.SyncPath + } + if ws.SyncFormat != nil { + apiWorkspace.SyncFormat = ws.SyncFormat + } return apiWorkspace } @@ -195,6 +204,9 @@ func workspaceSyncResponseFrom(evt WorkspaceEvent) *apiv1.WorkspaceSyncResponse Name: evt.Workspace.Name, Updated: evt.Workspace.Updated, Order: evt.Workspace.Order, + SyncPath: evt.Workspace.SyncPath, + SyncFormat: evt.Workspace.SyncFormat, + SyncEnabled: evt.Workspace.SyncEnabled, }, }, } @@ -205,6 +217,9 @@ func workspaceSyncResponseFrom(evt WorkspaceEvent) *apiv1.WorkspaceSyncResponse Name: stringPtr(evt.Workspace.Name), Order: float32Ptr(evt.Workspace.Order), Updated: workspaceUpdatedUnion(evt.Workspace.Updated), + SyncPath: evt.Workspace.SyncPath, + SyncFormat: evt.Workspace.SyncFormat, + SyncEnabled: boolPtr(evt.Workspace.SyncEnabled), } if len(evt.Workspace.SelectedEnvironmentId) > 0 { update.SelectedEnvironmentId = evt.Workspace.SelectedEnvironmentId @@ -307,12 +322,15 @@ func (c *WorkspaceServiceRPC) WorkspaceInsert(ctx context.Context, req *connect. 
} ws := &mworkspace.Workspace{ - ID: workspaceID, - Name: name, - Updated: dbtime.DBNow(), - ActiveEnv: envID, - GlobalEnv: envID, - Order: float64(item.Order), + ID: workspaceID, + Name: name, + Updated: dbtime.DBNow(), + ActiveEnv: envID, + GlobalEnv: envID, + Order: float64(item.Order), + SyncPath: item.SyncPath, + SyncFormat: item.SyncFormat, + SyncEnabled: item.SyncEnabled, } if err := wsWriter.Create(ctx, ws); err != nil { @@ -460,6 +478,15 @@ func (c *WorkspaceServiceRPC) WorkspaceUpdate(ctx context.Context, req *connect. if item.Order != nil { ws.Order = float64(*item.Order) } + if item.SyncPath != nil { + ws.SyncPath = item.SyncPath + } + if item.SyncFormat != nil { + ws.SyncFormat = item.SyncFormat + } + if item.SyncEnabled != nil { + ws.SyncEnabled = *item.SyncEnabled + } break } diff --git a/packages/spec/api/workspace.tsp b/packages/spec/api/workspace.tsp index 8a0e06a3..87aad4df 100644 --- a/packages/spec/api/workspace.tsp +++ b/packages/spec/api/workspace.tsp @@ -9,4 +9,7 @@ model Workspace { name: string; @visibility(Lifecycle.Read) updated?: Protobuf.WellKnown.Timestamp; order: float32; + syncPath?: string; + syncFormat?: string; + syncEnabled: boolean; } From bcb531f958993c7d974ae3726ef0aba001a1a3aa Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 19:54:27 +0000 Subject: [PATCH 14/20] =?UTF-8?q?feat:=20add=20folder=20sync=20UI=20?= =?UTF-8?q?=E2=80=94=20Import=20from=20Folder=20button=20+=20sync=20settin?= =?UTF-8?q?gs=20dialog?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Workspace list page: - "Import from Folder" button creates a workspace with sync preconfigured (folder path + format selection: OpenYAML or Bruno) - "Folder Sync..." 
context menu item on each workspace opens sync settings (enable/disable sync, change path/format) New widget: packages/client/src/widgets/folder-sync/ - useImportFolderDialog() — creates workspace from local folder - useFolderSyncDialog() — configures sync on existing workspace - Uses Electron native folder picker (window.electron.dialog) - Follows existing useProgrammaticModal() dialog pattern Requires `pnpm nx run spec:build` first to generate TypeScript types with syncPath, syncFormat, syncEnabled fields. https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../src/pages/dashboard/routes/index.tsx | 18 ++ .../client/src/widgets/folder-sync/index.tsx | 306 ++++++++++++++++++ 2 files changed, 324 insertions(+) create mode 100644 packages/client/src/widgets/folder-sync/index.tsx diff --git a/packages/client/src/pages/dashboard/routes/index.tsx b/packages/client/src/pages/dashboard/routes/index.tsx index ab72e462..e68da30f 100644 --- a/packages/client/src/pages/dashboard/routes/index.tsx +++ b/packages/client/src/pages/dashboard/routes/index.tsx @@ -26,6 +26,7 @@ import { useApiCollection } from '~/shared/api'; import { getNextOrder, handleCollectionReorder, pick } from '~/shared/lib'; import { routes } from '~/shared/routes'; import { DashboardLayout } from '~/shared/ui'; +import { useFolderSyncDialog, useImportFolderDialog } from '~/widgets/folder-sync'; export const Route = createFileRoute('/(dashboard)/')({ component: RouteComponent, @@ -65,8 +66,12 @@ export const WorkspaceListPage = () => { renderDropIndicator: () => , }); + const importFolderDialog = useImportFolderDialog(); + return (
+ {importFolderDialog.render} +
{pipe(DateTime.unsafeNow(), DateTime.formatLocal({ dateStyle: 'full' }))} @@ -77,6 +82,9 @@ export const WorkspaceListPage = () => {
Your Workspaces + +
+ + setFormat(v as SyncFormat)} + value={format} + > + +
+ +
+ OpenYAML + + +
+ Bruno + +
+ +
+ +
+ {currentEnabled && ( + + )} +
+ + +
+ + )} + + ); +}; + +// --- Import from Folder Dialog (creates new workspace) --- + +export const useImportFolderDialog = () => { + const modal = useProgrammaticModal(); + + const open = (): void => + void modal.onOpenChange(true, ); + + const render: ReactNode = modal.children && ( + + ); + + return { open, render }; +}; + +const ImportFolderDialogContent = () => { + const workspaceCollection = useApiCollection(WorkspaceCollectionSchema); + + const [folderPath, setFolderPath] = useState(''); + const [workspaceName, setWorkspaceName] = useState(''); + const [format, setFormat] = useState('openyaml'); + const [isPending, startTransition] = useTransition(); + + const browseFolder = async () => { + if (!window.electron?.dialog) return; + const result = await window.electron.dialog('showOpenDialog', { + properties: ['openDirectory'], + title: 'Select collection folder', + }); + if (!result.canceled && result.filePaths[0]) { + const path = result.filePaths[0]; + setFolderPath(path); + // Auto-fill name from folder name if empty + if (!workspaceName) { + const folderName = path.split('/').pop() ?? path.split('\\').pop() ?? ''; + setWorkspaceName(folderName); + } + } + }; + + const importFolder = () => + startTransition(async () => { + const name = workspaceName || folderPath.split('/').pop() || 'Imported Workspace'; + await workspaceCollection.utils.insert({ + name, + order: await getNextOrder(workspaceCollection), + workspaceId: Ulid.generate().bytes, + syncPath: folderPath, + syncFormat: format, + syncEnabled: true, + }); + }); + + return ( + + {({ close }) => ( + <> + + Import from Folder + + +
+ Create a workspace synced to a local folder. Changes in the folder will automatically appear in DevTools. +
+ +
+
+ + +
+ + + + setFormat(v as SyncFormat)} + value={format} + > + +
+ +
+ OpenYAML + + +
+ Bruno + +
+ +
+ +
+ + +
+ + )} +
+ ); +}; From ba3afe800e2010ac48fa505cb5d8a7ea1e3c12e1 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 20:15:10 +0000 Subject: [PATCH 15/20] fix: handle union types for sync fields in workspace RPC handler The TypeSpec emitter generates SyncPath/SyncFormat as union types (SyncPathUnion/SyncFormatUnion) on update messages, not plain *string. Fix WorkspaceUpdate to extract values from unions, and fix sync event publishing to wrap *string values in the corresponding union types. Verified: rworkspace package compiles, all 19 tests pass. https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../internal/api/rworkspace/rworkspace.go | 36 ++++++++++++++++--- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/packages/server/internal/api/rworkspace/rworkspace.go b/packages/server/internal/api/rworkspace/rworkspace.go index f2878df8..7554ca14 100644 --- a/packages/server/internal/api/rworkspace/rworkspace.go +++ b/packages/server/internal/api/rworkspace/rworkspace.go @@ -168,6 +168,26 @@ func workspaceUpdatedUnion(ts *timestamppb.Timestamp) *apiv1.WorkspaceSyncUpdate } } +func syncPathSyncUnion(s *string) *apiv1.WorkspaceSyncUpdate_SyncPathUnion { + if s == nil { + return nil + } + return &apiv1.WorkspaceSyncUpdate_SyncPathUnion{ + Kind: apiv1.WorkspaceSyncUpdate_SyncPathUnion_KIND_VALUE, + Value: s, + } +} + +func syncFormatSyncUnion(s *string) *apiv1.WorkspaceSyncUpdate_SyncFormatUnion { + if s == nil { + return nil + } + return &apiv1.WorkspaceSyncUpdate_SyncFormatUnion{ + Kind: apiv1.WorkspaceSyncUpdate_SyncFormatUnion_KIND_VALUE, + Value: s, + } +} + func toAPIWorkspace(ws mworkspace.Workspace) *apiv1.Workspace { apiWorkspace := &apiv1.Workspace{ WorkspaceId: ws.ID.Bytes(), @@ -217,8 +237,8 @@ func workspaceSyncResponseFrom(evt WorkspaceEvent) *apiv1.WorkspaceSyncResponse Name: stringPtr(evt.Workspace.Name), Order: float32Ptr(evt.Workspace.Order), Updated: workspaceUpdatedUnion(evt.Workspace.Updated), - SyncPath: evt.Workspace.SyncPath, - 
SyncFormat: evt.Workspace.SyncFormat, + SyncPath: syncPathSyncUnion(evt.Workspace.SyncPath), + SyncFormat: syncFormatSyncUnion(evt.Workspace.SyncFormat), SyncEnabled: boolPtr(evt.Workspace.SyncEnabled), } if len(evt.Workspace.SelectedEnvironmentId) > 0 { @@ -479,10 +499,18 @@ func (c *WorkspaceServiceRPC) WorkspaceUpdate(ctx context.Context, req *connect. ws.Order = float64(*item.Order) } if item.SyncPath != nil { - ws.SyncPath = item.SyncPath + if item.SyncPath.Kind == apiv1.WorkspaceUpdate_SyncPathUnion_KIND_VALUE { + ws.SyncPath = item.SyncPath.Value + } else if item.SyncPath.Kind == apiv1.WorkspaceUpdate_SyncPathUnion_KIND_UNSET { + ws.SyncPath = nil + } } if item.SyncFormat != nil { - ws.SyncFormat = item.SyncFormat + if item.SyncFormat.Kind == apiv1.WorkspaceUpdate_SyncFormatUnion_KIND_VALUE { + ws.SyncFormat = item.SyncFormat.Value + } else if item.SyncFormat.Kind == apiv1.WorkspaceUpdate_SyncFormatUnion_KIND_UNSET { + ws.SyncFormat = nil + } } if item.SyncEnabled != nil { ws.SyncEnabled = *item.SyncEnabled From 982b8446488bb82812d10ebedf8239a067b8bf91 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 20:29:45 +0000 Subject: [PATCH 16/20] fix: correct modernc.org/sqlite checksum in go.work.sum The go.work.sum had a mismatched checksum for modernc.org/sqlite@v1.43.0 from a previous GONOSUMCHECK download. Align it with the verified checksum in all module go.sum files. 
https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- go.work.sum | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go.work.sum b/go.work.sum index 45c7f837..746f0e55 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1514,14 +1514,14 @@ modernc.org/lex v1.1.1 h1:prSCNTLw1R4rn7M/RzwsuMtAuOytfyR3cnyM07P+Pas= modernc.org/lex v1.1.1/go.mod h1:6r8o8DLJkAnOsQaGi8fMoi+Vt6LTbDaCrkUK729D8xM= modernc.org/lexer v1.0.4 h1:hU7xVbZsqwPphyzChc7nMSGrsuaD2PDNOmzrzkS5AlE= modernc.org/lexer v1.0.4/go.mod h1:tOajb8S4sdfOYitzCgXDFmbVJ/LE0v1fNJ7annTw36U= -modernc.org/libc v1.67.4 h1:Yx/6qRlojCVP+kDBZAHgBlALwsVuV9H2oe2McLZ1jkI= +modernc.org/libc v1.67.4 h1:zZGmCMUVPORtKv95c2ReQN5VDjvkoRm9GWPTEPuvlWg= modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= modernc.org/parser v1.1.0 h1:XoClYpoz2xHEDIteSQ7tICOTFcNwBI7XRCeghUS6SNI= modernc.org/parser v1.1.0/go.mod h1:CXl3OTJRZij8FeMpzI3Id/bjupHf0u9HSrCUP4Z9pbA= modernc.org/scannertest v1.0.2 h1:JPtfxcVdbRvzmRf2YUvsDibJsQRw8vKA/3jb31y7cy0= modernc.org/scannertest v1.0.2/go.mod h1:RzTm5RwglF/6shsKoEivo8N91nQIoWtcWI7ns+zPyGA= -modernc.org/sqlite v1.43.0 h1:m9ptJKtfxloX/f/snVgxgcH6H66r3ScyJIEHEUesEC8= +modernc.org/sqlite v1.43.0 h1:8YqiFx3G1VhHTXO2Q00bl1Wz9KhS9Q5okwfp9Y97VnA= modernc.org/y v1.1.0 h1:JdIvLry+rKeSsVNRCdr6YWYimwwNm0GXtzxid77VfWc= modernc.org/y v1.1.0/go.mod h1:Iz3BmyIS4OwAbwGaUS7cqRrLsSsfp2sFWtpzX+P4CsE= nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= From 1823198dfcc6dc210c6df17d6ec7a3be69450d25 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 20:45:54 +0000 Subject: [PATCH 17/20] style: run prettier on all new/modified files Fix formatting for TSX components, markdown docs, and YAML test fixtures to pass lint:format CI check. 
https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../src/pages/dashboard/routes/index.tsx | 6 +- .../client/src/widgets/folder-sync/index.tsx | 53 ++---- .../server/docs/specs/BRUNO_FOLDER_SYNC.md | 177 ++++++++++-------- .../testdata/collection/environments/dev.yaml | 4 +- .../testdata/collection/flows/smoke-test.yaml | 4 +- .../collection/users/create-user.yaml | 4 +- .../testdata/collection/users/get-users.yaml | 8 +- .../testdata/basic-collection/auth/login.yml | 4 +- .../basic-collection/environments/dev.yml | 4 +- .../basic-collection/environments/prod.yml | 4 +- .../basic-collection/opencollection.yml | 8 +- .../basic-collection/users/create-user.yml | 10 +- .../basic-collection/users/get-users.yml | 10 +- 13 files changed, 138 insertions(+), 158 deletions(-) diff --git a/packages/client/src/pages/dashboard/routes/index.tsx b/packages/client/src/pages/dashboard/routes/index.tsx index e68da30f..4169ec93 100644 --- a/packages/client/src/pages/dashboard/routes/index.tsx +++ b/packages/client/src/pages/dashboard/routes/index.tsx @@ -231,11 +231,7 @@ const Item = ({ containerRef, id }: ItemProps) => { void edit()}>Rename - - void folderSyncDialog.open({ workspaceId: workspaceUlid.bytes }) - } - > + void folderSyncDialog.open({ workspaceId: workspaceUlid.bytes })}> Folder Sync... diff --git a/packages/client/src/widgets/folder-sync/index.tsx b/packages/client/src/widgets/folder-sync/index.tsx index a23b7ebe..d63fc911 100644 --- a/packages/client/src/widgets/folder-sync/index.tsx +++ b/packages/client/src/widgets/folder-sync/index.tsx @@ -27,9 +27,7 @@ export const useFolderSyncDialog = () => { const open = (props: FolderSyncDialogProps): void => void modal.onOpenChange(true, ); - const render: ReactNode = modal.children && ( - - ); + const render: ReactNode = modal.children && ; return { open, render }; }; @@ -79,10 +77,7 @@ const FolderSyncDialogContent = ({ {({ close }) => ( <> - + Folder Sync @@ -102,26 +97,16 @@ const FolderSyncDialogContent = ({
- setFormat(v as SyncFormat)} - value={format} - > + setFormat(v as SyncFormat)} value={format}>
- +
OpenYAML - +
@@ -169,12 +154,9 @@ const FolderSyncDialogContent = ({ export const useImportFolderDialog = () => { const modal = useProgrammaticModal(); - const open = (): void => - void modal.onOpenChange(true, ); + const open = (): void => void modal.onOpenChange(true, ); - const render: ReactNode = modal.children && ( - - ); + const render: ReactNode = modal.children && ; return { open, render }; }; @@ -221,10 +203,7 @@ const ImportFolderDialogContent = () => { {({ close }) => ( <> - + Import from Folder @@ -256,26 +235,16 @@ const ImportFolderDialogContent = () => { value={workspaceName} /> - setFormat(v as SyncFormat)} - value={format} - > + setFormat(v as SyncFormat)} value={format}>
- +
OpenYAML - +
diff --git a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md index 03e8189d..0077b566 100644 --- a/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md +++ b/packages/server/docs/specs/BRUNO_FOLDER_SYNC.md @@ -76,26 +76,28 @@ Go Server The OpenYAML folder is the **canonical data store**. SQLite is a **runtime cache** that can be fully rebuilt from the folder at any time. This is the same model Bruno uses — their Redux store is just a runtime view of what's on disk. **Why the folder must be the source of truth:** + - `git pull` brings new changes → folder has the latest data → SQLite must update to match - Teammate edits a request in their editor → saves → watcher picks it up → SQLite updates → UI reflects - Delete `state.db` → server starts → reads folder → SQLite rebuilt → nothing lost - The folder is what gets committed, pushed, reviewed in PRs, and shared across machines **Why SQLite is still valuable as a cache:** + - Fast indexed queries (no YAML parsing on every read) - Transactions for atomic multi-entity operations - Existing services, RPC handlers, and runner all work with SQLite - Real-time eventstream already wired to SQLite changes - Supports non-synced workspaces (Mode 1) that live only in SQLite -| Direction | Trigger | Behavior | -|-----------|---------|----------| +| Direction | Trigger | Behavior | +| ------------------- | ---------------------------------------------------- | ---------------------------------------------------------- | | **Folder → SQLite** | File watcher detects change, or git pull, or startup | Parse YAML → upsert into SQLite → eventstream → UI updates | -| **SQLite → Folder** | UI edit via RPC handler | Write to SQLite → serialize to YAML → write to disk | -| **Startup** | Server starts with synced workspace | Read entire folder → populate/reconcile SQLite | -| **Git pull** | Watcher detects batch changes | Re-parse changed files → update SQLite → UI refreshes | -| **Conflict** | 
File changed on disk while UI was editing | **Folder wins** — disk state overwrites SQLite | -| **Rebuild** | `state.db` deleted or corrupted | Full re-read from folder → SQLite rebuilt from scratch | +| **SQLite → Folder** | UI edit via RPC handler | Write to SQLite → serialize to YAML → write to disk | +| **Startup** | Server starts with synced workspace | Read entire folder → populate/reconcile SQLite | +| **Git pull** | Watcher detects batch changes | Re-parse changed files → update SQLite → UI refreshes | +| **Conflict** | File changed on disk while UI was editing | **Folder wins** — disk state overwrites SQLite | +| **Rebuild** | `state.db` deleted or corrupted | Full re-read from folder → SQLite rebuilt from scratch | ### Reconciliation on Startup @@ -167,6 +169,7 @@ SyncPath = "/Users/dev/my-api-collection", SyncFormat = "open_yaml", SyncEnabled **Two sub-scenarios:** **A) Export to new folder (existing workspace → empty folder):** + 1. User has an existing workspace in DevTools (data in SQLite) 2. User clicks "Sync to Folder" → picks/creates an empty directory 3. Server sets `sync_path` on the workspace @@ -175,6 +178,7 @@ SyncPath = "/Users/dev/my-api-collection", SyncFormat = "open_yaml", SyncEnabled 6. User can `git init && git add . && git commit` to start versioning **B) Open existing folder (OpenYAML folder → new workspace):** + 1. User clicks "Open Folder" → picks a directory containing `.yaml` request/flow files 2. Server creates a new workspace with `sync_path` set 3. SyncCoordinator starts → reads entire folder → populates SQLite cache @@ -189,6 +193,7 @@ SyncPath = "/Users/dev/my-api-devtools/", SyncFormat = "open_yaml", SyncEnabled ``` **Flow:** + 1. User clicks "Import Bruno Collection" → picks directory with `opencollection.yml` 2. Server parses the OpenCollection YAML directory 3. 
Server creates a new workspace and populates SQLite with the converted data @@ -198,6 +203,7 @@ SyncPath = "/Users/dev/my-api-devtools/", SyncFormat = "open_yaml", SyncEnabled 7. Original Bruno folder is NOT modified **Why a separate folder?** The Bruno folder uses OpenCollection YAML format (different schema). We don't want to: + - Corrupt the Bruno collection - Mix two different YAML formats in one folder - Create confusion about which tool owns the folder @@ -216,12 +222,12 @@ OpenCollection .yml directory → SyncCoordinator exports to OpenYAML folder ``` -| Layer | Location | Pattern | -|-------|----------|---------| -| CLI Command | `apps/cli/cmd/import.go` | Add `importBrunoCmd` | -| RPC Endpoint | `packages/server/internal/api/` | "Import Bruno Collection" | -| Translator | `packages/server/pkg/translate/topencollection/` | New package | -| Importer | `apps/cli/internal/importer/` | Existing `RunImport()` callback | +| Layer | Location | Pattern | +| ------------ | ------------------------------------------------ | ------------------------------- | +| CLI Command | `apps/cli/cmd/import.go` | Add `importBrunoCmd` | +| RPC Endpoint | `packages/server/internal/api/` | "Import Bruno Collection" | +| Translator | `packages/server/pkg/translate/topencollection/` | New package | +| Importer | `apps/cli/internal/importer/` | Existing `RunImport()` callback | ### 1.2 OpenCollection YAML Format Reference @@ -244,14 +250,14 @@ my-bruno-collection/ #### Collection Root (`opencollection.yml`) ```yaml -opencollection: "1.0.0" +opencollection: '1.0.0' info: - name: "My API Collection" - summary: "A collection for testing our REST API" - version: "2.1.0" + name: 'My API Collection' + summary: 'A collection for testing our REST API' + version: '2.1.0' authors: - - name: "Jane Doe" - email: "[email protected]" + - name: 'Jane Doe' + email: '[email protected]' ``` #### Request File Structure @@ -270,14 +276,14 @@ http: - name: Content-Type value: application/json - name: 
Authorization - value: "Bearer {{token}}" + value: 'Bearer {{token}}' disabled: true params: - name: filter value: active type: query - name: id - value: "123" + value: '123' type: path body: type: json @@ -288,13 +294,13 @@ http: } auth: type: bearer - token: "{{token}}" + token: '{{token}}' runtime: assertions: - expression: res.status operator: eq - value: "201" + value: '201' settings: encodeUrl: true @@ -437,28 +443,28 @@ func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*OpenCol #### Mapping Table: OpenCollection → DevTools -| OpenCollection YAML | DevTools Model | Notes | -|---|---|---| -| `info.name` | `mhttp.HTTP.Name` | | -| `info.seq` | `mfile.File.Order` | Float64 ordering | -| `http.method` | `mhttp.HTTP.Method` | Uppercase | -| `http.url` | `mhttp.HTTP.Url` | | -| `http.headers` | `[]mhttp.HTTPHeader` | `disabled` → `Enabled: false` | -| `http.params` (query) | `[]mhttp.HTTPSearchParam` | | -| `http.body.type: json/xml/text` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | -| `http.body.type: form-urlencoded` | `[]mhttp.HTTPBodyUrlencoded` | | -| `http.body.type: multipart-form` | `[]mhttp.HTTPBodyForm` | | -| `http.auth.type: bearer` | `mhttp.HTTPHeader` | → `Authorization: Bearer ` | -| `http.auth.type: basic` | `mhttp.HTTPHeader` | → `Authorization: Basic ` | -| `http.auth.type: apikey` | Header or SearchParam | Based on `placement` | -| `runtime.assertions` | `[]mhttp.HTTPAssert` | `expr operator value` format | -| `info.type: graphql` | **Skipped** (log warning) | Not supported yet | -| `info.type: ws` | **Skipped** (log warning) | WebSocket not supported yet | -| `info.type: grpc` | **Skipped** (log warning) | gRPC not supported yet | -| `runtime.scripts` | Not imported (log warning) | DevTools uses JS flow nodes | -| `docs` | `mhttp.HTTP.Description` | | -| Directory structure | `mfile.File` hierarchy | Nesting preserved | -| `environments/*.yml` | `menv.Env` + `menv.Variable` | | +| OpenCollection YAML | DevTools Model | Notes | 
+| --------------------------------- | ---------------------------- | --------------------------------- | +| `info.name` | `mhttp.HTTP.Name` | | +| `info.seq` | `mfile.File.Order` | Float64 ordering | +| `http.method` | `mhttp.HTTP.Method` | Uppercase | +| `http.url` | `mhttp.HTTP.Url` | | +| `http.headers` | `[]mhttp.HTTPHeader` | `disabled` → `Enabled: false` | +| `http.params` (query) | `[]mhttp.HTTPSearchParam` | | +| `http.body.type: json/xml/text` | `mhttp.HTTPBodyRaw` | `BodyKind: Raw` | +| `http.body.type: form-urlencoded` | `[]mhttp.HTTPBodyUrlencoded` | | +| `http.body.type: multipart-form` | `[]mhttp.HTTPBodyForm` | | +| `http.auth.type: bearer` | `mhttp.HTTPHeader` | → `Authorization: Bearer ` | +| `http.auth.type: basic` | `mhttp.HTTPHeader` | → `Authorization: Basic ` | +| `http.auth.type: apikey` | Header or SearchParam | Based on `placement` | +| `runtime.assertions` | `[]mhttp.HTTPAssert` | `expr operator value` format | +| `info.type: graphql` | **Skipped** (log warning) | Not supported yet | +| `info.type: ws` | **Skipped** (log warning) | WebSocket not supported yet | +| `info.type: grpc` | **Skipped** (log warning) | gRPC not supported yet | +| `runtime.scripts` | Not imported (log warning) | DevTools uses JS flow nodes | +| `docs` | `mhttp.HTTP.Description` | | +| Directory structure | `mfile.File` hierarchy | Nesting preserved | +| `environments/*.yml` | `menv.Env` + `menv.Variable` | | ### 1.5 Package Structure @@ -525,31 +531,31 @@ Request files are `YamlRequestDefV2` (with `Order` field added to the struct): ```yaml name: Get Users method: GET -url: "{{base_url}}/users" -description: "Fetch all users with optional pagination" +url: '{{base_url}}/users' +description: 'Fetch all users with optional pagination' order: 1 headers: - name: Authorization - value: "Bearer {{token}}" + value: 'Bearer {{token}}' - name: Accept value: application/json - name: X-Debug - value: "true" + value: 'true' enabled: false query_params: - name: page - value: 
"1" + value: '1' - name: limit - value: "10" + value: '10' enabled: false ``` ```yaml name: Create User method: POST -url: "{{base_url}}/users" +url: '{{base_url}}/users' order: 2 headers: @@ -565,24 +571,24 @@ body: } assertions: - - "res.status eq 201" - - "res.body.id neq null" + - 'res.status eq 201' + - 'res.body.id neq null' ``` ```yaml name: Upload File method: POST -url: "{{base_url}}/upload" +url: '{{base_url}}/upload' order: 3 body: type: form_data form_data: - name: file - value: "@./fixtures/test.png" - description: "File to upload" + value: '@./fixtures/test.png' + description: 'File to upload' - name: description - value: "Test upload" + value: 'Test upload' ``` ### 2.4 Flow File Format @@ -594,13 +600,13 @@ name: Smoke Test variables: - name: auth_token type: string - default: "" + default: '' steps: - request: name: Login method: POST - url: "{{base_url}}/auth/login" + url: '{{base_url}}/auth/login' body: type: raw raw: '{"email": "test@example.com", "password": "test"}' @@ -609,9 +615,9 @@ steps: name: Get Profile depends_on: [Login] method: GET - url: "{{base_url}}/users/me" + url: '{{base_url}}/users/me' headers: - Authorization: "Bearer {{Login.response.body.token}}" + Authorization: 'Bearer {{Login.response.body.token}}' - js: name: Validate Response @@ -627,8 +633,8 @@ Environment files use `YamlEnvironmentV2` directly: ```yaml name: Development variables: - base_url: "http://localhost:3000" - token: "dev-token-123" + base_url: 'http://localhost:3000' + token: 'dev-token-123' ``` ### 2.6 Changes to `yamlflowsimplev2` @@ -911,20 +917,20 @@ func (m *SyncManager) Shutdown() error ### 3.8 Safety Mechanisms -| Mechanism | Implementation | -|---|---| -| Path validation | `filepath.Rel()` must not escape collection root | -| Filename sanitization | Strip invalid chars, truncate at 255 | -| Write stabilization | 80ms debounce on watcher events | -| Autosave debounce | 500ms debounce on SQLite→disk writes | +| Mechanism | Implementation | +| 
---------------------- | ---------------------------------------------------- | +| Path validation | `filepath.Rel()` must not escape collection root | +| Filename sanitization | Strip invalid chars, truncate at 255 | +| Write stabilization | 80ms debounce on watcher events | +| Autosave debounce | 500ms debounce on SQLite→disk writes | | Self-write suppression | 2s window to suppress watcher events from our writes | -| Atomic writes | Write temp file → `os.Rename()` | -| UID preservation | `pathToID` map persists during session | -| Conflict resolution | Folder always wins (it's the source of truth) | -| Large file guard | Skip files >5MB | -| Cross-platform | `filepath.Clean/Rel/Join`, handle `\r\n` | -| Recursive watch | Walk tree on start, add subdirs on `DirCreated` | -| Max depth | 20 levels | +| Atomic writes | Write temp file → `os.Rename()` | +| UID preservation | `pathToID` map persists during session | +| Conflict resolution | Folder always wins (it's the source of truth) | +| Large file guard | Skip files >5MB | +| Cross-platform | `filepath.Clean/Rel/Join`, handle `\r\n` | +| Recursive watch | Walk tree on start, add subdirs on `DirCreated` | +| Max depth | 20 levels | --- @@ -964,6 +970,7 @@ type SyncStatus { ### 4.2 Desktop UI Integration **New UI elements needed:** + - Workspace settings: "Link to Folder" button with folder picker - Workspace settings: "Unlink Folder" button - Status bar: sync status indicator (synced, syncing, error) @@ -998,6 +1005,7 @@ devtools run --env dev # With environment **Scope**: Parse Bruno's OpenCollection YAML directories → DevTools models. **Files**: + ``` packages/server/pkg/translate/topencollection/ ├── types.go, converter.go, collection.go, environment.go @@ -1012,6 +1020,7 @@ packages/server/pkg/translate/topencollection/ **Scope**: Add `Order` field to `YamlRequestDefV2` (only change to `yamlflowsimplev2`). Create `openyaml` package — the OpenYAML format with multi-file collection support. No Bruno dependency. 
**Files**: + ``` packages/server/pkg/translate/yamlflowsimplev2/ └── types.go # Add Order to YamlRequestDefV2 @@ -1032,6 +1041,7 @@ packages/server/pkg/openyaml/ # NEW: OpenYAML format **Scope**: Add `sync_path`, `sync_format`, `sync_enabled` to workspace table and model. **Files**: + - `packages/db/pkg/sqlc/schema/` — new migration SQL - `packages/db/pkg/sqlc/queries/` — updated workspace queries - `packages/server/pkg/model/mworkspace/` — updated model @@ -1043,6 +1053,7 @@ packages/server/pkg/openyaml/ # NEW: OpenYAML format **Scope**: `fsnotify` watcher, debouncer, self-write tracker, SyncCoordinator, SyncManager. **Files**: + ``` packages/server/pkg/foldersync/ ├── watcher.go, debouncer.go, filter.go, selftrack.go @@ -1057,6 +1068,7 @@ packages/server/pkg/foldersync/ **Scope**: TypeSpec definitions, RPC handlers for sync management, CLI `import bruno` command. **Files**: + - `packages/spec/` — new TypeSpec definitions - `packages/server/internal/api/` — RPC handlers - `apps/cli/cmd/import.go` — `importBrunoCmd` @@ -1068,6 +1080,7 @@ packages/server/pkg/foldersync/ **Scope**: Electron folder picker, workspace sync settings UI, status indicators. **Files**: + - `packages/client/` — React hooks/services for sync - `packages/ui/` — sync status components - `apps/desktop/` — Electron IPC for folder picker @@ -1079,6 +1092,7 @@ packages/server/pkg/foldersync/ **Scope**: `devtools run ` command. **Files**: + - `apps/cli/cmd/run.go` **Deps**: Phase 2 (openyaml), existing runner @@ -1101,6 +1115,7 @@ Phase 2 ──────────────────────── ``` **Parallel work:** + - Phase 1, 2, 3 can all be developed in parallel - Phase 4 depends on 2+3 - Phase 5 depends on 1+4 @@ -1110,7 +1125,7 @@ Phase 2 ──────────────────────── ## External Dependencies -| Dependency | Purpose | Already in use? | -|---|---|---| -| `gopkg.in/yaml.v3` | YAML parsing | Yes (`yamlflowsimplev2`) | -| `github.com/fsnotify/fsnotify` | Filesystem notifications | No (new) | +| Dependency | Purpose | Already in use? 
| +| ------------------------------ | ------------------------ | ------------------------ | +| `gopkg.in/yaml.v3` | YAML parsing | Yes (`yamlflowsimplev2`) | +| `github.com/fsnotify/fsnotify` | Filesystem notifications | No (new) | diff --git a/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml b/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml index 8ac570a5..73d8b65a 100644 --- a/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml +++ b/packages/server/pkg/openyaml/testdata/collection/environments/dev.yaml @@ -1,4 +1,4 @@ name: Development variables: - base_url: "http://localhost:3000" - token: "dev-token-123" + base_url: 'http://localhost:3000' + token: 'dev-token-123' diff --git a/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml b/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml index 5c6549b2..95ce9f97 100644 --- a/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml +++ b/packages/server/pkg/openyaml/testdata/collection/flows/smoke-test.yaml @@ -1,9 +1,9 @@ name: Smoke Test variables: - name: auth_token - value: "" + value: '' steps: - request: name: Get Users method: GET - url: "{{base_url}}/users" + url: '{{base_url}}/users' diff --git a/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml b/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml index 542eab4b..14e85835 100644 --- a/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml +++ b/packages/server/pkg/openyaml/testdata/collection/users/create-user.yaml @@ -1,6 +1,6 @@ name: Create User method: POST -url: "{{base_url}}/users" +url: '{{base_url}}/users' order: 2 headers: Content-Type: application/json @@ -12,4 +12,4 @@ body: "email": "john@example.com" } assertions: - - "res.status eq 201" + - 'res.status eq 201' diff --git a/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml 
b/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml index 047ed9d6..351775c1 100644 --- a/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml +++ b/packages/server/pkg/openyaml/testdata/collection/users/get-users.yaml @@ -1,15 +1,15 @@ name: Get Users method: GET -url: "{{base_url}}/users" +url: '{{base_url}}/users' description: Fetch all users order: 1 headers: Accept: application/json query_params: - name: page - value: "1" + value: '1' - name: limit - value: "10" + value: '10' enabled: false assertions: - - "res.status eq 200" + - 'res.status eq 200' diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml index aaa53be2..b088becc 100644 --- a/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/auth/login.yml @@ -5,7 +5,7 @@ info: http: method: POST - url: "{{base_url}}/auth/login" + url: '{{base_url}}/auth/login' headers: - name: Content-Type value: application/json @@ -21,4 +21,4 @@ runtime: assertions: - expression: res.status operator: eq - value: "200" + value: '200' diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml index eeb167d4..425a2e1c 100644 --- a/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/dev.yml @@ -1,6 +1,6 @@ name: Development variables: - name: base_url - value: "http://localhost:3000" + value: 'http://localhost:3000' - name: token - value: "dev-token-123" + value: 'dev-token-123' diff --git 
a/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml index 7471da38..ef2d82b2 100644 --- a/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/environments/prod.yml @@ -1,7 +1,7 @@ name: Production variables: - name: base_url - value: "https://api.example.com" + value: 'https://api.example.com' - name: token - value: "prod-token-456" + value: 'prod-token-456' enabled: false diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml index 41e0ddb3..7f7ef46e 100644 --- a/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/opencollection.yml @@ -1,5 +1,5 @@ -opencollection: "1.0.0" +opencollection: '1.0.0' info: - name: "Test API Collection" - summary: "A test collection for unit tests" - version: "1.0.0" + name: 'Test API Collection' + summary: 'A test collection for unit tests' + version: '1.0.0' diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml index dde47f9c..e2290bbb 100644 --- a/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/create-user.yml @@ -5,7 +5,7 @@ info: http: method: POST - url: "{{base_url}}/users" + url: '{{base_url}}/users' headers: - name: Content-Type value: application/json @@ -18,15 +18,15 @@ http: } auth: type: bearer - token: "{{token}}" + token: '{{token}}' 
runtime: assertions: - expression: res.status operator: eq - value: "201" + value: '201' - expression: res.body.id operator: neq - value: "" + value: '' -docs: "Creates a new user account" +docs: 'Creates a new user account' diff --git a/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml index e58e09ac..7a52a17a 100644 --- a/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml +++ b/packages/server/pkg/translate/topencollection/testdata/basic-collection/users/get-users.yml @@ -5,22 +5,22 @@ info: http: method: GET - url: "{{base_url}}/users" + url: '{{base_url}}/users' headers: - name: Accept value: application/json params: - name: page - value: "1" + value: '1' type: query - name: limit - value: "10" + value: '10' type: query runtime: assertions: - expression: res.status operator: eq - value: "200" + value: '200' -docs: "Fetch all users with pagination" +docs: 'Fetch all users with pagination' From b55143fc7aa21a1e39319ce4639832e8451e2c46 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 21:00:18 +0000 Subject: [PATCH 18/20] fix: add sqlc overrides for workspace sync columns Add column overrides in sqlc.yaml for workspaces.sync_path and workspaces.sync_format to generate *string instead of sql.NullString. Regenerated sqlc code matches the model layer types. 
https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- go.work.sum | 332 ++++++++++++++++++++- packages/db/pkg/sqlc/gen/db.go | 20 +- packages/db/pkg/sqlc/gen/workspaces.sql.go | 110 +++---- packages/db/pkg/sqlc/sqlc.yaml | 10 + 4 files changed, 403 insertions(+), 69 deletions(-) diff --git a/go.work.sum b/go.work.sum index 746f0e55..4e5215be 100644 --- a/go.work.sum +++ b/go.work.sum @@ -27,8 +27,10 @@ cloud.google.com/go v0.118.3/go.mod h1:Lhs3YLnBlwJ4KA6nuObNMZ/fCbOQBPuWKPoE0Wa/9 cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= cloud.google.com/go v0.121.2 h1:v2qQpN6Dx9x2NmwrqlesOt3Ys4ol5/lFZ6Mg1B7OJCg= cloud.google.com/go v0.121.2/go.mod h1:nRFlrHq39MNVWu+zESP2PosMWA0ryJw8KUBZ2iZpxbw= +cloud.google.com/go/accessapproval v1.8.6 h1:UkmDPCKvj24bkGVrvgJPcgSDkmIPw/bAmOiDb9avOiE= cloud.google.com/go/accessapproval v1.8.6/go.mod h1:FfmTs7Emex5UvfnnpMkhuNkRCP85URnBFt5ClLxhZaQ= cloud.google.com/go/accessapproval v1.8.8/go.mod h1:RFwPY9JDKseP4gJrX1BlAVsP5O6kI8NdGlTmaeDefmk= +cloud.google.com/go/accesscontextmanager v1.9.6 h1:2LnncRqfYB8NEdh9+FeYxAt9POTW/0zVboktnRlO11w= cloud.google.com/go/accesscontextmanager v1.9.6/go.mod h1:884XHwy1AQpCX5Cj2VqYse77gfLaq9f8emE2bYriilk= cloud.google.com/go/accesscontextmanager v1.9.7/go.mod h1:i6e0nd5CPcrh7+YwGq4bKvju5YB9sgoAip+mXU73aMM= cloud.google.com/go/ai v0.8.0 h1:rXUEz8Wp2OlrM8r1bfmpF2+VKqc1VJpafE3HgzRnD/w= @@ -38,24 +40,35 @@ cloud.google.com/go/ai v0.12.1/go.mod h1:5vIPNe1ZQsVZqCliXIPL4QnhObQQY4d9hAGHdVc cloud.google.com/go/aiplatform v1.89.0 h1:niSJYc6ldWWVM9faXPo1Et1MVSQoLvVGriD7fwbJdtE= cloud.google.com/go/aiplatform v1.89.0/go.mod h1:TzZtegPkinfXTtXVvZZpxx7noINFMVDrLkE7cEWhYEk= cloud.google.com/go/aiplatform v1.109.0/go.mod h1:4rwKOMdubQOND81AlO3EckcskvEFCYSzXKfn42GMm8k= +cloud.google.com/go/alloydb v1.14.0 h1:aEmmIHmiHDs46wTr/YqXuumuUGNc5QKYA7317nEFj2Y= cloud.google.com/go/alloydb v1.14.0/go.mod h1:OTBY1HoL0Z8PsHoMMVhkaUPKyY8oP7hzIAe/Dna6UHk= +cloud.google.com/go/alloydbconn v1.13.2 
h1:ipxhHyQAZ0NS5XUuPSXlSCPEUI83YVibkLcFAOfSMW0= cloud.google.com/go/alloydbconn v1.13.2/go.mod h1:0wlYQAOr2XuvxYsvNNVckmG2v17WVUKzMD+gmTOibSU= +cloud.google.com/go/analytics v0.28.1 h1:W2ft49J/LeEj9A07Jsd5Q2kAzajK0j0IffOyyzbxw04= cloud.google.com/go/analytics v0.28.1/go.mod h1:iPaIVr5iXPB3JzkKPW1JddswksACRFl3NSHgVHsuYC4= cloud.google.com/go/analytics v0.30.1/go.mod h1:V/FnINU5kMOsttZnKPnXfKi6clJUHTEXUKQjHxcNK8A= +cloud.google.com/go/apigateway v1.7.6 h1:do+u3rjDYuTxD2ypRfv4uwTMoy/VHFLclvaYcb5Mv6I= cloud.google.com/go/apigateway v1.7.6/go.mod h1:SiBx36VPjShaOCk8Emf63M2t2c1yF+I7mYZaId7OHiA= cloud.google.com/go/apigateway v1.7.7/go.mod h1:j1bCmrUK1BzVHpiIyTApxB7cRyhivKzltqLmp6j6i7U= +cloud.google.com/go/apigeeconnect v1.7.6 h1:ijEJSni5xROOn1YyiHgqcW0B0TWr0di9VgIi2gvyNjY= cloud.google.com/go/apigeeconnect v1.7.6/go.mod h1:zqDhHY99YSn2li6OeEjFpAlhXYnXKl6DFb/fGu0ye2w= cloud.google.com/go/apigeeconnect v1.7.7/go.mod h1:ftGK3nca0JePiVLl0A6alaMjKdOc5C+sAkFMyH2RH8U= +cloud.google.com/go/apigeeregistry v0.9.6 h1:TgdjAoGoRY81DEc2LYsYvi/OqCFImMzAk/TVKiSRsQw= cloud.google.com/go/apigeeregistry v0.9.6/go.mod h1:AFEepJBKPtGDfgabG2HWaLH453VVWWFFs3P4W00jbPs= cloud.google.com/go/apigeeregistry v0.10.0/go.mod h1:SAlF5OhKvyLDuwWAaFAIVJjrEqKRrGTPkJs+TWNnSqg= +cloud.google.com/go/appengine v1.9.6 h1:JJyY8icMmQeWfQ+d36IhkGvd3Guzvw0UAkvxT0wmUx8= cloud.google.com/go/appengine v1.9.6/go.mod h1:jPp9T7Opvzl97qytaRGPwoH7pFI3GAcLDaui1K8PNjY= cloud.google.com/go/appengine v1.9.7/go.mod h1:y1XpGVeAhbsNzHida79cHbr3pFRsym0ob8xnC8yphbo= +cloud.google.com/go/area120 v0.9.6 h1:iJrZ6AleZr4l+q0/fWVANFOhs90KiSB1Ccait5OYyNg= cloud.google.com/go/area120 v0.9.6/go.mod h1:qKSokqe0iTmwBDA3tbLWonMEnh0pMAH4YxiceiHUed4= cloud.google.com/go/area120 v0.9.7/go.mod h1:5nJ0yksmjOMfc4Zpk+okWfJ3A1004FvB82rfia+ZLaY= +cloud.google.com/go/artifactregistry v1.17.1 h1:A20kj2S2HO9vlyBVyVFHPxArjxkXvLP5LjcdE7NhaPc= cloud.google.com/go/artifactregistry v1.17.1/go.mod h1:06gLv5QwQPWtaudI2fWO37gfwwRUHwxm3gA8Fe568Hc= 
cloud.google.com/go/artifactregistry v1.17.2/go.mod h1:h4CIl9TJZskg9c9u1gC9vTsOTo1PrAnnxntprqS3AjM= +cloud.google.com/go/asset v1.21.1 h1:i55wWC/EwVdHMyJgRfbLp/L6ez4nQuOpZwSxkuqN9ek= cloud.google.com/go/asset v1.21.1/go.mod h1:7AzY1GCC+s1O73yzLM1IpHFLHz3ws2OigmCpOQHwebk= cloud.google.com/go/asset v1.22.0/go.mod h1:q80JP2TeWWzMCazYnrAfDf36aQKf1QiKzzpNLflJwf8= +cloud.google.com/go/assuredworkloads v1.12.6 h1:ip/shfJYx6lrHBWYADjrrrubcm7uZzy50TTF5tPG7ek= cloud.google.com/go/assuredworkloads v1.12.6/go.mod h1:QyZHd7nH08fmZ+G4ElihV1zoZ7H0FQCpgS0YWtwjCKo= cloud.google.com/go/assuredworkloads v1.13.0/go.mod h1:o/oHEOnUlribR+uJWTKQo8A5RhSl9K9FNeMOew4TJ3M= cloud.google.com/go/auth v0.3.0/go.mod h1:lBv6NKTWp8E3LPzmO1TbiiRKc4drLOfHsgmlH9ogv5w= @@ -73,12 +86,16 @@ cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRk cloud.google.com/go/auth/oauth2adapt v0.2.6/go.mod h1:AlmsELtlEBnaNTL7jCj8VQFLy6mbZv0s4Q7NGBeQ5E8= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/automl v1.14.7 h1:ZLj48Ur2Qcso4M3bgOtjsOmeV5Ee92N14wuOc8OW+L0= cloud.google.com/go/automl v1.14.7/go.mod h1:8a4XbIH5pdvrReOU72oB+H3pOw2JBxo9XTk39oljObE= cloud.google.com/go/automl v1.15.0/go.mod h1:U9zOtQb8zVrFNGTuW3BfxeqmLyeleLgT9B12EaXfODg= +cloud.google.com/go/baremetalsolution v1.3.6 h1:9bdGlpY1LgLONQjFsDwrkjLzdPTlROpfU+GhA97YpOk= cloud.google.com/go/baremetalsolution v1.3.6/go.mod h1:7/CS0LzpLccRGO0HL3q2Rofxas2JwjREKut414sE9iM= cloud.google.com/go/baremetalsolution v1.4.0/go.mod h1:K6C6g4aS8LW95I0fEHZiBsBlh0UxwDLGf+S/vyfXbvg= +cloud.google.com/go/batch v1.12.2 h1:gWQdvdPplptpvrkqF6ibtxZkOsYKLTFbxYawHa/TvCg= cloud.google.com/go/batch v1.12.2/go.mod h1:tbnuTN/Iw59/n1yjAYKV2aZUjvMM2VJqAgvUgft6UEU= cloud.google.com/go/batch v1.13.0/go.mod h1:yHFeqBn8wUjmJs4sYbwZ7N3HdeGA+FkPAXjoCKMwGak= +cloud.google.com/go/beyondcorp v1.1.6 
h1:4FcR+4QmcNGkhVij6TrYS4AQVNLBo7PBXKxNrKzpclQ= cloud.google.com/go/beyondcorp v1.1.6/go.mod h1:V1PigSWPGh5L/vRRmyutfnjAbkxLI2aWqJDdxKbwvsQ= cloud.google.com/go/beyondcorp v1.2.0/go.mod h1:sszcgxpPPBEfLzbI0aYCTg6tT1tyt3CmKav3NZIUcvI= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= @@ -88,23 +105,33 @@ cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUM cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0 h1:PQcPefKFdaIzjQFbiyOgAqyx8q5djaE7x9Sqe712DPA= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/bigquery v1.69.0 h1:rZvHnjSUs5sHK3F9awiuFk2PeOaB8suqNuim21GbaTc= cloud.google.com/go/bigquery v1.69.0/go.mod h1:TdGLquA3h/mGg+McX+GsqG9afAzTAcldMjqhdjHTLew= cloud.google.com/go/bigquery v1.72.0/go.mod h1:GUbRtmeCckOE85endLherHD9RsujY+gS7i++c1CqssQ= +cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= cloud.google.com/go/bigtable v1.40.1/go.mod h1:LtPzCcrAFaGRZ82Hs8xMueUeYW9Jw12AmNdUTMfDnh4= +cloud.google.com/go/billing v1.20.4 h1:pqM5/c9UGydB9H90IPCxSvfCNLUPazAOSMsZkz5q5P4= cloud.google.com/go/billing v1.20.4/go.mod h1:hBm7iUmGKGCnBm6Wp439YgEdt+OnefEq/Ib9SlJYxIU= cloud.google.com/go/billing v1.21.0/go.mod h1:ZGairB3EVnb3i09E2SxFxo50p5unPaMTuo1jh6jW9js= +cloud.google.com/go/binaryauthorization v1.9.5 h1:T0zYEroXT+y0O/x/yZd5SwQdFv4UbUINjvJyJKzDm0Q= cloud.google.com/go/binaryauthorization v1.9.5/go.mod h1:CV5GkS2eiY461Bzv+OH3r5/AsuB6zny+MruRju3ccB8= cloud.google.com/go/binaryauthorization v1.10.0/go.mod h1:WOuiaQkI4PU/okwrcREjSAr2AUtjQgVe+PlrXKOmKKw= +cloud.google.com/go/certificatemanager v1.9.5 h1:+ZPglfDurCcsv4azizDFpBucD1IkRjWjbnU7zceyjfY= cloud.google.com/go/certificatemanager v1.9.5/go.mod h1:kn7gxT/80oVGhjL8rurMUYD36AOimgtzSBPadtAeffs= 
cloud.google.com/go/certificatemanager v1.9.6/go.mod h1:vWogV874jKZkSRDFCMM3r7wqybv8WXs3XhyNff6o/Zo= +cloud.google.com/go/channel v1.19.5 h1:UI+ZsRkS15hi9DRF+WAvTVLVuSeZiRmvCU8cjkjOwUU= cloud.google.com/go/channel v1.19.5/go.mod h1:vevu+LK8Oy1Yuf7lcpDbkQQQm5I7oiY5fFTn3uwfQLY= cloud.google.com/go/channel v1.20.0/go.mod h1:nBR1Lz+/1TjSA16HTllvW9Y+QULODj3o3jEKrNNeOp4= +cloud.google.com/go/cloudbuild v1.22.2 h1:4LlrIFa3IFLgD1mGEXmUE4cm9fYoU71OLwTvjM7Dg3c= cloud.google.com/go/cloudbuild v1.22.2/go.mod h1:rPyXfINSgMqMZvuTk1DbZcbKYtvbYF/i9IXQ7eeEMIM= cloud.google.com/go/cloudbuild v1.23.1/go.mod h1:Gh/k1NnFRw1DkhekO2BaR4MTg30Op6EQQHCUZCIyTAg= +cloud.google.com/go/clouddms v1.8.7 h1:IWJbQBEECTaNanDRN1XdR7FU53MJ1nylTl3s9T3MuyI= cloud.google.com/go/clouddms v1.8.7/go.mod h1:DhWLd3nzHP8GoHkA6hOhso0R9Iou+IGggNqlVaq/KZ4= cloud.google.com/go/clouddms v1.8.8/go.mod h1:QtCyw+a73dlkDb2q20aTAPvfaTZCepDDi6Gb1AKq0a4= +cloud.google.com/go/cloudsqlconn v1.14.1 h1:OtVShGJMQ/WEOTNP7TWidx0wnDE+eVYXeSg1ANTJpCI= cloud.google.com/go/cloudsqlconn v1.14.1/go.mod h1:pM5Xp20GsQosQ/cP9awtha5SMgmzbLubb/dbVsTg3Fo= +cloud.google.com/go/cloudtasks v1.13.6 h1:Fwan19UiNoFD+3KY0MnNHE5DyixOxNzS1mZ4ChOdpy0= cloud.google.com/go/cloudtasks v1.13.6/go.mod h1:/IDaQqGKMixD+ayM43CfsvWF2k36GeomEuy9gL4gLmU= cloud.google.com/go/cloudtasks v1.13.7/go.mod h1:H0TThOUG+Ml34e2+ZtW6k6nt4i9KuH3nYAJ5mxh7OM4= cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc= @@ -117,86 +144,124 @@ cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixA cloud.google.com/go/compute/metadata v0.5.2/go.mod h1:C66sj2AluDcIqakBq/M8lw8/ybHgOZqin2obFxa/E5k= cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/contactcenterinsights v1.17.3 h1:lenyU3uzHwKDveCwmpfNxHYvLS3uEBWdn+O7+rSxy+Q= cloud.google.com/go/contactcenterinsights 
v1.17.3/go.mod h1:7Uu2CpxS3f6XxhRdlEzYAkrChpR5P5QfcdGAFEdHOG8= cloud.google.com/go/contactcenterinsights v1.17.4/go.mod h1:kZe6yOnKDfpPz2GphDHynxk/Spx+53UX/pGf+SmWAKM= +cloud.google.com/go/container v1.43.0 h1:A6J92FJPfxTvyX7MHF+w4t2W9WCqvHOi9UB5SAeSy3w= cloud.google.com/go/container v1.43.0/go.mod h1:ETU9WZ1KM9ikEKLzrhRVao7KHtalDQu6aPqM34zDr/U= cloud.google.com/go/container v1.45.0/go.mod h1:eB6jUfJLjne9VsTDGcH7mnj6JyZK+KOUIA6KZnYE/ds= +cloud.google.com/go/containeranalysis v0.14.1 h1:1SoHlNqL3XrhqcoozB+3eoHif2sRUFtp/JeASQTtGKo= cloud.google.com/go/containeranalysis v0.14.1/go.mod h1:28e+tlZgauWGHmEbnI5UfIsjMmrkoR1tFN0K2i71jBI= cloud.google.com/go/containeranalysis v0.14.2/go.mod h1:FjppROiUtP9cyMegdWdY/TsBSGc6kqh1GjA2NOJXXL8= +cloud.google.com/go/datacatalog v1.26.0 h1:eFgygb3DTufTWWUB8ARk+dSuXz+aefNJXTlkWlQcWwE= cloud.google.com/go/datacatalog v1.26.0/go.mod h1:bLN2HLBAwB3kLTFT5ZKLHVPj/weNz6bR0c7nYp0LE14= cloud.google.com/go/datacatalog v1.26.1/go.mod h1:2Qcq8vsHNxMDgjgadRFmFG47Y+uuIVsyEGUrlrKEdrg= +cloud.google.com/go/dataflow v0.11.0 h1:AdhB4cAkMOC9NtrHJxpKOVvO/VqBLaIyk0tEEhbGjYM= cloud.google.com/go/dataflow v0.11.0/go.mod h1:gNHC9fUjlV9miu0hd4oQaXibIuVYTQvZhMdPievKsPk= cloud.google.com/go/dataflow v0.11.1/go.mod h1:3s6y/h5Qz7uuxTmKJKBifkYZ3zs63jS+6VGtSu8Cf7Y= +cloud.google.com/go/dataform v0.12.0 h1:0eCPTPUC/RZ863aVfXTJLkg0tEpdpn62VD6ywSmmzxM= cloud.google.com/go/dataform v0.12.0/go.mod h1:PuDIEY0lSVuPrZqcFji1fmr5RRvz3DGz4YP/cONc8g4= cloud.google.com/go/dataform v0.12.1/go.mod h1:atGS8ReRjfNDUQib0X/o/7Gi2bqHI2G7/J86LKiGimE= +cloud.google.com/go/datafusion v1.8.6 h1:GZ6J+CR8CEeWAj8luRCtr8GvImSQRkArIIqGiZOnzBA= cloud.google.com/go/datafusion v1.8.6/go.mod h1:fCyKJF2zUKC+O3hc2F9ja5EUCAbT4zcH692z8HiFZFw= cloud.google.com/go/datafusion v1.8.7/go.mod h1:4dkFb1la41qCEXh1AzYtFwl842bu2ikTUXyKhjvFCb0= +cloud.google.com/go/datalabeling v0.9.6 h1:VOZ5U+78ttnhNCEID7qdeogqZQzK5N+LPHIQ9Q3YDsc= cloud.google.com/go/datalabeling v0.9.6/go.mod 
h1:n7o4x0vtPensZOoFwFa4UfZgkSZm8Qs0Pg/T3kQjXSM= cloud.google.com/go/datalabeling v0.9.7/go.mod h1:EEUVn+wNn3jl19P2S13FqE1s9LsKzRsPuuMRq2CMsOk= +cloud.google.com/go/dataplex v1.25.3 h1:Xr0Toh6wyBlmL3H4EPu1YKwxUtkDSzzq+IP0iLc88kk= cloud.google.com/go/dataplex v1.25.3/go.mod h1:wOJXnOg6bem0tyslu4hZBTncfqcPNDpYGKzed3+bd+E= cloud.google.com/go/dataplex v1.28.0/go.mod h1:VB+xlYJiJ5kreonXsa2cHPj0A3CfPh/mgiHG4JFhbUA= +cloud.google.com/go/dataproc/v2 v2.11.2 h1:KhC8wdLILpAs17yeTG6Miwg1v0nOP/OXD+9QNg3w6AQ= cloud.google.com/go/dataproc/v2 v2.11.2/go.mod h1:xwukBjtfiO4vMEa1VdqyFLqJmcv7t3lo+PbLDcTEw+g= cloud.google.com/go/dataproc/v2 v2.15.0/go.mod h1:tSdkodShfzrrUNPDVEL6MdH9/mIEvp/Z9s9PBdbsZg8= +cloud.google.com/go/dataqna v0.9.7 h1:qTRAG/E3T63Xj1orefRlwupfwH9c9ERUAnWSRGp75so= cloud.google.com/go/dataqna v0.9.7/go.mod h1:4ac3r7zm7Wqm8NAc8sDIDM0v7Dz7d1e/1Ka1yMFanUM= cloud.google.com/go/dataqna v0.9.8/go.mod h1:2lHKmGPOqzzuqCc5NI0+Xrd5om4ulxGwPpLB4AnFgpA= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0 h1:/May9ojXjRkPBNVrq+oWLqmWCkr4OU5uRY29bu0mRyQ= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/datastore v1.20.0 h1:NNpXoyEqIJmZFc0ACcwBEaXnmscUpcG4NkKnbCePmiM= cloud.google.com/go/datastore v1.20.0/go.mod h1:uFo3e+aEpRfHgtp5pp0+6M0o147KoPaYNaPAKpfh8Ew= cloud.google.com/go/datastore v1.21.0/go.mod h1:9l+KyAHO+YVVcdBbNQZJu8svF17Nw5sMKuFR0LYf1nY= +cloud.google.com/go/datastream v1.14.1 h1:j+y0lUKm9pbDjJn0YcWxPI/hXNGUQ80GE6yrFuJC/JA= cloud.google.com/go/datastream v1.14.1/go.mod h1:JqMKXq/e0OMkEgfYe0nP+lDye5G2IhIlmencWxmesMo= cloud.google.com/go/datastream v1.15.1/go.mod h1:aV1Grr9LFon0YvqryE5/gF1XAhcau2uxN2OvQJPpqRw= +cloud.google.com/go/deploy v1.27.2 h1:C0VqBhFyQFp6+xgPHZAD7LeRA4XGy5YLzGmPQ2NhlLk= cloud.google.com/go/deploy v1.27.2/go.mod h1:4NHWE7ENry2A4O1i/4iAPfXHnJCZ01xckAKpZQwhg1M= cloud.google.com/go/deploy v1.27.3/go.mod 
h1:7LFIYYTSSdljYRqY3n+JSmIFdD4lv6aMD5xg0crB5iw= +cloud.google.com/go/dialogflow v1.68.2 h1:bXpoqPRf37KKxB79PKr20B/TAU/Z5iA0FnB6C5N2jrA= cloud.google.com/go/dialogflow v1.68.2/go.mod h1:E0Ocrhf5/nANZzBju8RX8rONf0PuIvz2fVj3XkbAhiY= cloud.google.com/go/dialogflow v1.71.0/go.mod h1:mP4XrpgDvPYBP+cdLxFC1WJJlkwuy0H8L1Lada9No/M= +cloud.google.com/go/dlp v1.23.0 h1:3xWRKylXxhysaQaV+DLev1YcIywFUCc7yJEE6R7ZGDQ= cloud.google.com/go/dlp v1.23.0/go.mod h1:vVT4RlyPMEMcVHexdPT6iMVac3seq3l6b8UPdYpgFrg= cloud.google.com/go/dlp v1.27.0/go.mod h1:PY4DMzV7lqRC5JvpxL05fXNeL8dknxYpFp4WjxmE22M= +cloud.google.com/go/documentai v1.37.0 h1:7fla8GcarupO15eatRTUveXCob6DOSW1Wa+1i63CM3Q= cloud.google.com/go/documentai v1.37.0/go.mod h1:qAf3ewuIUJgvSHQmmUWvM3Ogsr5A16U2WPHmiJldvLA= cloud.google.com/go/documentai v1.39.0/go.mod h1:KmlLO93F7GRU8dENXRxvt+7V8o7eCG6Y6WDitKbcYJs= +cloud.google.com/go/domains v0.10.6 h1:TI+Aavwc31KD8huOquJz0ISchCq1zSEWc9M+JcPJyxc= cloud.google.com/go/domains v0.10.6/go.mod h1:3xzG+hASKsVBA8dOPc4cIaoV3OdBHl1qgUpAvXK7pGY= cloud.google.com/go/domains v0.10.7/go.mod h1:T3WG/QUAO/52z4tUPooKS8AY7yXaFxPYn1V3F0/JbNQ= +cloud.google.com/go/edgecontainer v1.4.3 h1:9tfGCicvrki927T+hGMB0yYmwIbRuZY6JR1/awrKiZ0= cloud.google.com/go/edgecontainer v1.4.3/go.mod h1:q9Ojw2ox0uhAvFisnfPRAXFTB1nfRIOIXVWzdXMZLcE= cloud.google.com/go/edgecontainer v1.4.4/go.mod h1:yyNVHsCKtsX/0mqFdbljQw0Uo660q2dlMPaiqYiC2Tg= +cloud.google.com/go/errorreporting v0.3.2 h1:isaoPwWX8kbAOea4qahcmttoS79+gQhvKsfg5L5AgH8= cloud.google.com/go/errorreporting v0.3.2/go.mod h1:s5kjs5r3l6A8UUyIsgvAhGq6tkqyBCUss0FRpsoVTww= +cloud.google.com/go/essentialcontacts v1.7.6 h1:ysHZ4gr4plW1CL1Ur/AucUUfh20hDjSFbfjxSK0q/sk= cloud.google.com/go/essentialcontacts v1.7.6/go.mod h1:/Ycn2egr4+XfmAfxpLYsJeJlVf9MVnq9V7OMQr9R4lA= cloud.google.com/go/essentialcontacts v1.7.7/go.mod h1:ytycWAEn/aKUMRKQPMVgMrAtphEMgjbzL8vFwM3tqXs= +cloud.google.com/go/eventarc v1.15.5 h1:bZW7ZMM+XXNErg6rOZcgxUzAgz4vpReRDP3ZiGf7/sI= 
cloud.google.com/go/eventarc v1.15.5/go.mod h1:vDCqGqyY7SRiickhEGt1Zhuj81Ya4F/NtwwL3OZNskg= cloud.google.com/go/eventarc v1.17.0/go.mod h1:wB3NTIQ+l4QPirJiTMeU+YpSc5+iyoDYWV4n2/Vmh78= +cloud.google.com/go/filestore v1.10.2 h1:LjoAyp9TvVNBns3sUUzPaNsQiGpR2BReGmTS3bUCuBE= cloud.google.com/go/filestore v1.10.2/go.mod h1:w0Pr8uQeSRQfCPRsL0sYKW6NKyooRgixCkV9yyLykR4= cloud.google.com/go/filestore v1.10.3/go.mod h1:94ZGyLTx9j+aWKozPQ6Wbq1DuImie/L/HIdGMshtwac= cloud.google.com/go/firestore v1.6.1 h1:8rBq3zRjnHx8UtBvaOWqBB1xq9jH6/wltfQLlTMh2Fw= cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= +cloud.google.com/go/firestore v1.18.0 h1:cuydCaLS7Vl2SatAeivXyhbhDEIR8BDmtn4egDhIn2s= cloud.google.com/go/firestore v1.18.0/go.mod h1:5ye0v48PhseZBdcl0qbl3uttu7FIEwEYVaWm0UIEOEU= cloud.google.com/go/firestore v1.20.0/go.mod h1:jqu4yKdBmDN5srneWzx3HlKrHFWFdlkgjgQ6BKIOFQo= +cloud.google.com/go/functions v1.19.6 h1:vJgWlvxtJG6p/JrbXAkz83DbgwOyFhZZI1Y32vUddjY= cloud.google.com/go/functions v1.19.6/go.mod h1:0G0RnIlbM4MJEycfbPZlCzSf2lPOjL7toLDwl+r0ZBw= cloud.google.com/go/functions v1.19.7/go.mod h1:xbcKfS7GoIcaXr2FSwmtn9NXal1JR4TV6iYZlgXffwA= +cloud.google.com/go/gkebackup v1.8.0 h1:eBqOt61yEChvj7I/GDPBbdCCRdUPudD1qrQYfYWV3Ok= cloud.google.com/go/gkebackup v1.8.0/go.mod h1:FjsjNldDilC9MWKEHExnK3kKJyTDaSdO1vF0QeWSOPU= cloud.google.com/go/gkebackup v1.8.1/go.mod h1:GAaAl+O5D9uISH5MnClUop2esQW4pDa2qe/95A4l7YQ= +cloud.google.com/go/gkeconnect v0.12.4 h1:67/rnPmF/I1Wmf7jWyKH+z4OWjU8ZUI0Vmzxvmzf3KY= cloud.google.com/go/gkeconnect v0.12.4/go.mod h1:bvpU9EbBpZnXGo3nqJ1pzbHWIfA9fYqgBMJ1VjxaZdk= cloud.google.com/go/gkeconnect v0.12.5/go.mod h1:wMD2RXcsAWlkREZWJDVeDV70PYka1iEb9stFmgpw+5o= +cloud.google.com/go/gkehub v0.15.6 h1:9iogrmNNa+drDPf/zkLH/6KGgUf7FuuyokmithoGwMQ= cloud.google.com/go/gkehub v0.15.6/go.mod h1:sRT0cOPAgI1jUJrS3gzwdYCJ1NEzVVwmnMKEwrS2QaM= cloud.google.com/go/gkehub v0.16.0/go.mod h1:ADp27Ucor8v81wY+x/5pOxTorxkPj/xswH3AUpN62GU= 
+cloud.google.com/go/gkemulticloud v1.5.3 h1:334aZmOzIt3LVBpguCof8IHaLaftcZlx+L0TGBukYkY= cloud.google.com/go/gkemulticloud v1.5.3/go.mod h1:KPFf+/RcfvmuScqwS9/2MF5exZAmXSuoSLPuaQ98Xlk= cloud.google.com/go/gkemulticloud v1.5.4/go.mod h1:7l9+6Tp4jySSGj4PStO8CE6RrHFdcRARK4ScReHX1bU= +cloud.google.com/go/grafeas v0.3.15 h1:lBjwKmhpiqOAFaE0xdqF8CqO74a99s8tUT5mCkBBxPs= cloud.google.com/go/grafeas v0.3.15/go.mod h1:irwcwIQOBlLBotGdMwme8PipnloOPqILfIvMwlmu8Pk= +cloud.google.com/go/gsuiteaddons v1.7.7 h1:sk0SxpCGIA7tIO//XdiiG29f2vrF6Pq/dsxxyBGiRBY= cloud.google.com/go/gsuiteaddons v1.7.7/go.mod h1:zTGmmKG/GEBCONsvMOY2ckDiEsq3FN+lzWGUiXccF9o= cloud.google.com/go/gsuiteaddons v1.7.8/go.mod h1:DBKNHH4YXAdd/rd6zVvtOGAJNGo0ekOh+nIjTUDEJ5U= cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU= +cloud.google.com/go/iap v1.11.2 h1:VIioCrYsyWiRGx7Y8RDNylpI6d4t1Qx5ZgSLUVmWWPo= cloud.google.com/go/iap v1.11.2/go.mod h1:Bh99DMUpP5CitL9lK0BC8MYgjjYO4b3FbyhgW1VHJvg= cloud.google.com/go/iap v1.11.3/go.mod h1:+gXO0ClH62k2LVlfhHzrpiHQNyINlEVmGAE3+DB4ShU= +cloud.google.com/go/ids v1.5.6 h1:uKGuaWozDcjg3wyf54Gd7tCH2YK8BFeH9qo1xBNiPKE= cloud.google.com/go/ids v1.5.6/go.mod h1:y3SGLmEf9KiwKsH7OHvYYVNIJAtXybqsD2z8gppsziQ= cloud.google.com/go/ids v1.5.7/go.mod h1:N3ZQOIgIBwwOu2tzyhmh3JDT+kt8PcoKkn2BRT9Qe4A= +cloud.google.com/go/iot v1.8.6 h1:A3AhugnIViAZkC3/lHAQDaXBIk2ZOPBZS0XQCyZsjjc= cloud.google.com/go/iot v1.8.6/go.mod h1:MThnkiihNkMysWNeNje2Hp0GSOpEq2Wkb/DkBCVYa0U= cloud.google.com/go/iot v1.8.7/go.mod h1:HvVcypV8LPv1yTXSLCNK+YCtqGHhq+p0F3BXETfpN+U= +cloud.google.com/go/kms v1.22.0 h1:dBRIj7+GDeeEvatJeTB19oYZNV0aj6wEqSIT/7gLqtk= cloud.google.com/go/kms v1.22.0/go.mod h1:U7mf8Sva5jpOb4bxYZdtw/9zsbIjrklYwPcvMk34AL8= cloud.google.com/go/kms v1.23.2/go.mod 
h1:rZ5kK0I7Kn9W4erhYVoIRPtpizjunlrfU4fUkumUp8g= +cloud.google.com/go/language v1.14.5 h1:BVJ/POtlnJ55LElvnQY19UOxpMVtHoHHkFJW2uHJsVU= cloud.google.com/go/language v1.14.5/go.mod h1:nl2cyAVjcBct1Hk73tzxuKebk0t2eULFCaruhetdZIA= cloud.google.com/go/language v1.14.6/go.mod h1:7y3J9OexQsfkWNGCxhT+7lb64pa60e12ZCoWDOHxJ1M= +cloud.google.com/go/lifesciences v0.10.6 h1:Vu7XF4s5KJ8+mSLIL4eaQM6JTyWXvSB54oqC+CUZH20= cloud.google.com/go/lifesciences v0.10.6/go.mod h1:1nnZwaZcBThDujs9wXzECnd1S5d+UiDkPuJWAmhRi7Q= cloud.google.com/go/lifesciences v0.10.7/go.mod h1:v3AbTki9iWttEls/Wf4ag3EqeLRHofploOcpsLnu7iY= +cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw= cloud.google.com/go/longrunning v0.5.6/go.mod h1:vUaDrWYOMKRuhiv6JBnn49YxCPz2Ayn9GqyjaBT8/mA= @@ -206,43 +271,61 @@ cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrc cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= cloud.google.com/go/longrunning v0.7.0/go.mod h1:ySn2yXmjbK9Ba0zsQqunhDkYi0+9rlXIwnoAf+h+TPY= +cloud.google.com/go/managedidentities v1.7.6 h1:zrZVWXZJlmHnfpyCrTQIbDBGUBHrcOOvrsjMjoXRxrk= cloud.google.com/go/managedidentities v1.7.6/go.mod h1:pYCWPaI1AvR8Q027Vtp+SFSM/VOVgbjBF4rxp1/z5p4= cloud.google.com/go/managedidentities v1.7.7/go.mod h1:nwNlMxtBo2YJMvsKXRtAD1bL41qiCI9npS7cbqrsJUs= +cloud.google.com/go/maps v1.21.0 h1:El61AfMxC1sU/RU8Wzs9dkZEgltyunKM86aKF9aDlaE= cloud.google.com/go/maps v1.21.0/go.mod h1:cqzZ7+DWUKKbPTgqE+KuNQtiCRyg/o7WZF9zDQk+HQs= cloud.google.com/go/maps v1.26.0/go.mod h1:+auempdONAP8emtm48aCfNo1ZC+3CJniRA1h8J4u7bY= +cloud.google.com/go/mediatranslation v0.9.6 h1:SDGatA73TgZ8iCvILVXpk/1qhTK5DJyufUDEWgbmbV8= 
cloud.google.com/go/mediatranslation v0.9.6/go.mod h1:WS3QmObhRtr2Xu5laJBQSsjnWFPPthsyetlOyT9fJvE= cloud.google.com/go/mediatranslation v0.9.7/go.mod h1:mz3v6PR7+Fd/1bYrRxNFGnd+p4wqdc/fyutqC5QHctw= +cloud.google.com/go/memcache v1.11.6 h1:33IVqQEmFiITsBXwGHeTkUhWz0kLNKr90nV3e22uLPs= cloud.google.com/go/memcache v1.11.6/go.mod h1:ZM6xr1mw3F8TWO+In7eq9rKlJc3jlX2MDt4+4H+/+cc= cloud.google.com/go/memcache v1.11.7/go.mod h1:AU1jYlUqCihxapcJ1GGMtlMWDVhzjbfUWBXqsXa4rBg= +cloud.google.com/go/metastore v1.14.7 h1:dLm59AHHZCorveCylj7c2iWhkQsmMIeWTsV+tG/BXtY= cloud.google.com/go/metastore v1.14.7/go.mod h1:0dka99KQofeUgdfu+K/Jk1KeT9veWZlxuZdJpZPtuYU= cloud.google.com/go/metastore v1.14.8/go.mod h1:h1XI2LpD4ohJhQYn9TwXqKb5sVt6KSo47ft96SiFF1s= cloud.google.com/go/monitoring v1.21.2 h1:FChwVtClH19E7pJ+e0xUhJPGksctZNVOk2UhMmblmdU= cloud.google.com/go/monitoring v1.21.2/go.mod h1:hS3pXvaG8KgWTSz+dAdyzPrGUYmi2Q+WFX8g2hqVEZU= cloud.google.com/go/monitoring v1.24.0/go.mod h1:Bd1PRK5bmQBQNnuGwHBfUamAV1ys9049oEPHnn4pcsc= +cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= cloud.google.com/go/monitoring v1.24.3/go.mod h1:nYP6W0tm3N9H/bOw8am7t62YTzZY+zUeQ+Bi6+2eonI= +cloud.google.com/go/networkconnectivity v1.17.1 h1:RQcG1rZNCNV5Dn3tnINs4TYswDXk2hKH+85eh+JvoWU= cloud.google.com/go/networkconnectivity v1.17.1/go.mod h1:DTZCq8POTkHgAlOAAEDQF3cMEr/B9k1ZbpklqvHEBtg= cloud.google.com/go/networkconnectivity v1.19.1/go.mod h1:Q5v6uNNNz8BP232uuXM66XgWML9m379xhwv58Y+8Kb0= +cloud.google.com/go/networkmanagement v1.19.1 h1:ecukgArkYCVcK5w2h7WDDd+nHgmBAp9Bst7ClmVKz5A= cloud.google.com/go/networkmanagement v1.19.1/go.mod h1:icgk265dNnilxQzpr6rO9WuAuuCmUOqq9H6WBeM2Af4= cloud.google.com/go/networkmanagement v1.21.0/go.mod h1:clG/5Yt0wQ57qSH6Yh7oehQYlobHw3F6nb3Pn4ig5hU= +cloud.google.com/go/networksecurity v0.10.6 h1:6b6fcCG9BFNcmtNO+VuPE04vkZb5TKNX9+7ZhYMgstE= 
cloud.google.com/go/networksecurity v0.10.6/go.mod h1:FTZvabFPvK2kR/MRIH3l/OoQ/i53eSix2KA1vhBMJec= cloud.google.com/go/networksecurity v0.10.7/go.mod h1:FgoictpfaJkeBlM1o2m+ngPZi8mgJetbFDH4ws1i2fQ= +cloud.google.com/go/notebooks v1.12.6 h1:nCfZwVihArMPP2atRoxRrXOXJ/aC9rAgpBQGCc2zpYw= cloud.google.com/go/notebooks v1.12.6/go.mod h1:3Z4TMEqAKP3pu6DI/U+aEXrNJw9hGZIVbp+l3zw8EuA= cloud.google.com/go/notebooks v1.12.7/go.mod h1:uR9pxAkKmlNloibMr9Q1t8WhIu4P2JeqJs7c064/0Mo= +cloud.google.com/go/optimization v1.7.6 h1:jDvIuSxDsXI2P7l2sYXm6CoX1YBIIT6Khm5m0hq0/KQ= cloud.google.com/go/optimization v1.7.6/go.mod h1:4MeQslrSJGv+FY4rg0hnZBR/tBX2awJ1gXYp6jZpsYY= cloud.google.com/go/optimization v1.7.7/go.mod h1:OY2IAlX23o52qwMAZ0w65wibKuV12a4x6IHDTCq6kcU= +cloud.google.com/go/orchestration v1.11.9 h1:PnlZ/O4R/eiounpxUkhI9ZXRMWbG7vFqxc6L6sR+31k= cloud.google.com/go/orchestration v1.11.9/go.mod h1:KKXK67ROQaPt7AxUS1V/iK0Gs8yabn3bzJ1cLHw4XBg= cloud.google.com/go/orchestration v1.11.10/go.mod h1:tz7m1s4wNEvhNNIM3JOMH0lYxBssu9+7si5MCPw/4/0= +cloud.google.com/go/orgpolicy v1.15.0 h1:uQziDu3UKYk9ZwUgneZAW5aWxZFKgOXXsuVKFKh0z7Y= cloud.google.com/go/orgpolicy v1.15.0/go.mod h1:NTQLwgS8N5cJtdfK55tAnMGtvPSsy95JJhESwYHaJVs= cloud.google.com/go/orgpolicy v1.15.1/go.mod h1:bpvi9YIyU7wCW9WiXL/ZKT7pd2Ovegyr2xENIeRX5q0= +cloud.google.com/go/osconfig v1.14.6 h1:4uJrA1obzMBp1I+DF15y/MvsXKIODevuANpq3QhvX30= cloud.google.com/go/osconfig v1.14.6/go.mod h1:LS39HDBH0IJDFgOUkhSZUHFQzmcWaCpYXLrc3A4CVzI= cloud.google.com/go/osconfig v1.15.1/go.mod h1:NegylQQl0+5m+I+4Ey/g3HGeQxKkncQ1q+Il4DZ8PME= +cloud.google.com/go/oslogin v1.14.6 h1:BDKVcxo1OO4ZT+PbuFchZjnbrlUGfChilt6+pITY1VI= cloud.google.com/go/oslogin v1.14.6/go.mod h1:xEvcRZTkMXHfNSKdZ8adxD6wvRzeyAq3cQX3F3kbMRw= cloud.google.com/go/oslogin v1.14.7/go.mod h1:NB6NqBHfDMwznePdBVX+ILllc1oPCdNSGp5u/WIyndY= +cloud.google.com/go/phishingprotection v0.9.6 h1:yl572bBQbPjflX250SOflN6gwO2uYoddN2uRp36fDTo= cloud.google.com/go/phishingprotection v0.9.6/go.mod 
h1:VmuGg03DCI0wRp/FLSvNyjFj+J8V7+uITgHjCD/x4RQ= cloud.google.com/go/phishingprotection v0.9.7/go.mod h1:JTI4HNGyAbWolBoNOoCyCF0e3cqPNrYnlievHU49EwE= +cloud.google.com/go/policytroubleshooter v1.11.6 h1:Z8+tO2z21MY1arBBuJjwrOjbw8fbZb13AZTHXdzkl2U= cloud.google.com/go/policytroubleshooter v1.11.6/go.mod h1:jdjYGIveoYolk38Dm2JjS5mPkn8IjVqPsDHccTMu3mY= cloud.google.com/go/policytroubleshooter v1.11.7/go.mod h1:JP/aQ+bUkt4Gz6lQXBi/+A/6nyNRZ0Pvxui5Xl9ieyk= +cloud.google.com/go/privatecatalog v0.10.7 h1:R951ikhxIanXEijBCu0xnoUAOteS5m/Xplek0YvsNTE= cloud.google.com/go/privatecatalog v0.10.7/go.mod h1:Fo/PF/B6m4A9vUYt0nEF1xd0U6Kk19/Je3eZGrQ6l60= cloud.google.com/go/privatecatalog v0.10.8/go.mod h1:BkLHi+rtAGYBt5DocXLytHhF0n6F03Tegxgty40Y7aA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= @@ -250,39 +333,57 @@ cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+ cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1 h1:ukjixP1wl0LpnZ6LWtZJ0mX5tBmjp1f8Sqer8Z2OMUU= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.49.0 h1:5054IkbslnrMCgA2MAEPcsN3Ky+AyMpEZcii/DoySPo= cloud.google.com/go/pubsub v1.49.0/go.mod h1:K1FswTWP+C1tI/nfi3HQecoVeFvL4HUOB1tdaNXKhUY= cloud.google.com/go/pubsub v1.50.1/go.mod h1:6YVJv3MzWJUVdvQXG081sFvS0dWQOdnV+oTo++q/xFk= cloud.google.com/go/pubsub/v2 v2.0.0/go.mod h1:0aztFxNzVQIRSZ8vUr79uH2bS3jwLebwK6q1sgEub+E= +cloud.google.com/go/pubsublite v1.8.2 h1:jLQozsEVr+c6tOU13vDugtnaBSUy/PD5zK6mhm+uF1Y= cloud.google.com/go/pubsublite v1.8.2/go.mod h1:4r8GSa9NznExjuLPEJlF1VjOPOpgf3IT6k8x/YgaOPI= +cloud.google.com/go/recaptchaenterprise/v2 v2.20.4 h1:P4QMryKcWdi4LIe1Sx0b2ZOAQv5gVfdzPt2peXcN32Y= cloud.google.com/go/recaptchaenterprise/v2 v2.20.4/go.mod h1:3H8nb8j8N7Ss2eJ+zr+/H7gyorfzcxiDEtVBDvDjwDQ= cloud.google.com/go/recaptchaenterprise/v2 v2.20.5/go.mod 
h1:TCHn8+vtwgygBOwwbUJgRi6R9qglIpTeImsWsWDr5Lo= +cloud.google.com/go/recommendationengine v0.9.6 h1:slN7h23vswGccW8x3f+xUXCu9Yo18/GNkazH93LJbFk= cloud.google.com/go/recommendationengine v0.9.6/go.mod h1:nZnjKJu1vvoxbmuRvLB5NwGuh6cDMMQdOLXTnkukUOE= cloud.google.com/go/recommendationengine v0.9.7/go.mod h1:snZ/FL147u86Jqpv1j95R+CyU5NvL/UzYiyDo6UByTM= +cloud.google.com/go/recommender v1.13.5 h1:cIsyRKGNw4LpCfY5c8CCQadhlp54jP4fHtP+d5Sy2xE= cloud.google.com/go/recommender v1.13.5/go.mod h1:v7x/fzk38oC62TsN5Qkdpn0eoMBh610UgArJtDIgH/E= cloud.google.com/go/recommender v1.13.6/go.mod h1:y5/5womtdOaIM3xx+76vbsiA+8EBTIVfWnxHDFHBGJM= +cloud.google.com/go/redis v1.18.2 h1:JlHLceAOILEmbn+NIS7l+vmUKkFuobLToCWTxL7NGcQ= cloud.google.com/go/redis v1.18.2/go.mod h1:q6mPRhLiR2uLf584Lcl4tsiRn0xiFlu6fnJLwCORMtY= cloud.google.com/go/redis v1.18.3/go.mod h1:x8HtXZbvMBDNT6hMHaQ022Pos5d7SP7YsUH8fCJ2Wm4= +cloud.google.com/go/resourcemanager v1.10.6 h1:LIa8kKE8HF71zm976oHMqpWFiaDHVw/H1YMO71lrGmo= cloud.google.com/go/resourcemanager v1.10.6/go.mod h1:VqMoDQ03W4yZmxzLPrB+RuAoVkHDS5tFUUQUhOtnRTg= cloud.google.com/go/resourcemanager v1.10.7/go.mod h1:rScGkr6j2eFwxAjctvOP/8sqnEpDbQ9r5CKwKfomqjs= +cloud.google.com/go/resourcesettings v1.8.3 h1:13HOFU7v4cEvIHXSAQbinF4wp2Baybbq7q9FMctg1Ek= cloud.google.com/go/resourcesettings v1.8.3/go.mod h1:BzgfXFHIWOOmHe6ZV9+r3OWfpHJgnqXy8jqwx4zTMLw= +cloud.google.com/go/retail v1.21.0 h1:8jgWgtAg1mk91WmaoWRTlL9CcvazPwqZ3YT9n6Gva9U= cloud.google.com/go/retail v1.21.0/go.mod h1:LuG+QvBdLfKfO+7nnF3eA3l1j4TQw3Sg+UqlUorquRc= cloud.google.com/go/retail v1.25.1/go.mod h1:J75G8pd+DH0SHueL9IJw7Y5d2VhTsjFsk+F1t9f8jXc= +cloud.google.com/go/run v1.10.0 h1:CDhz0PPzI/cVpmNFyHe3Yp21jNpiAqtkfRxuoLi+JU0= cloud.google.com/go/run v1.10.0/go.mod h1:z7/ZidaHOCjdn5dV0eojRbD+p8RczMk3A7Qi2L+koHg= cloud.google.com/go/run v1.12.1/go.mod h1:DdMsf2m0/n3WHNDcyoqZmfE+LMd/uEJ7j1yIooDrgXU= +cloud.google.com/go/scheduler v1.11.7 h1:zkMEJ0UbEJ3O7NwEUlKLIp6eXYv1L7wHjbxyxznajKM= 
cloud.google.com/go/scheduler v1.11.7/go.mod h1:gqYs8ndLx2M5D0oMJh48aGS630YYvC432tHCnVWN13s= cloud.google.com/go/scheduler v1.11.8/go.mod h1:bNKU7/f04eoM6iKQpwVLvFNBgGyJNS87RiFN73mIPik= +cloud.google.com/go/secretmanager v1.14.7 h1:VkscIRzj7GcmZyO4z9y1EH7Xf81PcoiAo7MtlD+0O80= cloud.google.com/go/secretmanager v1.14.7/go.mod h1:uRuB4F6NTFbg0vLQ6HsT7PSsfbY7FqHbtJP1J94qxGc= cloud.google.com/go/secretmanager v1.16.0/go.mod h1://C/e4I8D26SDTz1f3TQcddhcmiC3rMEl0S1Cakvs3Q= +cloud.google.com/go/security v1.18.5 h1:6hqzvuwC8za9jyCTxygmEHnp4vZ8hfhwKVArxSCAVCo= cloud.google.com/go/security v1.18.5/go.mod h1:D1wuUkDwGqTKD0Nv7d4Fn2Dc53POJSmO4tlg1K1iS7s= cloud.google.com/go/security v1.19.2/go.mod h1:KXmf64mnOsLVKe8mk/bZpU1Rsvxqc0Ej0A6tgCeN93w= +cloud.google.com/go/securitycenter v1.36.2 h1:hLA58IBYmWrNiXDIONvuCUQ4sHLVPy8JvDo2j1wSYCw= cloud.google.com/go/securitycenter v1.36.2/go.mod h1:80ocoXS4SNWxmpqeEPhttYrmlQzCPVGaPzL3wVcoJvE= cloud.google.com/go/securitycenter v1.38.1/go.mod h1:Ge2D/SlG2lP1FrQD7wXHy8qyeloRenvKXeB4e7zO6z0= +cloud.google.com/go/servicedirectory v1.12.6 h1:pl/KUNvFzlXpxgnPgzQjyTQQcv5WsQ97zCHaPrLQlYA= cloud.google.com/go/servicedirectory v1.12.6/go.mod h1:OojC1KhOMDYC45oyTn3Mup08FY/S0Kj7I58dxUMMTpg= cloud.google.com/go/servicedirectory v1.12.7/go.mod h1:gOtN+qbuCMH6tj2dqlDY3qQL7w3V0+nkWaZElnJK8Ps= +cloud.google.com/go/shell v1.8.6 h1:jLWyztGlNWBx55QXBM4HbWvfv7aiRjPzRKTUkZA8dXk= cloud.google.com/go/shell v1.8.6/go.mod h1:GNbTWf1QA/eEtYa+kWSr+ef/XTCDkUzRpV3JPw0LqSk= cloud.google.com/go/shell v1.8.7/go.mod h1:OTke7qc3laNEW5Jr5OV9VR3IwU5x5VqGOE6705zFex4= +cloud.google.com/go/spanner v1.82.0 h1:w9uO8RqEoBooBLX4nqV1RtgudyU2ZX780KTLRgeVg60= cloud.google.com/go/spanner v1.82.0/go.mod h1:BzybQHFQ/NqGxvE/M+/iU29xgutJf7Q85/4U9RWMto0= cloud.google.com/go/spanner v1.86.1/go.mod h1:bbwCXbM+zljwSPLZ44wZOdzcdmy89hbUGmM/r9sD0ws= +cloud.google.com/go/speech v1.27.1 h1:+OktATNlQc+4WH78OrQadIP4CzXb9mBucdDGCO1NrlI= cloud.google.com/go/speech v1.27.1/go.mod 
h1:efCfklHFL4Flxcdt9gpEMEJh9MupaBzw3QiSOVeJ6ck= cloud.google.com/go/speech v1.28.1/go.mod h1:+EN8Zuy6y2BKe9P1RAmMaFPAgBns6m+XMgXAfkYtSSE= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= @@ -294,53 +395,78 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.49.0 h1:zenOPBOWHCnojRd9aJZAyQXBYqkJkdQS42dxL55CIMw= cloud.google.com/go/storage v1.49.0/go.mod h1:k1eHhhpLvrPjVGfo0mOUPEJ4Y2+a/Hv5PiwehZI9qGU= +cloud.google.com/go/storage v1.53.0 h1:gg0ERZwL17pJ+Cz3cD2qS60w1WMDnwcm5YPAIQBHUAw= cloud.google.com/go/storage v1.53.0/go.mod h1:7/eO2a/srr9ImZW9k5uufcNahT2+fPb8w5it1i5boaA= +cloud.google.com/go/storagetransfer v1.13.0 h1:uqKX3OgcYzR1W1YI943ZZ45id0RqA2eXXoCBSPstlbw= cloud.google.com/go/storagetransfer v1.13.0/go.mod h1:+aov7guRxXBYgR3WCqedkyibbTICdQOiXOdpPcJCKl8= cloud.google.com/go/storagetransfer v1.13.1/go.mod h1:S858w5l383ffkdqAqrAA+BC7KlhCqeNieK3sFf5Bj4Y= +cloud.google.com/go/talent v1.8.3 h1:wDP+++O/P1cTJBMkYlSY46k0a6atSoyO+UkBGuU9+Ao= cloud.google.com/go/talent v1.8.3/go.mod h1:oD3/BilJpJX8/ad8ZUAxlXHCslTg2YBbafFH3ciZSLQ= cloud.google.com/go/talent v1.8.4/go.mod h1:3yukBXUTVFNyKcJpUExW/k5gqEy8qW6OCNj7WdN0MWo= +cloud.google.com/go/texttospeech v1.13.0 h1:oWWFQp0yFl4EJOr3opDkKH9304wUsZjgPjrTDS6S1a8= cloud.google.com/go/texttospeech v1.13.0/go.mod h1:g/tW/m0VJnulGncDrAoad6WdELMTes8eb77Idz+4HCo= cloud.google.com/go/texttospeech v1.16.0/go.mod h1:AeSkoH3ziPvapsuyI07TWY4oGxluAjntX+pF4PJ2jy0= +cloud.google.com/go/tpu v1.8.3 h1:S4Ptq+yFIPNLEzQ/OQwiIYDNzk5I2vYmhf0SmFQOmWo= cloud.google.com/go/tpu v1.8.3/go.mod h1:Do6Gq+/Jx6Xs3LcY2WhHyGwKDKVw++9jIJp+X+0rxRE= cloud.google.com/go/tpu v1.8.4/go.mod h1:ul0cyWSHr6jHGZYElZe6HvQn35VY93RAlwpDiSBRnPA= +cloud.google.com/go/trace v1.11.6 h1:2O2zjPzqPYAHrn3OKl029qlqG6W8ZdYaOWRyr8NgMT4= cloud.google.com/go/trace v1.11.6/go.mod 
h1:GA855OeDEBiBMzcckLPE2kDunIpC72N+Pq8WFieFjnI= cloud.google.com/go/trace v1.11.7/go.mod h1:TNn9d5V3fQVf6s4SCveVMIBS2LJUqo73GACmq/Tky0s= cloud.google.com/go/translate v1.10.3/go.mod h1:GW0vC1qvPtd3pgtypCv4k4U8B7EdgK9/QEF2aJEUovs= +cloud.google.com/go/translate v1.12.5 h1:QPMNi4WCtHwc2PPfxbyUMwdN/0+cyCGLaKi2tig41J8= cloud.google.com/go/translate v1.12.5/go.mod h1:o/v+QG/bdtBV1d1edmtau0PwTfActvxPk/gtqdSDBi4= cloud.google.com/go/translate v1.12.7/go.mod h1:wwJp14NZyWvcrFANhIXutXj0pOBkYciBHwSlUOykcjI= +cloud.google.com/go/video v1.24.0 h1:KTB2BEXjGm2K/JcKxQXEgx3nSoMTByepnPZa4kln064= cloud.google.com/go/video v1.24.0/go.mod h1:h6Bw4yUbGNEa9dH4qMtUMnj6cEf+OyOv/f2tb70G6Fk= cloud.google.com/go/video v1.27.1/go.mod h1:xzfAC77B4vtnbi/TT3UUxEjCa/+Ehy5EA8w470ytOig= +cloud.google.com/go/videointelligence v1.12.6 h1:heq7jEO39sH5TycBh8TGFJ827XCxK0tIWatmBY/n0jI= cloud.google.com/go/videointelligence v1.12.6/go.mod h1:/l34WMndN5/bt04lHodxiYchLVuWPQjCU6SaiTswrIw= cloud.google.com/go/videointelligence v1.12.7/go.mod h1:XAk5hCMY+GihxJ55jNoMdwdXSNZnCl3wGs2+94gK7MA= +cloud.google.com/go/vision/v2 v2.9.5 h1:UJZ0H6UlOaYKgCn6lWG2iMAOJIsJZLnseEfzBR8yIqQ= cloud.google.com/go/vision/v2 v2.9.5/go.mod h1:1SiNZPpypqZDbOzU052ZYRiyKjwOcyqgGgqQCI/nlx8= cloud.google.com/go/vision/v2 v2.9.6/go.mod h1:lJC+vP15D5znJvHQYjEoTKnpToX1L93BUlvBmzM0gyg= +cloud.google.com/go/vmmigration v1.8.6 h1:68hOQDhs1DOITrCrhritrwr8xy6s8QMdwDyMzMiFleU= cloud.google.com/go/vmmigration v1.8.6/go.mod h1:uZ6/KXmekwK3JmC8PzBM/cKQmq404TTfWtThF6bbf0U= cloud.google.com/go/vmmigration v1.9.1/go.mod h1:jI3lBlhQn9+BKIWE/MmMsOzGekCXCc34b1M0CihL3zY= +cloud.google.com/go/vmwareengine v1.3.5 h1:OsGd1SB91y9fDuzdzFngMv4UcT4cqmRxjsCsS4Xmcu8= cloud.google.com/go/vmwareengine v1.3.5/go.mod h1:QuVu2/b/eo8zcIkxBYY5QSwiyEcAy6dInI7N+keI+Jg= cloud.google.com/go/vmwareengine v1.3.6/go.mod h1:ps0rb+Skgpt9ppHYC0o5DqtJ5ld2FyS8sAqtbHH8t9s= +cloud.google.com/go/vpcaccess v1.8.6 h1:RYtUB9rQEijX9Tc6lQcGst58ZOzPgaYTkz6+2pyPQTM= 
cloud.google.com/go/vpcaccess v1.8.6/go.mod h1:61yymNplV1hAbo8+kBOFO7Vs+4ZHYI244rSFgmsHC6E= cloud.google.com/go/vpcaccess v1.8.7/go.mod h1:9RYw5bVvk4Z51Rc8vwXT63yjEiMD/l7XyEaDyrNHgmk= +cloud.google.com/go/webrisk v1.11.1 h1:yZKNB7zRxOMriLrhP5WDE+BjxXVl0wJHHZSdaYzbdVU= cloud.google.com/go/webrisk v1.11.1/go.mod h1:+9SaepGg2lcp1p0pXuHyz3R2Yi2fHKKb4c1Q9y0qbtA= cloud.google.com/go/webrisk v1.11.2/go.mod h1:yH44GeXz5iz4HFsIlGeoVvnjwnmfbni7Lwj1SelV4f0= +cloud.google.com/go/websecurityscanner v1.7.6 h1:cIPKJKZA3l7D8DfL4nxce8HGOWXBw3WAUBF0ymOW9GQ= cloud.google.com/go/websecurityscanner v1.7.6/go.mod h1:ucaaTO5JESFn5f2pjdX01wGbQ8D6h79KHrmO2uGZeiY= cloud.google.com/go/websecurityscanner v1.7.7/go.mod h1:ng/PzARaus3Bj4Os4LpUnyYHsbtJky1HbBDmz148v1o= +cloud.google.com/go/workflows v1.14.2 h1:phBz5TOAES0YGogxZ6Q7ISSudaf618lRhE3euzBpE9U= cloud.google.com/go/workflows v1.14.2/go.mod h1:5nqKjMD+MsJs41sJhdVrETgvD5cOK3hUcAs8ygqYvXQ= cloud.google.com/go/workflows v1.14.3/go.mod h1:CC9+YdVI2Kvp0L58WajHpEfKJxhrtRh3uQ0SYWcmAk4= +codeberg.org/go-fonts/liberation v0.5.0 h1:SsKoMO1v1OZmzkG2DY+7ZkCL9U+rrWI09niOLfQ5Bo0= codeberg.org/go-fonts/liberation v0.5.0/go.mod h1:zS/2e1354/mJ4pGzIIaEtm/59VFCFnYC7YV6YdGl5GU= +codeberg.org/go-latex/latex v0.1.0 h1:hoGO86rIbWVyjtlDLzCqZPjNykpWQ9YuTZqAzPcfL3c= codeberg.org/go-latex/latex v0.1.0/go.mod h1:LA0q/AyWIYrqVd+A9Upkgsb+IqPcmSTKc9Dny04MHMw= +codeberg.org/go-pdf/fpdf v0.10.0 h1:u+w669foDDx5Ds43mpiiayp40Ov6sZalgcPMDBcZRd4= codeberg.org/go-pdf/fpdf v0.10.0/go.mod h1:Y0DGRAdZ0OmnZPvjbMp/1bYxmIPxm0ws4tfoPOc4LjU= +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9 h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +git.sr.ht/~sbinet/gg v0.6.0 
h1:RIzgkizAk+9r7uPzf/VfbJHBMKUr0F5hRFxTUGMnt38= git.sr.ht/~sbinet/gg v0.6.0/go.mod h1:uucygbfC9wVPQIfrmwM2et0imr8L7KQWywX0xpFMm94= github.com/99designs/gqlgen v0.17.44/go.mod h1:UTCu3xpK2mLI5qcMNw+HKDiEL77it/1XtAjisC4sLwM= +github.com/AssemblyAI/assemblyai-go-sdk v1.3.0 h1:AtOVgGxUycvK4P4ypP+1ZupecvFgnfH+Jsum0o5ILoU= github.com/AssemblyAI/assemblyai-go-sdk v1.3.0/go.mod h1:H0naZbvpIW49cDA5ZZ/gggeXqi7ojSGB1mqshRk6kNE= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802 h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Code-Hex/go-generics-cache v1.3.1 h1:i8rLwyhoyhaerr7JpjtYjJZUcCbWOdiYO3fZXLiEC4g= github.com/Code-Hex/go-generics-cache v1.3.1/go.mod h1:qxcC9kRVrct9rHeiYpFWSoW1vxyillCVzX13KZG8dl4= github.com/Crocmagnon/fatcontext v0.7.2 h1:BY5/dUhs2kuD3sDn7vZrgOneRib5EHk9GOiyK8Vg+14= +github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.2 h1:DBjmt6/otSdULyJdVg2BlG0qGZO5tKL4VzOs0jpvw5Q= github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.2/go.mod h1:dppbR7CwXD4pgtV9t3wD1812RaLDcBjtblcDF5f1vI0= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 h1:cZpsGsWTIFKymTA0je7IIvi1O7Es7apb9CF3EQlOcfE= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= @@ -352,11 +478,15 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0/go.mod h1:Cz6ft6Dkn3Et6l2v2a9/RpN7epQ1GtDlO6lj8bEcOvw= 
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 h1:UQ0AhxogsIRZDkElkblfnwjc3IaltCm2HUMvezQaL7s= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1/go.mod h1:jyqM3eLpJ3IbIFDTKVz2rF9T/xWGW0rIriGwnz8l9Tk= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 h1:fYE9p3esPxA/C0rQ0AHhP0drtPXDRhaWiwg1DPqO7IU= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0/go.mod h1:BnBReJLvVYx2CS/UHOgVz2BXKXD9wsQPxZug20nZhd0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0 h1:OqVGm6Ei3x5+yZmSJG1Mh2NwHvpVmZ08CB5qJhT9Nuk= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 h1:8nn+rsCvTq9axyEh382S0PFLBeaFwNsT43IrPWzctRU= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1/go.mod h1:viRWSEhtMZqz1rhwmOVKkWl6SwmVowfL9O2YR5gI2PE= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 h1:6/0iUd0xrnX7qt+mLNRwg5c0PGv8wpE8K90ryANQwMI= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= +github.com/IBM/watsonx-go v1.0.0 h1:xG7xA2W9N0RsiztR26dwBI8/VxIX4wTBhdYmEis2Yl8= github.com/IBM/watsonx-go v1.0.0/go.mod h1:8lzvpe/158JkrzvcoIcIj6OdNty5iC9co5nQHfkhRtM= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= @@ -364,7 +494,9 @@ github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3Q github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/sprig/v3 v3.2.3 
h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= +github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b h1:slYM766cy2nI3BwyRiyQj/Ud48djTMtMebDqepE95rw= github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY= github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE= @@ -375,45 +507,79 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E= github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= +github.com/amikos-tech/chroma-go v0.1.4 h1:MQXFBuKHOuZtlLOF6fLRb1VdXKKWp6TwdWxm6v/RUII= github.com/amikos-tech/chroma-go v0.1.4/go.mod h1:sT6uXOo/L5S/Q0v9jpYtoR1iOM68hUE2itWw8sOwLHY= +github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= +github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E= github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8= +github.com/antchfx/xmlquery v1.3.17 h1:d0qWjPp/D+vtRw7ivCwT5ApH/3CkQU8JOeo3245PpTk= github.com/antchfx/xmlquery v1.3.17/go.mod h1:Afkq4JIeXut75taLSuI31ISJ/zeq+3jG7TunF7noreA= +github.com/antchfx/xpath v1.2.4 h1:dW1HB/JxKvGtJ9WyVGJ0sIoEcqftV3SqIstujI+B9XY= github.com/antchfx/xpath 
v1.2.4/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE= github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= github.com/armon/go-metrics v0.3.10 h1:FR+drcQStOe+32sYyJYyZ7FIdgoGGBnwLl+flodp8Uo= github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM= github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 h1:zAybnyUQXIZ5mok5Jqwlf58/TFE7uvd3IAsa1aF9cXs= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10/go.mod h1:qqvMj6gHLR/EXWZw4ZbqlPbQUyenf4h82UQUlKc+l14= +github.com/aws/aws-sdk-go-v2/config v1.29.4 h1:ObNqKsDYFGr2WxnoXKOhCvTlf3HhwtoGgc+KmZ4H5yg= github.com/aws/aws-sdk-go-v2/config v1.29.4/go.mod h1:j2/AF7j/qxVmsNIChw1tWfsVKOayJoGRDjg1Tgq7NPk= +github.com/aws/aws-sdk-go-v2/credentials v1.17.57 h1:kFQDsbdBAR3GZsB8xA+51ptEnq9TIj3tS4MuP5b+TcQ= github.com/aws/aws-sdk-go-v2/credentials v1.17.57/go.mod h1:2kerxPUUbTagAr/kkaHiqvj/bcYHzi2qiJS/ZinllU0= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.27 h1:7lOW8NUwE9UZekS1DYoiPdVAqZ6A+LheHWb+mHbNOq8= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.27/go.mod h1:w1BASFIPOPUae7AgaH4SbjNbfdkxuggLyGfNFTn8ITY= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= 
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2 h1:Pg9URiobXy85kgFev3og2CuOZ8JZUBENF+dcgWBaYNk= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34 h1:ZNTqv4nIdE/DiBfUUfXcLZ/Spcuz+RjeziUtNJackkM= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34/go.mod h1:zf7Vcd1ViW7cPqYWEHLHJkS50X0JS2IKz9Cgaj6ugrs= +github.com/aws/aws-sdk-go-v2/service/bedrockagent v1.40.0 h1:MhnbAEHLZZQ4bQ7OJT/HbH5IYpo9UGogsdl/GfabG9E= github.com/aws/aws-sdk-go-v2/service/bedrockagent v1.40.0/go.mod h1:WlMBqEPeaBywfaXoMAfpitHvwezq555o8waYL3cCPqo= +github.com/aws/aws-sdk-go-v2/service/bedrockagentruntime v1.41.0 h1:Q2U7RCZKbWf6B+i8PCvG+LsgY+ANQvi2NueuLGfUMdw= github.com/aws/aws-sdk-go-v2/service/bedrockagentruntime v1.41.0/go.mod h1:Kek1IWlEDT1bp8kO+soWZh37Cb13LppHUTbMiJunna0= +github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.24.3 h1:GXQrb3kyg4EU94onCRH/oG2IsVjHMNE+IPE4RGkgSa4= github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.24.3/go.mod h1:PKGlRhLmSZuA6iCbRD1oZKrTJHdm6NWwWBvHxfDNHTA= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.0 h1:lguz0bmOoGzozP9XfRJR1QIayEYo+2vP/No3OfLF0pU= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.0/go.mod h1:iu6FSzgt+M2/x3Dk8zhycdIcHjEFb36IS8HVUVFoMg0= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 
h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15 h1:moLQUoVq91LiqT1nbvzDukyqAlCv89ZmwaHw/ZFlFZg= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15/go.mod h1:ZH34PJUc8ApjBIfgQCFvkWcUDBtl/WTD+uiYHjd8igA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.78.2 h1:jIiopHEV22b4yQP2q36Y0OmwLbsxNWdWwfZRR5QRRO4= github.com/aws/aws-sdk-go-v2/service/s3 v1.78.2/go.mod h1:U5SNqwhXB3Xe6F47kXvWihPl/ilGaEDe8HD/50Z9wxc= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.14 h1:c5WJ3iHz7rLIgArznb3JCSQT3uUMiz9DLZhIX+1G8ok= github.com/aws/aws-sdk-go-v2/service/sso v1.24.14/go.mod h1:+JJQTxB6N4niArC14YNtxcQtwEqzS3o9Z32n7q33Rfs= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.13 h1:f1L/JtUkVODD+k1+IiSJUUv8A++2qVr+Xvb3xWXETMU= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.13/go.mod h1:tvqlFoja8/s0o+UruA1Nrezo/df0PzdunMDDurUfg6U= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.12 h1:fqg6c1KVrc3SYWma/egWue5rKI4G2+M4wMQN2JosNAA= github.com/aws/aws-sdk-go-v2/service/sts v1.33.12/go.mod h1:7Yn+p66q/jt38qMoVfNvjbm3D89mGBnkwDcijgtih8w= +github.com/aws/smithy-go v1.22.2 h1:6D9hW43xKFrRx/tXXfAlIZc4JI+yQe6snnWcQyxSyLQ= github.com/aws/smithy-go v1.22.2/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8= github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/bazelbuild/rules_go v0.49.0/go.mod h1:Dhcz716Kqg1RHNWos+N6MlXNkjNP2EwZQ0LukRKJfMs= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/blang/semver/v4 v4.0.0 
h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= +github.com/campoy/embedmd v1.0.0 h1:V4kI2qTJJLf4J29RzI/MAt2c3Bl4dQSYPuflzwFH2hY= github.com/campoy/embedmd v1.0.0/go.mod h1:oxyr9RCiSXg0M3VJ3ks0UGfp98BpSSGr0kpiX3MzVl8= +github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -436,6 +602,7 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWs github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= +github.com/cilium/ebpf v0.11.0 h1:V8gS/bTCCjX9uUnkUFUpPsksM8n1lXBAvHcpiFk1X2Y= github.com/cilium/ebpf v0.11.0/go.mod h1:WE7CZAnqOL2RouJ4f1uyNhqr2P4CCvXFIqdRDUgWsVs= github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= @@ -443,6 +610,7 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f h1:WBZRG4aNOuI15bLRrC github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go 
v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe h1:QQ3GSy+MqSHxm/d8nCtnAiZdYFd45cYZPs8vOOIYKfk= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20240423153145-555b57ec207b/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78 h1:QVw89YDxXxEe+l8gU8ETbOasdwEV+avkR75ZzsVV9WI= @@ -454,20 +622,31 @@ github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73l github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cockroachdb/errors v1.9.1 h1:yFVvsI0VxmRShfawbt/laCIDy/mtTqqnvoNgiy5bEV8= github.com/cockroachdb/errors v1.9.1/go.mod h1:2sxOtL2WIc096WSZqZ5h8fa17rdDq9HZOZLBCor4mBk= +github.com/cockroachdb/logtags v0.0.0-20211118104740-dabe8e521a4f h1:6jduT9Hfc0njg5jJ1DdKCFPdMBrp/mdZfCpa5h+WM74= github.com/cockroachdb/logtags v0.0.0-20211118104740-dabe8e521a4f/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/redact v1.1.3 h1:AKZds10rFSIj7qADf0g46UixK8NNLwWTNdCIGS5wfSQ= github.com/cockroachdb/redact v1.1.3/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cohere-ai/tokenizer v1.1.2 h1:t3KwUBSpKiBVFtpnHBfVIQNmjfZUuqFVYuSFkZYOWpU= github.com/cohere-ai/tokenizer v1.1.2/go.mod h1:9MNFPd9j1fuiEK3ua2HSCUxxcrfGMlSqpa93livg/C0= +github.com/containerd/cgroups/v3 v3.0.3 h1:S5ByHZ/h9PMe5IOQoN7E+nMc2UcLEM/V48DGDJ9kip0= github.com/containerd/cgroups/v3 v3.0.3/go.mod h1:8HBe7V3aWGLFPd/k03swSIsGjZhHI2WzJmticMgVuz0= +github.com/containerd/errdefs v1.0.0 
h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0= @@ -481,11 +660,16 @@ github.com/cznic/sortutil v0.0.0-20181122101858-f5f958428db8 h1:LpMLYGyy67BoAFGd github.com/cznic/sortutil v0.0.0-20181122101858-f5f958428db8/go.mod h1:q2w6Bg5jeox1B+QkJ6Wp/+Vn0G/bo3f1uY7Fn3vivIQ= github.com/cznic/strutil v0.0.0-20181122101858-275e90344537 h1:MZRmHqDBd0vxNwenEbKSQqRVT24d3C05ft8kduSwlqM= github.com/cznic/strutil v0.0.0-20181122101858-275e90344537/go.mod 
h1:AHHPPPXTw0h6pVabbcbyGRK1DckRn7r/STdZEeIDzZc= +github.com/deepmap/oapi-codegen/v2 v2.1.0 h1:I/NMVhJCtuvL9x+S2QzZKpSjGi33oDZwPRdemvOZWyQ= github.com/deepmap/oapi-codegen/v2 v2.1.0/go.mod h1:R1wL226vc5VmCNJUvMyYr3hJMm5reyv25j952zAVXZ8= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/docker/docker v28.2.2+incompatible h1:CjwRSksz8Yo4+RmQ339Dp/D2tGO5JxwYeqtMOEe0LDw= github.com/docker/docker v28.2.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/ebitengine/purego v0.8.2 h1:jPPGWs2sZ1UgOSgD2bClL0MJIqu58nOmIcBuXr62z1I= github.com/ebitengine/purego v0.8.2/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= @@ -511,9 +695,13 @@ github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6 github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4= github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= +github.com/form3tech-oss/jwt-go v3.2.3+incompatible h1:7ZaBxOI7TMoYBfyA3cQHErNNyAWIKUMIwqxEtgHOs5c= github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/gage-technologies/mistral-go v1.1.0 h1:POv1wM9jA/9OBXGV2YdPi9Y/h09+MjCbUF+9hRYlVUI= github.com/gage-technologies/mistral-go 
v1.1.0/go.mod h1:tF++Xt7U975GcLlzhrjSQb8l/x+PrriO9QEdsgm9l28= +github.com/getsentry/sentry-go v0.30.0 h1:lWUwDnY7sKHaVIoZ9wYqRHJ5iEmoc0pqcRqFkosKzBo= github.com/getsentry/sentry-go v0.30.0/go.mod h1:WU9B9/1/sHDqeV8T+3VwwbjeR5MSXs/6aqG3mqZrezA= +github.com/getzep/zep-go v1.0.4 h1:09o26bPP2RAPKFjWuVWwUWLbtFDF/S8bfbilxzeZAAg= github.com/getzep/zep-go v1.0.4/go.mod h1:HC1Gz7oiyrzOTvzeKC4dQKUiUy87zpIJl0ZFXXdHuss= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1 h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -532,27 +720,42 @@ github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ4 github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= 
github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/runtime v0.24.2 h1:yX9HMGQbz32M87ECaAhGpJjBmErO3QLcgdZj9BzGx7c= github.com/go-openapi/runtime v0.24.2/go.mod h1:AKurw9fNre+h3ELZfk6ILsfvPN+bvvlaU/M9q/r9hpk= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/goccmack/gocc v0.0.0-20230228185258-2292f9e40198 h1:FSii2UQeSLngl3jFoR4tUKZLprO7qUlh/TKKticc0BM= github.com/goccmack/gocc v0.0.0-20230228185258-2292f9e40198/go.mod h1:DTh/Y2+NbnOVVoypCCQrovMPDKUGp4yZpSbWg5D0XIM= +github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI= github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA= +github.com/godbus/dbus/v5 v5.0.4 h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.1.1 h1:72R+M5VuhED/KujmZVcIquuo8mBgX4oVda//DQb3PXo= github.com/gogo/protobuf v1.3.2 
h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.1/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= @@ -577,6 +780,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4 h1:l75CXGRSwbaYNpl/Z2X1XIIAMSCquvXgpVZDhwEIJsc= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.7.0-rc.1 h1:YojYx61/OLFsiv6Rw1Z96LpldJIy31o+UHmwAUMJ6/U= github.com/golang/mock v1.7.0-rc.1/go.mod h1:s42URUywIqd+OcERslBJvOjepvNymP31m3q8d/GkuRs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -595,13 +799,16 @@ github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golangci/modinfo v0.3.3 h1:YBQDZpDMJpe5mtd0klUFYL8tSVkmF3cmm0fZ48sc7+s= github.com/golangci/modinfo v0.3.3/go.mod h1:wytF1M5xl9u0ij8YSvhkEVPP3M5Mc7XLl1pxH3B2aUM= github.com/google/btree 
v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/generative-ai-go v0.19.0 h1:R71szggh8wHMCUlEMsW2A/3T+5LdEIkiaHSYgSpUgdg= github.com/google/generative-ai-go v0.19.0/go.mod h1:JYolL13VG7j79kM5BtHz4qwONHkeJQzOCkKXnpqtS/E= @@ -617,7 +824,9 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-pkcs11 v0.3.0 h1:PVRnTgtArZ3QQqTGtbtjtnIkzl2iY2kt24yqbrf7td8= github.com/google/go-pkcs11 v0.3.0/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= @@ -625,6 +834,7 @@ github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXi github.com/google/martian/v3 v3.0.0 h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= 
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= @@ -644,6 +854,7 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4 github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/cloud-bigtable-clients-test v0.0.3 h1:afMKTvA/jc6jSTMkeHBZGFDTt8Cc+kb1ATFzqMK85hw= github.com/googleapis/cloud-bigtable-clients-test v0.0.3/go.mod h1:TWtDzrrAI70C3dNLDY+nZN3gxHtFdZIbpL9rCTFyxE0= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/enterprise-certificate-proxy v0.3.5/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= @@ -657,17 +868,24 @@ github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3 github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8 h1:tlyzajkF3030q6M8SvmJSemC9DTHL/xaMa18b65+JM4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gookit/color v1.5.4 
h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0= github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w= github.com/goph/emperror v0.17.2 h1:yLapQcmEsO0ipe9p5TaN22djm3OFV/TfM/fcYP0/J18= github.com/goph/emperror v0.17.2/go.mod h1:+ZbQ+fUNO/6FNiUo0ujtMjhgad9Xa6fQL9KhH4LNHic= +github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 h1:RtRsiaGvWxcwd8y3BiRZxsylPT8hLWZ5SPcfI+3IDNk= github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0/go.mod h1:TzP6duP4Py2pHLVPPQp42aoYI92+PCrVotyR5e8Vqlk= github.com/hashicorp/consul/api v1.12.0 h1:k3y1FYv6nuKyNTqj6w9gXOx5r5CfLj/k/euUeBXj1OY= github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= @@ -691,6 +909,7 @@ github.com/hashicorp/serf v0.9.7 h1:hkdgbqizGQHuU5IPqYM1JdSMV8nKfpuOnZYXssk9muY= github.com/hashicorp/serf v0.9.7/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= github.com/huandu/xstrings v1.3.3 
h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6 h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -714,7 +933,9 @@ github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= +github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ= github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= @@ -727,9 +948,11 @@ github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4d github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize 
v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY= github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8= github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= @@ -740,33 +963,46 @@ github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfn github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kULo2bwGEkFvCePZ3qHDDTC3/J9Swo= github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 h1:PpXWgLPs+Fqr325bN2FD2ISlRRztXibcX6e8f5FR5Dc= github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg= +github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 h1:sIXJOMrYnQZJu7OB7ANSF4MYri2fTEGIsRLz6LwI4xE= github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4/go.mod 
h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= github.com/magefile/mage v1.14.0 h1:6QDX3g6z1YvJ4olPhT1wksUcSa/V0a1B+pJb73fBjyo= github.com/magefile/mage v1.14.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mdelapenya/tlscert v0.2.0 h1:7H81W6Z/4weDvZBNOfQte5GpIMo0lGYEeWbkGp5LJHI= github.com/mdelapenya/tlscert v0.2.0/go.mod h1:O4njj3ELLnJjGdkN7M/vIVCpZ+Cf0L6muqOG4tLSl8o= +github.com/metaphorsystems/metaphor-go v0.0.0-20230816231421-43794c04824e h1:4N462rhrxy7KezYYyL3RjJPWlhXiSkfFes0YsMqicd0= github.com/metaphorsystems/metaphor-go v0.0.0-20230816231421-43794c04824e/go.mod h1:mDz8kHE7x6Ja95drCQ2T1vLyPRc/t69Cf3wau91E3QU= github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517 h1:zpIH83+oKzcpryru8ceC6BxnoG8TBrhgAvRg8obzup0= github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= github.com/mgechev/dots v1.0.0 h1:o+4OJ3OjWzgQHGJXKfJ8rbH4dqDugu5BiEy84nxg0k4= github.com/mgechev/dots 
v1.0.0/go.mod h1:rykuMydC9t3wfkM+ccYH3U3ss03vZGg6h3hmOznXLH0= +github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58= github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs= +github.com/milvus-io/milvus-proto/go-api/v2 v2.6.1-0.20250819024338-07695f709619 h1:eVuQvPQS5WgNm7IClUwIO6140OTzFvvPIcX4s9NnmqE= github.com/milvus-io/milvus-proto/go-api/v2 v2.6.1-0.20250819024338-07695f709619/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs= +github.com/milvus-io/milvus-sdk-go/v2 v2.4.0 h1:llESmiYiaFqRh0CUrZCLH0IWWkk5r8/vz0tkaA0YzQo= github.com/milvus-io/milvus-sdk-go/v2 v2.4.0/go.mod h1:8IKyxVV+kd+RADMuMpo8GXnTDq5ZxrSSWpe9nJieboQ= +github.com/milvus-io/milvus/client/v2 v2.6.0 h1:TXeht4yOhqlTsJn33sQoZYjmv+w4mEYPmX+8Wp+ifzM= github.com/milvus-io/milvus/client/v2 v2.6.0/go.mod h1:5ppFKT61Fh5Z1MkAhK7+nLnlh9C+ENBe/dpgFBH0te0= +github.com/milvus-io/milvus/pkg/v2 v2.0.0-20250319085209-5a6b4e56d59e h1:VCr43pG4efacDbM4au70fh8/5hNTftoWzm1iEumvDWM= github.com/milvus-io/milvus/pkg/v2 v2.0.0-20250319085209-5a6b4e56d59e/go.mod h1:37AWzxVs2NS4QUJrkcbeLUwi+4Av0h5mEdjLI62EANU= github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= @@ -774,18 +1010,27 @@ github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyua github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= 
github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5 h1:0KqC6/sLy7fDpBdybhVkkv4Yz+PmB7c9Dz9z3dLW804= github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod 
h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= @@ -793,10 +1038,15 @@ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nikolalohinski/gonja v1.5.3 h1:GsA+EEaZDZPGJ8JtpeGN78jidhOlxeJROpqMT9fTj9c= github.com/nikolalohinski/gonja v1.5.3/go.mod h1:RmjwxNiXAEqcq1HeK5SSMmqFJvKOfTfXhkJv6YBtPa4= +github.com/nlpodyssey/cybertron v0.2.1 h1:zBvzmjP6Teq3u8yiHuLoUPxan6ZDRq/32GpV6Ep8X08= github.com/nlpodyssey/cybertron v0.2.1/go.mod h1:Vg9PeB8EkOTAgSKQ68B3hhKUGmB6Vs734dBdCyE4SVM= +github.com/nlpodyssey/gopickle v0.2.0 h1:4naD2DVylYJupQLbCQFdwo6yiXEmPyp+0xf5MVlrBDY= github.com/nlpodyssey/gopickle v0.2.0/go.mod h1:YIUwjJ2O7+vnBsxUN+MHAAI3N+adqEGiw+nDpwW95bY= +github.com/nlpodyssey/gotokenizers v0.2.0 h1:CWx/sp9s35XMO5lT1kNXCshFGDCfPuuWdx/9JiQBsVc= github.com/nlpodyssey/gotokenizers v0.2.0/go.mod h1:SBLbuSQhpni9M7U+Ie6O46TXYN73T2Cuw/4eeYHYJ+s= +github.com/nlpodyssey/spago v1.1.0 h1:DGUdGfeGR7TxwkYRdSEzbSvunVWN5heNSksmERmj97w= github.com/nlpodyssey/spago v1.1.0/go.mod h1:jDWGZwrB4B61U6Tf3/+MVlWOtNsk3EUA7G13UDHlnjQ= +github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro= github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= @@ -808,13 +1058,19 @@ github.com/onsi/gomega v1.36.3 h1:hID7cr8t3Wp26+cYnfcjR6HpJ00fdogN6dqZ1t6IylU= github.com/onsi/gomega v1.36.3/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0= github.com/onsi/gomega v1.37.0 h1:CdEG8g0S133B4OswTDC/5XPSzE1OeP29QOioj2PID2Y= github.com/onsi/gomega v1.37.0/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= 
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opencontainers/runtime-spec v1.0.2 h1:UfAcuLBJB9Coz72x1hgl8O5RVzTdNiaglX6v2DM6FI0= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opensearch-project/opensearch-go v1.1.0 h1:eG5sh3843bbU1itPRjA9QXbxcg8LaZ+DjEzQH9aLN3M= github.com/opensearch-project/opensearch-go v1.1.0/go.mod h1:+6/XHCuTH+fwsMJikZEWsucZ4eZMma3zNSeLrTtVGbo= +github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/otiai10/curr v1.0.0 h1:TJIWdbX0B+kpNagQrjgq8bCMrbhiuX73M2XwgtDMoOI= github.com/otiai10/mint v1.3.1 h1:BCmzIS3n71sGfHB5NMNDB3lHYPz8fWSkCAErHed//qc= +github.com/panjf2000/ants/v2 v2.11.3 h1:AfI0ngBoXJmYOpDh9m516vjqoUu2sLrIVgppI9TZVpg= github.com/panjf2000/ants/v2 v2.11.3/go.mod h1:8u92CYMUc6gyvTIw8Ru7Mt7+/ESnJahz5EVtqfrilek= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= @@ -822,17 +1078,21 @@ github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30 h1:BHT1/DKsYDGkUgQ2 github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= +github.com/pgvector/pgvector-go v0.1.1 h1:kqJigGctFnlWvskUiYIvJRNwUtQl/aMSUZVs0YWQe+g= github.com/pgvector/pgvector-go v0.1.1/go.mod h1:wLJgD/ODkdtd2LJK4l6evHXTuG+8PxymYAVomKHOWac= 
github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw= github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2tUTP0= +github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pinecone-io/go-pinecone v0.4.1 h1:hRJgtGUIHwvM1NvzKe+YXog4NxYi9x3NdfFhQ2QWBWk= github.com/pinecone-io/go-pinecone v0.4.1/go.mod h1:KwWSueZFx9zccC+thBk13+LDiOgii8cff9bliUI4tQs= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1 h1:I2qBYMChEhIjOgazfJmV3/mZM256btk6wkCDRmW7JYs= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pkg/sftp v1.13.7 h1:uv+I3nNJvlKZIQGSr8JVQLNHFU9YhhNpvC14Y6KgmSM= github.com/pkg/sftp v1.13.7/go.mod h1:KMKI0t3T6hfA+lTR/ssZdunHo+uwq7ghoN09/FSu3DY= @@ -846,18 +1106,23 @@ github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1: github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= github.com/quasilyte/go-ruleguard/rules v0.0.0-20211022131956-028d6511ab71 h1:CNooiryw5aisadVfzneSZPswRWvnVW8hF1bS/vo8ReI= github.com/quasilyte/go-ruleguard/rules v0.0.0-20211022131956-028d6511ab71/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= +github.com/redis/rueidis v1.0.34 h1:cdggTaDDoqLNeoKMoew8NQY3eTc83Kt6XyfXtoCO2Wc= 
github.com/redis/rueidis v1.0.34/go.mod h1:g8nPmgR4C68N3abFiOc/gUOSEKw3Tom6/teYMehg4RE= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/sagikazarmark/crypt v0.6.0 h1:REOEXCs/NFY/1jOCEouMuT4zEniE5YoXbvpC5X/TLF8= github.com/sagikazarmark/crypt v0.6.0/go.mod h1:U8+INwJo3nBv1m6A/8OBXAq7Jnpspk5AxSgDyEQcea8= +github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA= github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU= +github.com/samber/lo v1.27.0 h1:GOyDWxsblvqYobqsmUuMddPa2/mMzkKyojlXol4+LaQ= github.com/samber/lo v1.27.0/go.mod h1:it33p9UtPMS7z72fP4gw/EIfQB2eI8ke7GR2wc6+Rhg= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shirou/gopsutil/v3 v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI= github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk= github.com/shirou/gopsutil/v4 v4.25.2/go.mod h1:34gBYJzyqCDT11b6bMHP0XCvWeU3J61XRT7a2EmCRTA= github.com/shirou/gopsutil/v4 v4.25.3 h1:SeA68lsu8gLggyMbmCn8cmp97V1TI9ld9sVzAUcKcKE= @@ -866,14 +1131,18 @@ github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dI github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= github.com/shirou/gopsutil/v4 v4.25.8 
h1:NnAsw9lN7587WHxjJA9ryDnqhJpFH6A+wagYWTOH970= github.com/shirou/gopsutil/v4 v4.25.8/go.mod h1:q9QdMmfAOVIw7a+eF86P7ISEU6ka+NLgkUxlopV4RwI= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM= github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041 h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc= +github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js= github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0= github.com/sosodev/duration v1.2.0/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw= github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U= +github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= @@ -888,19 +1157,33 @@ github.com/stretchr/testify v1.7.5/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/temoto/robotstxt v1.1.2 
h1:W2pOjSJ6SWvldyEuiFXNxz3xZ8aiWX5LbfDiOFd7Fxg= github.com/temoto/robotstxt v1.1.2/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo= +github.com/testcontainers/testcontainers-go v0.38.0 h1:d7uEapLcv2P8AvH8ahLqDMMxda2W9gQN1nRbHS28HBw= github.com/testcontainers/testcontainers-go v0.38.0/go.mod h1:C52c9MoHpWO+C4aqmgSU+hxlR5jlEayWtgYrb8Pzz1w= +github.com/testcontainers/testcontainers-go/modules/chroma v0.37.0 h1:vb9fb1mogtlQuF3l0vSAu6rqv3y2j9wozve4xnhVyz8= github.com/testcontainers/testcontainers-go/modules/chroma v0.37.0/go.mod h1:IWJavzQy7rxM40OqOgSN5iyckgAw21wDyE+NhSctatk= +github.com/testcontainers/testcontainers-go/modules/mariadb v0.38.0 h1:RfilPieRalCavWFa+XQtatazPn1L57Do/tRxe/B45I8= github.com/testcontainers/testcontainers-go/modules/mariadb v0.38.0/go.mod h1:26mrWngnaRhxmgy942aVfUihLnihbIGsuIds6gGBnIE= +github.com/testcontainers/testcontainers-go/modules/milvus v0.37.0 h1:q+gx0A10DM0VJMJjo9VOXOB1t8Dv3B6EgxXZf2TIzOw= github.com/testcontainers/testcontainers-go/modules/milvus v0.37.0/go.mod h1:bCdLqxjPKax120BMl4aO/A0gs9+4FeJkLBVf9WpjFoQ= +github.com/testcontainers/testcontainers-go/modules/mongodb v0.37.0 h1:drGy4LJOVkIKpKGm1YKTfVzb1qRhN/konVpmuUphq0k= github.com/testcontainers/testcontainers-go/modules/mongodb v0.37.0/go.mod h1:e9/4dGJfSZW59/kXGf/ksrEvA+BqP/daax0Usp2cpsM= +github.com/testcontainers/testcontainers-go/modules/mysql v0.37.0 h1:LqUos1oR5iuuzorFnSvxsHNdYdCHB/DfI82CuT58wbI= github.com/testcontainers/testcontainers-go/modules/mysql v0.37.0/go.mod h1:vHEEHx5Kf+uq5hveaVAMrTzPY8eeRZcKcl23MRw5Tkc= +github.com/testcontainers/testcontainers-go/modules/opensearch v0.37.0 h1:bamwpenM3zl8NCxDEHdR0gpauDS1gK/FOr9yfmVKJug= github.com/testcontainers/testcontainers-go/modules/opensearch v0.37.0/go.mod h1:2jEljlB96QHSHF7Vo9S8zEDisPPrfsddzSvsCR1ihNQ= +github.com/testcontainers/testcontainers-go/modules/postgres v0.37.0 h1:hsVwFkS6s+79MbKEO+W7A1wNIw1fmkMtF4fg83m6kbc= github.com/testcontainers/testcontainers-go/modules/postgres v0.37.0/go.mod 
h1:Qj/eGbRbO/rEYdcRLmN+bEojzatP/+NS1y8ojl2PQsc= +github.com/testcontainers/testcontainers-go/modules/redis v0.37.0 h1:9HIY28I9ME/Zmb+zey1p/I1mto5+5ch0wLX+nJdOsQ4= github.com/testcontainers/testcontainers-go/modules/redis v0.37.0/go.mod h1:Abu9g/25Qv+FkYVx3U4Voaynou1c+7D0HIhaQJXvk6E= +github.com/testcontainers/testcontainers-go/modules/weaviate v0.37.0 h1:Ou+qJTuaNK1cbT3c13ZQQUnq6VSmDjpMXrE6vVZQmFY= github.com/testcontainers/testcontainers-go/modules/weaviate v0.37.0/go.mod h1:VdjCqOCJGzlGLS2p4NdLjN5rqN3/53mle+Gb+irCbOE= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tinylib/msgp v1.1.8 h1:FCXC1xanKO4I8plpHGH2P7koL/RzZs12l/+r7vakfm0= github.com/tinylib/msgp v1.1.8/go.mod h1:qkpG+2ldGg4xRFmx+jfTvZPxfGFhi64BcnL9vkCm/Tw= @@ -912,17 +1195,25 @@ github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+F github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/tklauser/numcpus v0.10.0 h1:18njr6LDBk1zuna922MgdjQuJFjrdppsZG60sHGfjso= github.com/tklauser/numcpus v0.10.0/go.mod h1:BiTKazU708GQTYF4mB+cmlpT2Is1gLk7XVuEeem8LsQ= +github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802 h1:uruHq4dN7GR16kFc5fp3d1RIYzJW5onx8Ybykw2YQFA= github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaOOb6ThwMmTEbhRwtKR97o= github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod 
h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/quicktemplate v1.8.0 h1:zU0tjbIqTRgKQzFY1L42zq0qR3eh4WoQQdIdqCysW5k= github.com/valyala/quicktemplate v1.8.0/go.mod h1:qIqW8/igXt8fdrUln5kOSb+KWMaJ4Y8QUsfd1k6L2jM= +github.com/weaviate/weaviate v1.29.0 h1:bVPZlUqlsa7qp1LazxR0r1cJNrddm6xKVXPlMEEXi6E= github.com/weaviate/weaviate v1.29.0/go.mod h1:UsnbM1Kmm5Om+UPU6DTo421SDeMD8SqCJqsBs/nwgcI= +github.com/weaviate/weaviate-go-client/v5 v5.0.2 h1:aptmTJy6d4OxGHBTGnqHheJe0WDbzH2SVmQkvy7+EGY= github.com/weaviate/weaviate-go-client/v5 v5.0.2/go.mod h1:CwZehIL4s3VfkzTu12Wy8VAUtELRtQFUt2ZniBF/lQM= +github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -932,41 +1223,61 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17 github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc= 
github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/yargevad/filepathx v1.0.0 h1:SYcT+N3tYGi+NvazubCNlvgIPbzAk7i7y2dwg3I5FYc= github.com/yargevad/filepathx v1.0.0/go.mod h1:BprfX/gpYNJHJfc35GjRRpVcwWXS89gGulUIU5tK3tA= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zeebo/blake3 v0.2.4 h1:KYQPkhpRtcqh0ssGYcKLG1JYvddkEA8QwCM/yBqhaZI= github.com/zeebo/blake3 v0.2.4/go.mod h1:7eeQ6d2iXWRGF6npfaxl2CU+xy2Fjo2gxeyZGCRUjcE= github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= +github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= +gitlab.com/golang-commonmark/html v0.0.0-20191124015941-a22733972181 h1:K+bMSIx9A7mLES1rtG+qKduLIXq40DAzYHtb0XuCukA= gitlab.com/golang-commonmark/html v0.0.0-20191124015941-a22733972181/go.mod h1:dzYhVIwWCtzPAa4QP98wfB9+mzt33MSmM8wsKiMi2ow= +gitlab.com/golang-commonmark/linkify v0.0.0-20191026162114-a0c2df6c8f82 h1:oYrL81N608MLZhma3ruL8qTM4xcpYECGut8KSxRY59g= gitlab.com/golang-commonmark/linkify v0.0.0-20191026162114-a0c2df6c8f82/go.mod 
h1:Gn+LZmCrhPECMD3SOKlE+BOHwhOYD9j7WT9NUtkCrC8= +gitlab.com/golang-commonmark/markdown v0.0.0-20211110145824-bf3e522c626a h1:O85GKETcmnCNAfv4Aym9tepU8OE0NmcZNqPlXcsBKBs= gitlab.com/golang-commonmark/markdown v0.0.0-20211110145824-bf3e522c626a/go.mod h1:LaSIs30YPGs1H5jwGgPhLzc8vkNc/k0rDX/fEZqiU/M= +gitlab.com/golang-commonmark/mdurl v0.0.0-20191124015652-932350d1cb84 h1:qqjvoVXdWIcZCLPMlzgA7P9FZWdPGPvP/l3ef8GzV6o= gitlab.com/golang-commonmark/mdurl v0.0.0-20191124015652-932350d1cb84/go.mod h1:IJZ+fdMvbW2qW6htJx7sLJ04FEs4Ldl/MDsJtMKywfw= +gitlab.com/golang-commonmark/puny v0.0.0-20191124015043-9f83538fa04f h1:Wku8eEdeJqIOFHtrfkYUByc4bCaTeA6fL0UJgfEiFMI= gitlab.com/golang-commonmark/puny v0.0.0-20191124015043-9f83538fa04f/go.mod h1:Tiuhl+njh/JIg0uS/sOJVYi0x2HEa5rc1OAaVsb5tAs= +go.einride.tech/aip v0.68.1 h1:16/AfSxcQISGN5z9C5lM+0mLYXihrHbQ1onvYTr93aQ= go.einride.tech/aip v0.68.1/go.mod h1:XaFtaj4HuA3Zwk9xoBtTWgNubZ0ZZXv9BZJCkuKuWbg= +go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= go.etcd.io/etcd/api/v3 v3.5.4 h1:OHVyt3TopwtUQ2GKdd5wu3PmmipR4FTwCqoEjSyRdIc= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= +go.etcd.io/etcd/api/v3 v3.5.5 h1:BX4JIbQ7hl7+jL+g+2j5UAr0o1bctCm6/Ct+ArBGkf0= go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8= go.etcd.io/etcd/client/pkg/v3 v3.5.4 h1:lrneYvz923dvC14R54XcA7FXoZ3mlGZAgmwhfm7HqOg= go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/pkg/v3 v3.5.5 h1:9S0JUVvmrVl7wCF39iTQthdaaNIiAaQbmK75ogO6GU8= go.etcd.io/etcd/client/pkg/v3 v3.5.5/go.mod h1:ggrwbk069qxpKPq8/FKkQ3Xq9y39kbFR4LnKszpRXeQ= go.etcd.io/etcd/client/v2 v2.305.4 h1:Dcx3/MYyfKcPNLpR4VVQUP5KgYrBeJtktBwEKkw08Ao= go.etcd.io/etcd/client/v2 v2.305.4/go.mod h1:Ud+VUwIi9/uQHOMA+4ekToJ12lTxlv0zB/+DHwTGEbU= +go.etcd.io/etcd/client/v2 v2.305.5 
h1:DktRP60//JJpnPC0VBymAN/7V71GHMdjDCBt4ZPXDjI= go.etcd.io/etcd/client/v2 v2.305.5/go.mod h1:zQjKllfqfBVyVStbt4FaosoX2iYd8fV/GRy/PbowgP4= go.etcd.io/etcd/client/v3 v3.5.4 h1:p83BUL3tAYS0OT/r0qglgc3M1JjhM0diV8DSWAhVXv4= go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= +go.etcd.io/etcd/client/v3 v3.5.5 h1:q++2WTJbUgpQu4B6hCuT7VkdwaTP7Qz6Daak3WzbrlI= go.etcd.io/etcd/client/v3 v3.5.5/go.mod h1:aApjR4WGlSumpnJ2kloS75h6aHUmAyaPLjHMxpc7E7c= +go.etcd.io/etcd/pkg/v3 v3.5.5 h1:Ablg7T7OkR+AeeeU32kdVhw/AGDsitkKPl7aW73ssjU= go.etcd.io/etcd/pkg/v3 v3.5.5/go.mod h1:6ksYFxttiUGzC2uxyqiyOEvhAiD0tuIqSZkX3TyPdaE= +go.etcd.io/etcd/raft/v3 v3.5.5 h1:Ibz6XyZ60OYyRopu73lLM/P+qco3YtlZMOhnXNS051I= go.etcd.io/etcd/raft/v3 v3.5.5/go.mod h1:76TA48q03g1y1VpTue92jZLr9lIHKUNcYdZOOGyx8rI= +go.etcd.io/etcd/server/v3 v3.5.5 h1:jNjYm/9s+f9A9r6+SC4RvNaz6AqixpOvhrFdT0PvIj0= go.etcd.io/etcd/server/v3 v3.5.5/go.mod h1:rZ95vDw/jrvsbj9XpTqPrTAB9/kzchVdhRirySPkUBc= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +go.mongodb.org/mongo-driver/v2 v2.0.0 h1:Jfd7XpdZa9yk3eY774bO7SWVb30noLSirL9nKTpavhI= go.mongodb.org/mongo-driver/v2 v2.0.0/go.mod h1:nSjmNq4JUstE8IRZKTktLgMHM4F1fccL6HGX1yh+8RA= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= @@ -1004,8 +1315,11 @@ go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.20.0 h1:DeFD0VgTZ+Cj6hxravYYZE2W4GlneVH81iAOPjZkzk8= 
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.20.0/go.mod h1:GijYcYmNpX1KazD5JmWGsi4P7dDTTTnfv1UbGn84MnU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.20.0 h1:gvmNvqrPYovvyRmCSygkUDyL8lC5Tl845MLEwqpxhEU= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.20.0/go.mod h1:vNUq47TGFioo+ffTSnKNdob241vePmtNZnAODKapKd0= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0 h1:PB3Zrjs1sG1GBX51SXyTSoOTqcDglmsk7nT6tkKPb/k= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0/go.mod h1:U2R3XyVPzn0WX7wOIypPuptulsMcPDPs/oiSVOMVnHY= go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= go.opentelemetry.io/otel/metric v1.26.0/go.mod h1:SY+rHOI4cEawI9a7N1A4nIg/nTQXe1ccCNWYOJUrpX4= @@ -1027,6 +1341,7 @@ go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZu go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.starlark.net v0.0.0-20230302034142-4b1e35fe2254 h1:Ss6D3hLXTM0KobyBYEAygXzFfGcjnmfEJOBgSbemCtg= go.starlark.net v0.0.0-20230302034142-4b1e35fe2254/go.mod h1:jxU+3+j+71eXOW14274+SmmuW82qJzl6iZSeqEtTGds= @@ -1057,6 +1372,7 @@ golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+ golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.25.0 
h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ= golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -1069,6 +1385,7 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b h1:Wh+f8QHJXR411sJR8/vRBTZ7YapZaRvUcLFFJhusH0k= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028 h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs= @@ -1211,6 +1528,7 @@ golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMe golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry v0.0.0-20250908211612-aef8a434d053 h1:dHQOQddU4YHS5gY33/6klKjq7Gp3WwMyOXGNp5nzRj8= golang.org/x/telemetry v0.0.0-20250908211612-aef8a434d053/go.mod h1:+nZKN+XVh4LCiA9DV3ywrzN4gumyCnKjau3NGb9SGoE= +golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc h1:bH6xUXay0AIFMElXG2rQ4uiE+7ncwtiOdPfYK1NK2XA= golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.24.0/go.mod 
h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= @@ -1223,6 +1541,7 @@ golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/term v0.35.0 h1:bZBVKBudEyhRcajGcNc3jIfWPqV4y/Kt2XcoigOWtDQ= golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA= +golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1307,7 +1626,9 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IV golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df h1:5Pf6pFKu98ODmgnpvkJ3kFUOQGGLIzLIkbzUHp47618= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +gonum.org/v1/plot v0.15.2 h1:Tlfh/jBk2tqjLZ4/P8ZIwGrLEWQSPDLRm/SNWKNXiGI= gonum.org/v1/plot v0.15.2/go.mod h1:DX+x+DWso3LTha+AdkJEv5Txvi+Tql3KAGkehP0/Ubg= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -1352,6 +1673,7 @@ google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuh google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= 
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -1405,6 +1727,7 @@ google.golang.org/genproto/googleapis/api v0.0.0-20250512202823-5a2f75b736a9/go. google.golang.org/genproto/googleapis/api v0.0.0-20250528174236-200df99c418a/go.mod h1:a77HrdMjoeKbnd2jmgcWdaS++ZLZAEq3orIOAEIKiVw= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= google.golang.org/genproto/googleapis/api v0.0.0-20250707201910-8d1bb00bc6a7/go.mod h1:kXqgZtrWaf6qS3jZOCnCH7WYfrvFjkC51bM8fz3RsCA= +google.golang.org/genproto/googleapis/bytestream v0.0.0-20250728155136-f173205681a0 h1:HyHZz/b6hYFm/aS2F0DOAyjK5mQw0Jh8YiZyqlXMfic= google.golang.org/genproto/googleapis/bytestream v0.0.0-20250728155136-f173205681a0/go.mod h1:h6yxum/C2qRb4txaZRLDHK8RyS0H/o2oEDeKY4onY/Y= google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= @@ -1449,6 +1772,7 @@ google.golang.org/grpc v1.72.1/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3i google.golang.org/grpc v1.72.2/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20 
h1:MLBCGN1O7GzIx+cBiwfYPwtmZ41U3Mn/cotLJciaArI= google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20/go.mod h1:Nr5H8+MlGWr5+xX/STzdoEqJrO+YteqFbMyCsrb6mH0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1473,6 +1797,7 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/errgo.v2 v2.1.0 h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= @@ -1486,6 +1811,7 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +k8s.io/apimachinery v0.28.6 h1:RsTeR4z6S07srPg6XYrwXpTJVMXsjPXn0ODakMytSW0= k8s.io/apimachinery v0.28.6/go.mod h1:QFNX/kCl/EMT2WTSz8k4WLCv2XnkOLMaL8GAVRMdpsA= lukechampine.com/uint128 v1.2.0 h1:mBi/5l91vocEN8otkC5bDLhi2KdCticRiwbdB0O+rjI= lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= @@ -1514,19 +1840,17 @@ modernc.org/lex v1.1.1 h1:prSCNTLw1R4rn7M/RzwsuMtAuOytfyR3cnyM07P+Pas= modernc.org/lex v1.1.1/go.mod h1:6r8o8DLJkAnOsQaGi8fMoi+Vt6LTbDaCrkUK729D8xM= modernc.org/lexer v1.0.4 h1:hU7xVbZsqwPphyzChc7nMSGrsuaD2PDNOmzrzkS5AlE= 
modernc.org/lexer v1.0.4/go.mod h1:tOajb8S4sdfOYitzCgXDFmbVJ/LE0v1fNJ7annTw36U= -modernc.org/libc v1.67.4 h1:zZGmCMUVPORtKv95c2ReQN5VDjvkoRm9GWPTEPuvlWg= -modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= -modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= modernc.org/parser v1.1.0 h1:XoClYpoz2xHEDIteSQ7tICOTFcNwBI7XRCeghUS6SNI= modernc.org/parser v1.1.0/go.mod h1:CXl3OTJRZij8FeMpzI3Id/bjupHf0u9HSrCUP4Z9pbA= modernc.org/scannertest v1.0.2 h1:JPtfxcVdbRvzmRf2YUvsDibJsQRw8vKA/3jb31y7cy0= modernc.org/scannertest v1.0.2/go.mod h1:RzTm5RwglF/6shsKoEivo8N91nQIoWtcWI7ns+zPyGA= -modernc.org/sqlite v1.43.0 h1:8YqiFx3G1VhHTXO2Q00bl1Wz9KhS9Q5okwfp9Y97VnA= modernc.org/y v1.1.0 h1:JdIvLry+rKeSsVNRCdr6YWYimwwNm0GXtzxid77VfWc= modernc.org/y v1.1.0/go.mod h1:Iz3BmyIS4OwAbwGaUS7cqRrLsSsfp2sFWtpzX+P4CsE= +nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0 h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= diff --git a/packages/db/pkg/sqlc/gen/db.go b/packages/db/pkg/sqlc/gen/db.go index 80ebb7a7..0bacb298 100644 --- a/packages/db/pkg/sqlc/gen/db.go +++ b/packages/db/pkg/sqlc/gen/db.go @@ -621,6 +621,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.getRootFilesByWorkspaceIDStmt, err = db.PrepareContext(ctx, getRootFilesByWorkspaceID); err != nil { return nil, fmt.Errorf("error preparing query GetRootFilesByWorkspaceID: %w", err) } + if q.getSyncedWorkspacesStmt, err = db.PrepareContext(ctx, getSyncedWorkspaces); err != 
nil { + return nil, fmt.Errorf("error preparing query GetSyncedWorkspaces: %w", err) + } if q.getTagStmt, err = db.PrepareContext(ctx, getTag); err != nil { return nil, fmt.Errorf("error preparing query GetTag: %w", err) } @@ -648,9 +651,6 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.getVariablesByEnvironmentIDOrderedStmt, err = db.PrepareContext(ctx, getVariablesByEnvironmentIDOrdered); err != nil { return nil, fmt.Errorf("error preparing query GetVariablesByEnvironmentIDOrdered: %w", err) } - if q.getSyncedWorkspacesStmt, err = db.PrepareContext(ctx, getSyncedWorkspaces); err != nil { - return nil, fmt.Errorf("error preparing query GetSyncedWorkspaces: %w", err) - } if q.getWorkspaceStmt, err = db.PrepareContext(ctx, getWorkspace); err != nil { return nil, fmt.Errorf("error preparing query GetWorkspace: %w", err) } @@ -1852,6 +1852,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing getRootFilesByWorkspaceIDStmt: %w", cerr) } } + if q.getSyncedWorkspacesStmt != nil { + if cerr := q.getSyncedWorkspacesStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getSyncedWorkspacesStmt: %w", cerr) + } + } if q.getTagStmt != nil { if cerr := q.getTagStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getTagStmt: %w", cerr) @@ -1897,11 +1902,6 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing getVariablesByEnvironmentIDOrderedStmt: %w", cerr) } } - if q.getSyncedWorkspacesStmt != nil { - if cerr := q.getSyncedWorkspacesStmt.Close(); cerr != nil { - err = fmt.Errorf("error closing getSyncedWorkspacesStmt: %w", cerr) - } - } if q.getWorkspaceStmt != nil { if cerr := q.getWorkspaceStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getWorkspaceStmt: %w", cerr) @@ -2475,6 +2475,7 @@ type Queries struct { getNodeExecutionStmt *sql.Stmt getNodeExecutionsByNodeIDStmt *sql.Stmt getRootFilesByWorkspaceIDStmt *sql.Stmt + getSyncedWorkspacesStmt *sql.Stmt getTagStmt *sql.Stmt 
getTagsByWorkspaceIDStmt *sql.Stmt getUserStmt *sql.Stmt @@ -2484,7 +2485,6 @@ type Queries struct { getVariableStmt *sql.Stmt getVariablesByEnvironmentIDStmt *sql.Stmt getVariablesByEnvironmentIDOrderedStmt *sql.Stmt - getSyncedWorkspacesStmt *sql.Stmt getWorkspaceStmt *sql.Stmt getWorkspaceByUserIDStmt *sql.Stmt getWorkspaceByUserIDandWorkspaceIDStmt *sql.Stmt @@ -2757,6 +2757,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getNodeExecutionStmt: q.getNodeExecutionStmt, getNodeExecutionsByNodeIDStmt: q.getNodeExecutionsByNodeIDStmt, getRootFilesByWorkspaceIDStmt: q.getRootFilesByWorkspaceIDStmt, + getSyncedWorkspacesStmt: q.getSyncedWorkspacesStmt, getTagStmt: q.getTagStmt, getTagsByWorkspaceIDStmt: q.getTagsByWorkspaceIDStmt, getUserStmt: q.getUserStmt, @@ -2766,7 +2767,6 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getVariableStmt: q.getVariableStmt, getVariablesByEnvironmentIDStmt: q.getVariablesByEnvironmentIDStmt, getVariablesByEnvironmentIDOrderedStmt: q.getVariablesByEnvironmentIDOrderedStmt, - getSyncedWorkspacesStmt: q.getSyncedWorkspacesStmt, getWorkspaceStmt: q.getWorkspaceStmt, getWorkspaceByUserIDStmt: q.getWorkspaceByUserIDStmt, getWorkspaceByUserIDandWorkspaceIDStmt: q.getWorkspaceByUserIDandWorkspaceIDStmt, diff --git a/packages/db/pkg/sqlc/gen/workspaces.sql.go b/packages/db/pkg/sqlc/gen/workspaces.sql.go index f08e3dfe..a5edc35e 100644 --- a/packages/db/pkg/sqlc/gen/workspaces.sql.go +++ b/packages/db/pkg/sqlc/gen/workspaces.sql.go @@ -183,6 +183,61 @@ func (q *Queries) GetAllWorkspacesByUserID(ctx context.Context, userID idwrap.ID return items, nil } +const getSyncedWorkspaces = `-- name: GetSyncedWorkspaces :many +SELECT + id, + name, + updated, + collection_count, + flow_count, + active_env, + global_env, + display_order, + sync_path, + sync_format, + sync_enabled +FROM + workspaces +WHERE + sync_enabled = 1 +` + +// Returns all workspaces with sync enabled +func (q *Queries) GetSyncedWorkspaces(ctx context.Context) ([]Workspace, 
error) { + rows, err := q.query(ctx, q.getSyncedWorkspacesStmt, getSyncedWorkspaces) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Workspace{} + for rows.Next() { + var i Workspace + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Updated, + &i.CollectionCount, + &i.FlowCount, + &i.ActiveEnv, + &i.GlobalEnv, + &i.DisplayOrder, + &i.SyncPath, + &i.SyncFormat, + &i.SyncEnabled, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getWorkspace = `-- name: GetWorkspace :one SELECT id, @@ -620,61 +675,6 @@ func (q *Queries) UpdateWorkspace(ctx context.Context, arg UpdateWorkspaceParams return err } -const getSyncedWorkspaces = `-- name: GetSyncedWorkspaces :many -SELECT - id, - name, - updated, - collection_count, - flow_count, - active_env, - global_env, - display_order, - sync_path, - sync_format, - sync_enabled -FROM - workspaces -WHERE - sync_enabled = 1 -` - -// Returns all workspaces with sync enabled -func (q *Queries) GetSyncedWorkspaces(ctx context.Context) ([]Workspace, error) { - rows, err := q.query(ctx, q.getSyncedWorkspacesStmt, getSyncedWorkspaces) - if err != nil { - return nil, err - } - defer rows.Close() - items := []Workspace{} - for rows.Next() { - var i Workspace - if err := rows.Scan( - &i.ID, - &i.Name, - &i.Updated, - &i.CollectionCount, - &i.FlowCount, - &i.ActiveEnv, - &i.GlobalEnv, - &i.DisplayOrder, - &i.SyncPath, - &i.SyncFormat, - &i.SyncEnabled, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Close(); err != nil { - return nil, err - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - const updateWorkspaceSync = `-- name: UpdateWorkspaceSync :exec UPDATE workspaces SET diff --git a/packages/db/pkg/sqlc/sqlc.yaml b/packages/db/pkg/sqlc/sqlc.yaml index dddf4363..0c1b279d 
100644 --- a/packages/db/pkg/sqlc/sqlc.yaml +++ b/packages/db/pkg/sqlc/sqlc.yaml @@ -62,6 +62,16 @@ sql: package: 'idwrap' type: 'IDWrap' pointer: true + ### sync_path + - column: 'workspaces.sync_path' + go_type: + type: 'string' + pointer: true + ### sync_format + - column: 'workspaces.sync_format' + go_type: + type: 'string' + pointer: true ## workspaces_users ### id - column: 'workspaces_users.id' From 72e3c8a5c6bae7fef90f89a1621bdb4022080225 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 21:11:15 +0000 Subject: [PATCH 19/20] fix: resolve golangci-lint issues in openyaml, topencollection, rworkspace - openyaml/directory.go: use 0o750 dir permissions, check error returns in atomicWrite cleanup, add exhaustive switch cases - topencollection/converter.go: suppress gosec G304 for intentional file reads from user-specified collection paths - rworkspace.go: use tagged switch instead of if/else chains (QF1003) https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- .../internal/api/rworkspace/rworkspace.go | 10 ++++++---- packages/server/pkg/openyaml/directory.go | 17 ++++++++++------- .../pkg/translate/topencollection/converter.go | 6 +++--- 3 files changed, 19 insertions(+), 14 deletions(-) diff --git a/packages/server/internal/api/rworkspace/rworkspace.go b/packages/server/internal/api/rworkspace/rworkspace.go index 7554ca14..084b1545 100644 --- a/packages/server/internal/api/rworkspace/rworkspace.go +++ b/packages/server/internal/api/rworkspace/rworkspace.go @@ -499,16 +499,18 @@ func (c *WorkspaceServiceRPC) WorkspaceUpdate(ctx context.Context, req *connect. 
ws.Order = float64(*item.Order) } if item.SyncPath != nil { - if item.SyncPath.Kind == apiv1.WorkspaceUpdate_SyncPathUnion_KIND_VALUE { + switch item.SyncPath.Kind { + case apiv1.WorkspaceUpdate_SyncPathUnion_KIND_VALUE: ws.SyncPath = item.SyncPath.Value - } else if item.SyncPath.Kind == apiv1.WorkspaceUpdate_SyncPathUnion_KIND_UNSET { + case apiv1.WorkspaceUpdate_SyncPathUnion_KIND_UNSET: ws.SyncPath = nil } } if item.SyncFormat != nil { - if item.SyncFormat.Kind == apiv1.WorkspaceUpdate_SyncFormatUnion_KIND_VALUE { + switch item.SyncFormat.Kind { + case apiv1.WorkspaceUpdate_SyncFormatUnion_KIND_VALUE: ws.SyncFormat = item.SyncFormat.Value - } else if item.SyncFormat.Kind == apiv1.WorkspaceUpdate_SyncFormatUnion_KIND_UNSET { + case apiv1.WorkspaceUpdate_SyncFormatUnion_KIND_UNSET: ws.SyncFormat = nil } } diff --git a/packages/server/pkg/openyaml/directory.go b/packages/server/pkg/openyaml/directory.go index 588c96a8..e11f74d8 100644 --- a/packages/server/pkg/openyaml/directory.go +++ b/packages/server/pkg/openyaml/directory.go @@ -122,7 +122,7 @@ func ReadDirectory(dirPath string, opts ReadOptions) (*ioworkspace.WorkspaceBund // Creates one .yaml file per request, flow, and environment. // Directory structure mirrors the mfile.File hierarchy. 
func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { - if err := os.MkdirAll(dirPath, 0o755); err != nil { + if err := os.MkdirAll(dirPath, 0o750); err != nil { return fmt.Errorf("create directory: %w", err) } @@ -131,7 +131,7 @@ func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { // Write environments if len(bundle.Environments) > 0 { envDir := filepath.Join(dirPath, environmentsDir) - if err := os.MkdirAll(envDir, 0o755); err != nil { + if err := os.MkdirAll(envDir, 0o750); err != nil { return fmt.Errorf("create environments dir: %w", err) } @@ -168,7 +168,7 @@ func WriteDirectory(dirPath string, bundle *ioworkspace.WorkspaceBundle) error { // Write flows if len(bundle.Flows) > 0 { flowDir := filepath.Join(dirPath, flowsDir) - if err := os.MkdirAll(flowDir, 0o755); err != nil { + if err := os.MkdirAll(flowDir, 0o750); err != nil { return fmt.Errorf("create flows dir: %w", err) } @@ -541,7 +541,7 @@ func writeFilesRecursive( switch f.ContentType { case mfile.ContentTypeFolder: subDir := filepath.Join(currentDir, sanitizeFilename(f.Name)) - if err := os.MkdirAll(subDir, 0o755); err != nil { + if err := os.MkdirAll(subDir, 0o750); err != nil { return fmt.Errorf("create dir %q: %w", f.Name, err) } if err := writeFilesRecursive(subDir, f.ID.String(), childrenByParent, lk); err != nil { @@ -567,6 +567,9 @@ func writeFilesRecursive( if err := atomicWrite(filepath.Join(currentDir, filename), data); err != nil { return fmt.Errorf("write request %q: %w", httpReq.Name, err) } + + case mfile.ContentTypeHTTPDelta, mfile.ContentTypeFlow, mfile.ContentTypeCredential: + // These content types are not exported to OpenYAML format } } @@ -707,12 +710,12 @@ func atomicWrite(path string, data []byte) error { tmpName := tmp.Name() if _, err := tmp.Write(data); err != nil { - tmp.Close() - os.Remove(tmpName) + _ = tmp.Close() + _ = os.Remove(tmpName) return err } if err := tmp.Close(); err != nil { - os.Remove(tmpName) + _ = 
os.Remove(tmpName) return err } diff --git a/packages/server/pkg/translate/topencollection/converter.go b/packages/server/pkg/translate/topencollection/converter.go index 134112ef..e8be33fa 100644 --- a/packages/server/pkg/translate/topencollection/converter.go +++ b/packages/server/pkg/translate/topencollection/converter.go @@ -36,7 +36,7 @@ func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*ioworks // Parse collection root rootPath := filepath.Join(collectionPath, "opencollection.yml") - rootData, err := os.ReadFile(rootPath) + rootData, err := os.ReadFile(rootPath) //nolint:gosec // Intentional: reading user-specified collection path if err != nil { return nil, fmt.Errorf("failed to read opencollection.yml: %w", err) } @@ -73,7 +73,7 @@ func ConvertOpenCollection(collectionPath string, opts ConvertOptions) (*ioworks continue } - envData, err := os.ReadFile(filepath.Join(envDir, entry.Name())) + envData, err := os.ReadFile(filepath.Join(envDir, entry.Name())) //nolint:gosec // Intentional: reading from collection directory if err != nil { logger.Warn("failed to read environment file", "file", entry.Name(), "error", err) continue @@ -138,7 +138,7 @@ func walkCollection( order := float64(1) for _, fileEntry := range files { filePath := filepath.Join(dirPath, fileEntry.Name()) - data, err := os.ReadFile(filePath) + data, err := os.ReadFile(filePath) //nolint:gosec // Intentional: reading from collection directory if err != nil { logger.Warn("failed to read file", "file", filePath, "error", err) continue From 0678e3faa793a5166c35ea20a5cdd01bd43818e4 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 8 Feb 2026 21:19:42 +0000 Subject: [PATCH 20/20] fix: suppress gosec G304 for openyaml directory reads These are intentional file reads from user-specified sync directories, not untrusted path inclusion vulnerabilities. 
https://claude.ai/code/session_018oGCHwsVoduywqfM8rzmyR --- packages/server/pkg/openyaml/directory.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/server/pkg/openyaml/directory.go b/packages/server/pkg/openyaml/directory.go index e11f74d8..5ab47426 100644 --- a/packages/server/pkg/openyaml/directory.go +++ b/packages/server/pkg/openyaml/directory.go @@ -210,7 +210,7 @@ func readEnvironments(envDir string, workspaceID idwrap.IDWrap, bundle *ioworksp continue } - data, err := os.ReadFile(filepath.Join(envDir, entry.Name())) + data, err := os.ReadFile(filepath.Join(envDir, entry.Name())) //nolint:gosec // Intentional: reading from user-specified sync directory if err != nil { return fmt.Errorf("read %s: %w", entry.Name(), err) } @@ -263,7 +263,7 @@ func readFlows(flowDir string, workspaceID idwrap.IDWrap, bundle *ioworkspace.Wo continue } - data, err := os.ReadFile(filepath.Join(flowDir, entry.Name())) + data, err := os.ReadFile(filepath.Join(flowDir, entry.Name())) //nolint:gosec // Intentional: reading from user-specified sync directory if err != nil { return fmt.Errorf("read %s: %w", entry.Name(), err) } @@ -361,7 +361,7 @@ func readRequestsRecursive( continue } - data, err := os.ReadFile(filepath.Join(dirPath, name)) + data, err := os.ReadFile(filepath.Join(dirPath, name)) //nolint:gosec // Intentional: reading from user-specified sync directory if err != nil { return fmt.Errorf("read %s: %w", name, err) }