From 2976ebed59397113169ca0e3f18161a874c7d3ed Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Thu, 22 Jan 2026 09:35:10 +0000
Subject: [PATCH 1/2] feat: auto-set Pro models after sign-in while preserving
 previous configs

- Save current STT/LLM configuration before logout to pre_logout_* settings
- On sign-in, restore saved configurations if they exist, otherwise set to Pro models
- Clear saved pre-logout configs after restoration

Co-Authored-By: john@hyprnote.com
---
 .../components/settings/general/account.tsx |  6 ++++
 .../src/hooks/useProModelAutoConfig.ts      | 29 ++++++++++++++-----
 .../src/store/tinybase/store/settings.ts    | 16 ++++++++++
 3 files changed, 43 insertions(+), 8 deletions(-)

diff --git a/apps/desktop/src/components/settings/general/account.tsx b/apps/desktop/src/components/settings/general/account.tsx
index c66cde8e22..7593ff8e13 100644
--- a/apps/desktop/src/components/settings/general/account.tsx
+++ b/apps/desktop/src/components/settings/general/account.tsx
@@ -60,6 +60,12 @@ export function AccountSettings() {
       const currentSttProvider = store.getValue("current_stt_provider");
       const currentSttModel = store.getValue("current_stt_model");
       const currentLlmProvider = store.getValue("current_llm_provider");
+      const currentLlmModel = store.getValue("current_llm_model");
+
+      store.setValue("pre_logout_stt_provider", currentSttProvider ?? "");
+      store.setValue("pre_logout_stt_model", currentSttModel ?? "");
+      store.setValue("pre_logout_llm_provider", currentLlmProvider ?? "");
+      store.setValue("pre_logout_llm_model", currentLlmModel ?? "");

       if (currentSttProvider === "hyprnote" && currentSttModel === "cloud") {
         store.setValue("current_stt_model", "");
diff --git a/apps/desktop/src/hooks/useProModelAutoConfig.ts b/apps/desktop/src/hooks/useProModelAutoConfig.ts
index 6fd06d3401..063e4c8b1d 100644
--- a/apps/desktop/src/hooks/useProModelAutoConfig.ts
+++ b/apps/desktop/src/hooks/useProModelAutoConfig.ts
@@ -17,21 +17,34 @@ export function useProModelAutoConfig() {
     const isNowPro = isPro === true;

     if (wasNotPro && isNowPro) {
-      const currentSttProvider = store.getValue("current_stt_provider");
-      const currentLlmProvider = store.getValue("current_llm_provider");
-
-      const sttNotConfigured = !currentSttProvider;
-      const llmNotConfigured = !currentLlmProvider;
-
-      if (sttNotConfigured) {
+      const preLogoutSttProvider = store.getValue("pre_logout_stt_provider");
+      const preLogoutSttModel = store.getValue("pre_logout_stt_model");
+      const preLogoutLlmProvider = store.getValue("pre_logout_llm_provider");
+      const preLogoutLlmModel = store.getValue("pre_logout_llm_model");
+
+      const hasSavedSttConfig = !!preLogoutSttProvider;
+      const hasSavedLlmConfig = !!preLogoutLlmProvider;
+
+      if (hasSavedSttConfig) {
+        store.setValue("current_stt_provider", preLogoutSttProvider);
+        store.setValue("current_stt_model", preLogoutSttModel ?? "");
+      } else {
         store.setValue("current_stt_provider", "hyprnote");
         store.setValue("current_stt_model", "cloud");
       }

-      if (llmNotConfigured) {
+      if (hasSavedLlmConfig) {
+        store.setValue("current_llm_provider", preLogoutLlmProvider);
+        store.setValue("current_llm_model", preLogoutLlmModel ?? "");
""); + } else { store.setValue("current_llm_provider", "hyprnote"); store.setValue("current_llm_model", "Auto"); } + + store.setValue("pre_logout_stt_provider", ""); + store.setValue("pre_logout_stt_model", ""); + store.setValue("pre_logout_llm_provider", ""); + store.setValue("pre_logout_llm_model", ""); } prevIsProRef.current = isPro; diff --git a/apps/desktop/src/store/tinybase/store/settings.ts b/apps/desktop/src/store/tinybase/store/settings.ts index f53f830bda..b40abe3b15 100644 --- a/apps/desktop/src/store/tinybase/store/settings.ts +++ b/apps/desktop/src/store/tinybase/store/settings.ts @@ -65,6 +65,22 @@ export const SETTINGS_MAPPING = { type: "string", path: ["ai", "current_stt_model"], }, + pre_logout_stt_provider: { + type: "string", + path: ["ai", "pre_logout_stt_provider"], + }, + pre_logout_stt_model: { + type: "string", + path: ["ai", "pre_logout_stt_model"], + }, + pre_logout_llm_provider: { + type: "string", + path: ["ai", "pre_logout_llm_provider"], + }, + pre_logout_llm_model: { + type: "string", + path: ["ai", "pre_logout_llm_model"], + }, trial_expired_modal_dismissed_at: { type: "number", path: ["billing", "trial_expired_modal_dismissed_at"], From 2adc5dc655df3fe98bfbff972b9c645c7779becf Mon Sep 17 00:00:00 2001 From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com> Date: Thu, 22 Jan 2026 14:16:00 +0000 Subject: [PATCH 2/2] refactor: correct Pro model auto-config logic and add tests - On becoming Pro: save current non-Pro config to pre_pro_* fields, then set Pro models - On sign-out: restore pre_pro_* config if exists, otherwise clear settings - Renamed pre_logout_* to pre_pro_* for clearer semantics - Added store-level tests for the Pro model auto-config flow Co-Authored-By: john@hyprnote.com --- .../components/settings/general/account.tsx | 29 +-- .../src/hooks/useProModelAutoConfig.test.ts | 176 ++++++++++++++++++ .../src/hooks/useProModelAutoConfig.ts | 40 ++-- .../src/store/tinybase/store/settings.ts | 16 +- 4 files changed, 217 insertions(+), 44 deletions(-) create mode 100644 apps/desktop/src/hooks/useProModelAutoConfig.test.ts diff --git a/apps/desktop/src/components/settings/general/account.tsx b/apps/desktop/src/components/settings/general/account.tsx index 7593ff8e13..7b16b1db3d 100644 --- a/apps/desktop/src/components/settings/general/account.tsx +++ b/apps/desktop/src/components/settings/general/account.tsx @@ -57,24 +57,31 @@ export function AccountSettings() { }); if (store) { - const currentSttProvider = store.getValue("current_stt_provider"); - const currentSttModel = store.getValue("current_stt_model"); - const currentLlmProvider = store.getValue("current_llm_provider"); - const currentLlmModel = store.getValue("current_llm_model"); + const preProSttProvider = store.getValue("pre_pro_stt_provider"); + const preProSttModel = store.getValue("pre_pro_stt_model"); + const preProLlmProvider = store.getValue("pre_pro_llm_provider"); + const preProLlmModel = store.getValue("pre_pro_llm_model"); - store.setValue("pre_logout_stt_provider", currentSttProvider ?? ""); - store.setValue("pre_logout_stt_model", currentSttModel ?? ""); - store.setValue("pre_logout_llm_provider", currentLlmProvider ?? ""); - store.setValue("pre_logout_llm_model", currentLlmModel ?? ""); - - if (currentSttProvider === "hyprnote" && currentSttModel === "cloud") { + if (preProSttProvider) { + store.setValue("current_stt_provider", preProSttProvider); + store.setValue("current_stt_model", preProSttModel ?? 
""); + } else { + store.setValue("current_stt_provider", ""); store.setValue("current_stt_model", ""); } - if (currentLlmProvider === "hyprnote") { + if (preProLlmProvider) { + store.setValue("current_llm_provider", preProLlmProvider); + store.setValue("current_llm_model", preProLlmModel ?? ""); + } else { store.setValue("current_llm_provider", ""); store.setValue("current_llm_model", ""); } + + store.setValue("pre_pro_stt_provider", ""); + store.setValue("pre_pro_stt_model", ""); + store.setValue("pre_pro_llm_provider", ""); + store.setValue("pre_pro_llm_model", ""); } await auth?.signOut(); diff --git a/apps/desktop/src/hooks/useProModelAutoConfig.test.ts b/apps/desktop/src/hooks/useProModelAutoConfig.test.ts new file mode 100644 index 0000000000..50dbb81c10 --- /dev/null +++ b/apps/desktop/src/hooks/useProModelAutoConfig.test.ts @@ -0,0 +1,176 @@ +import { describe, expect, test } from "vitest"; + +import { createTestSettingsStore } from "../store/tinybase/persister/testing/mocks"; + +describe("Pro model auto-config store logic", () => { + test("on becoming Pro: saves non-Pro config and sets Pro models", () => { + const store = createTestSettingsStore(); + + store.setValue("current_stt_provider", "openai"); + store.setValue("current_stt_model", "whisper-1"); + store.setValue("current_llm_provider", "anthropic"); + store.setValue("current_llm_model", "claude-3"); + + const currentSttProvider = store.getValue("current_stt_provider"); + const currentSttModel = store.getValue("current_stt_model"); + const currentLlmProvider = store.getValue("current_llm_provider"); + const currentLlmModel = store.getValue("current_llm_model"); + + if (currentSttProvider && currentSttProvider !== "hyprnote") { + store.setValue("pre_pro_stt_provider", currentSttProvider); + store.setValue("pre_pro_stt_model", currentSttModel ?? ""); + } + if (currentLlmProvider && currentLlmProvider !== "hyprnote") { + store.setValue("pre_pro_llm_provider", currentLlmProvider); + store.setValue("pre_pro_llm_model", currentLlmModel ?? 
""); + } + + store.setValue("current_stt_provider", "hyprnote"); + store.setValue("current_stt_model", "cloud"); + store.setValue("current_llm_provider", "hyprnote"); + store.setValue("current_llm_model", "Auto"); + + expect(store.getValue("current_stt_provider")).toBe("hyprnote"); + expect(store.getValue("current_stt_model")).toBe("cloud"); + expect(store.getValue("current_llm_provider")).toBe("hyprnote"); + expect(store.getValue("current_llm_model")).toBe("Auto"); + expect(store.getValue("pre_pro_stt_provider")).toBe("openai"); + expect(store.getValue("pre_pro_stt_model")).toBe("whisper-1"); + expect(store.getValue("pre_pro_llm_provider")).toBe("anthropic"); + expect(store.getValue("pre_pro_llm_model")).toBe("claude-3"); + }); + + test("on sign-out: restores pre-Pro config and clears saved values", () => { + const store = createTestSettingsStore(); + + store.setValue("current_stt_provider", "hyprnote"); + store.setValue("current_stt_model", "cloud"); + store.setValue("current_llm_provider", "hyprnote"); + store.setValue("current_llm_model", "Auto"); + store.setValue("pre_pro_stt_provider", "openai"); + store.setValue("pre_pro_stt_model", "whisper-1"); + store.setValue("pre_pro_llm_provider", "anthropic"); + store.setValue("pre_pro_llm_model", "claude-3"); + + const preProSttProvider = store.getValue("pre_pro_stt_provider"); + const preProSttModel = store.getValue("pre_pro_stt_model"); + const preProLlmProvider = store.getValue("pre_pro_llm_provider"); + const preProLlmModel = store.getValue("pre_pro_llm_model"); + + if (preProSttProvider) { + store.setValue("current_stt_provider", preProSttProvider); + store.setValue("current_stt_model", preProSttModel ?? ""); + } else { + store.setValue("current_stt_provider", ""); + store.setValue("current_stt_model", ""); + } + + if (preProLlmProvider) { + store.setValue("current_llm_provider", preProLlmProvider); + store.setValue("current_llm_model", preProLlmModel ?? 
""); + } else { + store.setValue("current_llm_provider", ""); + store.setValue("current_llm_model", ""); + } + + store.setValue("pre_pro_stt_provider", ""); + store.setValue("pre_pro_stt_model", ""); + store.setValue("pre_pro_llm_provider", ""); + store.setValue("pre_pro_llm_model", ""); + + expect(store.getValue("current_stt_provider")).toBe("openai"); + expect(store.getValue("current_stt_model")).toBe("whisper-1"); + expect(store.getValue("current_llm_provider")).toBe("anthropic"); + expect(store.getValue("current_llm_model")).toBe("claude-3"); + expect(store.getValue("pre_pro_stt_provider")).toBe(""); + expect(store.getValue("pre_pro_stt_model")).toBe(""); + expect(store.getValue("pre_pro_llm_provider")).toBe(""); + expect(store.getValue("pre_pro_llm_model")).toBe(""); + }); + + test("on sign-out without pre-Pro config: clears current config", () => { + const store = createTestSettingsStore(); + + store.setValue("current_stt_provider", "hyprnote"); + store.setValue("current_stt_model", "cloud"); + store.setValue("current_llm_provider", "hyprnote"); + store.setValue("current_llm_model", "Auto"); + + const preProSttProvider = store.getValue("pre_pro_stt_provider"); + const preProLlmProvider = store.getValue("pre_pro_llm_provider"); + + if (preProSttProvider) { + store.setValue("current_stt_provider", preProSttProvider); + } else { + store.setValue("current_stt_provider", ""); + store.setValue("current_stt_model", ""); + } + + if (preProLlmProvider) { + store.setValue("current_llm_provider", preProLlmProvider); + } else { + store.setValue("current_llm_provider", ""); + store.setValue("current_llm_model", ""); + } + + expect(store.getValue("current_stt_provider")).toBe(""); + expect(store.getValue("current_stt_model")).toBe(""); + expect(store.getValue("current_llm_provider")).toBe(""); + expect(store.getValue("current_llm_model")).toBe(""); + }); + + test("full flow: non-Pro -> Pro -> sign-out restores original config", () => { + const store = createTestSettingsStore(); + + store.setValue("current_stt_provider", "deepgram"); + store.setValue("current_stt_model", "nova-2"); + store.setValue("current_llm_provider", "openai"); + store.setValue("current_llm_model", "gpt-4"); + + const sttBefore = store.getValue("current_stt_provider"); + const sttModelBefore = store.getValue("current_stt_model"); + const llmBefore = store.getValue("current_llm_provider"); + const llmModelBefore = store.getValue("current_llm_model"); + + if (sttBefore && sttBefore !== "hyprnote") { + store.setValue("pre_pro_stt_provider", sttBefore); + store.setValue("pre_pro_stt_model", sttModelBefore ?? ""); + } + if (llmBefore && llmBefore !== "hyprnote") { + store.setValue("pre_pro_llm_provider", llmBefore); + store.setValue("pre_pro_llm_model", llmModelBefore ?? ""); + } + store.setValue("current_stt_provider", "hyprnote"); + store.setValue("current_stt_model", "cloud"); + store.setValue("current_llm_provider", "hyprnote"); + store.setValue("current_llm_model", "Auto"); + + expect(store.getValue("current_stt_provider")).toBe("hyprnote"); + expect(store.getValue("current_llm_provider")).toBe("hyprnote"); + + const preProStt = store.getValue("pre_pro_stt_provider"); + const preProSttModel = store.getValue("pre_pro_stt_model"); + const preProLlm = store.getValue("pre_pro_llm_provider"); + const preProLlmModel = store.getValue("pre_pro_llm_model"); + + if (preProStt) { + store.setValue("current_stt_provider", preProStt); + store.setValue("current_stt_model", preProSttModel ?? 
""); + } + if (preProLlm) { + store.setValue("current_llm_provider", preProLlm); + store.setValue("current_llm_model", preProLlmModel ?? ""); + } + store.setValue("pre_pro_stt_provider", ""); + store.setValue("pre_pro_stt_model", ""); + store.setValue("pre_pro_llm_provider", ""); + store.setValue("pre_pro_llm_model", ""); + + expect(store.getValue("current_stt_provider")).toBe("deepgram"); + expect(store.getValue("current_stt_model")).toBe("nova-2"); + expect(store.getValue("current_llm_provider")).toBe("openai"); + expect(store.getValue("current_llm_model")).toBe("gpt-4"); + expect(store.getValue("pre_pro_stt_provider")).toBe(""); + expect(store.getValue("pre_pro_llm_provider")).toBe(""); + }); +}); diff --git a/apps/desktop/src/hooks/useProModelAutoConfig.ts b/apps/desktop/src/hooks/useProModelAutoConfig.ts index 063e4c8b1d..cd9c255afa 100644 --- a/apps/desktop/src/hooks/useProModelAutoConfig.ts +++ b/apps/desktop/src/hooks/useProModelAutoConfig.ts @@ -17,34 +17,24 @@ export function useProModelAutoConfig() { const isNowPro = isPro === true; if (wasNotPro && isNowPro) { - const preLogoutSttProvider = store.getValue("pre_logout_stt_provider"); - const preLogoutSttModel = store.getValue("pre_logout_stt_model"); - const preLogoutLlmProvider = store.getValue("pre_logout_llm_provider"); - const preLogoutLlmModel = store.getValue("pre_logout_llm_model"); - - const hasSavedSttConfig = !!preLogoutSttProvider; - const hasSavedLlmConfig = !!preLogoutLlmProvider; - - if (hasSavedSttConfig) { - store.setValue("current_stt_provider", preLogoutSttProvider); - store.setValue("current_stt_model", preLogoutSttModel ?? ""); - } else { - store.setValue("current_stt_provider", "hyprnote"); - store.setValue("current_stt_model", "cloud"); + const currentSttProvider = store.getValue("current_stt_provider"); + const currentSttModel = store.getValue("current_stt_model"); + const currentLlmProvider = store.getValue("current_llm_provider"); + const currentLlmModel = store.getValue("current_llm_model"); + + if (currentSttProvider && currentSttProvider !== "hyprnote") { + store.setValue("pre_pro_stt_provider", currentSttProvider); + store.setValue("pre_pro_stt_model", currentSttModel ?? ""); } - - if (hasSavedLlmConfig) { - store.setValue("current_llm_provider", preLogoutLlmProvider); - store.setValue("current_llm_model", preLogoutLlmModel ?? ""); - } else { - store.setValue("current_llm_provider", "hyprnote"); - store.setValue("current_llm_model", "Auto"); + if (currentLlmProvider && currentLlmProvider !== "hyprnote") { + store.setValue("pre_pro_llm_provider", currentLlmProvider); + store.setValue("pre_pro_llm_model", currentLlmModel ?? 
""); } - store.setValue("pre_logout_stt_provider", ""); - store.setValue("pre_logout_stt_model", ""); - store.setValue("pre_logout_llm_provider", ""); - store.setValue("pre_logout_llm_model", ""); + store.setValue("current_stt_provider", "hyprnote"); + store.setValue("current_stt_model", "cloud"); + store.setValue("current_llm_provider", "hyprnote"); + store.setValue("current_llm_model", "Auto"); } prevIsProRef.current = isPro; diff --git a/apps/desktop/src/store/tinybase/store/settings.ts b/apps/desktop/src/store/tinybase/store/settings.ts index b40abe3b15..db0729ad0c 100644 --- a/apps/desktop/src/store/tinybase/store/settings.ts +++ b/apps/desktop/src/store/tinybase/store/settings.ts @@ -65,21 +65,21 @@ export const SETTINGS_MAPPING = { type: "string", path: ["ai", "current_stt_model"], }, - pre_logout_stt_provider: { + pre_pro_stt_provider: { type: "string", - path: ["ai", "pre_logout_stt_provider"], + path: ["ai", "pre_pro_stt_provider"], }, - pre_logout_stt_model: { + pre_pro_stt_model: { type: "string", - path: ["ai", "pre_logout_stt_model"], + path: ["ai", "pre_pro_stt_model"], }, - pre_logout_llm_provider: { + pre_pro_llm_provider: { type: "string", - path: ["ai", "pre_logout_llm_provider"], + path: ["ai", "pre_pro_llm_provider"], }, - pre_logout_llm_model: { + pre_pro_llm_model: { type: "string", - path: ["ai", "pre_logout_llm_model"], + path: ["ai", "pre_pro_llm_model"], }, trial_expired_modal_dismissed_at: { type: "number",