diff --git a/app/constants.ts b/app/constants.ts
index 2b2463b..3b5acee 100644
--- a/app/constants.ts
+++ b/app/constants.ts
@@ -1,6 +1,7 @@
import { AnthropicIcon } from './src/components/AnthropicIcon'
import { GeminiIcon } from './src/components/GeminiIcon'
import { OpenAIIcon } from './src/components/OpenAIIcon'
+import { KimiIcon } from './src/components/KimiIcon'
const normalizeDomain = (value?: string) => {
if (!value) return ''
@@ -41,6 +42,7 @@ export const MODELS = {
gpt52: { name: 'GPT 5.2', label: 'gpt52', icon: OpenAIIcon },
gpt5Mini: { name: 'GPT 5 Mini', label: 'gpt5Mini', icon: OpenAIIcon },
gemini: { name: 'Gemini', label: 'gemini', icon: GeminiIcon },
+ kimiK25: { name: 'Kimi K2.5', label: 'kimiK25', icon: KimiIcon },
}
export const IMAGE_MODELS = {
diff --git a/app/src/components/KimiIcon.tsx b/app/src/components/KimiIcon.tsx
new file mode 100644
index 0000000..b738268
--- /dev/null
+++ b/app/src/components/KimiIcon.tsx
@@ -0,0 +1,29 @@
+import Svg, { Path } from 'react-native-svg';
+
+interface IKimiIcon {
+ size: number
+ theme: any
+ selected: boolean
+}
+
+export function KimiIcon({
+ size,
+ theme,
+ selected,
+ ...props
+}: IKimiIcon) {
+ const fill = selected ? theme.tintTextColor : theme.textColor
+ return (
+
+ )
+}
diff --git a/app/src/components/index.ts b/app/src/components/index.ts
index 87cc792..6c0ca56 100644
--- a/app/src/components/index.ts
+++ b/app/src/components/index.ts
@@ -3,4 +3,5 @@ export { Header } from './Header'
export { AnthropicIcon } from './AnthropicIcon'
export { GeminiIcon } from './GeminiIcon'
export { OpenAIIcon } from './OpenAIIcon'
+export { KimiIcon } from './KimiIcon'
export { ChatModelModal } from './ChatModelModal'
diff --git a/app/src/screens/chat.tsx b/app/src/screens/chat.tsx
index 13deec5..0efa03a 100644
--- a/app/src/screens/chat.tsx
+++ b/app/src/screens/chat.tsx
@@ -67,6 +67,8 @@ export function Chat() {
generateGptResponse()
} else if (chatType.label.includes('gemini')) {
generateGeminiResponse()
+ } else if (chatType.label.includes('kimi')) {
+ generateKimiResponse()
}
}
async function generateGptResponse() {
@@ -231,6 +233,95 @@
    es.addEventListener("error", listener);
 
+  // Streams a Kimi completion over SSE and incrementally writes the
+  // assistant's partial response into chat state as chunks arrive.
+  async function generateKimiResponse() {
+    if (!input) return
+    Keyboard.dismiss()
+    let localResponse = ''
+    const modelLabel = chatType.label
+    const currentState = getChatState(modelLabel)
+
+    // Append the new user turn. Plain array type: the original
+    // `as [{...}]` assertion wrongly claimed a one-element tuple.
+    let messageArray: { user: string, assistant?: string }[] = [
+      ...currentState.messages, {
+        user: input,
+      }
+    ]
+
+    updateChatState(modelLabel, prev => ({
+      ...prev,
+      messages: JSON.parse(JSON.stringify(messageArray))
+    }))
+
+    setLoading(true)
+    setTimeout(() => {
+      scrollViewRef.current?.scrollToEnd({
+        animated: true
+      })
+    }, 1)
+    setInput('')
+
+    // Flatten {user, assistant} turns into the role/content list the API expects.
+    const messages = messageArray.reduce((acc: any[], message) => {
+      acc.push({ role: 'user', content: message.user })
+      if (message.assistant) {
+        acc.push({ role: 'assistant', content: message.assistant })
+      }
+      return acc
+    }, [])
+
+    const eventSourceArgs = {
+      body: {
+        messages,
+        model: chatType.label
+      },
+      type: getChatType(chatType)
+    }
+
+    const es = await getEventSource(eventSourceArgs)
+
+    const listener = (event: any) => {
+      if (event.type === "open") {
+        console.log("Open SSE connection.")
+        setLoading(false)
+      } else if (event.type === "message") {
+        if (event.data !== "[DONE]") {
+          // Only auto-scroll while the response is short, so long answers
+          // don't keep yanking the view to the bottom while the user reads.
+          if (localResponse.length < 850) {
+            scrollViewRef.current?.scrollToEnd({
+              animated: true
+            })
+          }
+          const data = JSON.parse(event.data)
+          if (typeof data === 'string') {
+            localResponse = localResponse + data
+          } else if (data?.content) {
+            localResponse = localResponse + data.content
+          }
+          messageArray[messageArray.length - 1].assistant = localResponse
+          updateChatState(modelLabel, prev => ({
+            ...prev,
+            messages: JSON.parse(JSON.stringify(messageArray))
+          }))
+        } else {
+          setLoading(false)
+          es.close()
+        }
+      } else if (event.type === "error") {
+        console.error("Connection error:", event.message)
+        setLoading(false)
+      } else if (event.type === "exception") {
+        console.error("Error:", event.message, event.error)
+        setLoading(false)
+      }
+    }
+
+    es.addEventListener("open", listener)
+    es.addEventListener("message", listener)
+    es.addEventListener("error", listener)
+  }
async function generateClaudeResponse() {
if (!input) return
Keyboard.dismiss()
diff --git a/app/src/utils.ts b/app/src/utils.ts
index b8022cd..a6a88b8 100644
--- a/app/src/utils.ts
+++ b/app/src/utils.ts
@@ -49,5 +49,8 @@ export function getChatType(type: Model) {
if (type.label.includes('gemini')) {
return 'gemini'
}
+ if (type.label.includes('kimi')) {
+ return 'kimi'
+ }
else return 'claude'
}
diff --git a/server/src/chat/chatRouter.ts b/server/src/chat/chatRouter.ts
index 6bb52b6..cdec45c 100644
--- a/server/src/chat/chatRouter.ts
+++ b/server/src/chat/chatRouter.ts
@@ -2,11 +2,13 @@ import express from 'express'
import { claude } from './claude'
import { gpt } from './gpt'
import { gemini } from './gemini'
+import { kimi } from './kimi'
const router = express.Router()
router.post('/claude', claude)
router.post('/gpt', gpt)
router.post('/gemini', gemini)
+router.post('/kimi', kimi)
export default router
diff --git a/server/src/chat/kimi.ts b/server/src/chat/kimi.ts
new file mode 100644
index 0000000..f00fbf7
--- /dev/null
+++ b/server/src/chat/kimi.ts
@@ -0,0 +1,99 @@
+import { Request, Response } from "express"
+import asyncHandler from 'express-async-handler'
+
+type ModelLabel = 'kimiK25'
+type ModelName = 'kimi-k2.5'
+
+// Maps client-facing model labels to Moonshot API model identifiers.
+// (`Record` requires its key/value type arguments; bare `Record` does not compile.)
+const models: Record<ModelLabel, ModelName> = {
+  kimiK25: 'kimi-k2.5'
+}
+
+// Proxies a chat request to the Moonshot (Kimi) streaming API and re-emits
+// each delta's content to the client as an SSE `data:` event, terminating
+// the stream with a `[DONE]` sentinel in every path (success or error).
+export const kimi = asyncHandler(async (req: Request, res: Response) => {
+  try {
+    res.writeHead(200, {
+      'Content-Type': 'text/event-stream',
+      'Connection': 'keep-alive',
+      'Cache-Control': 'no-cache'
+    })
+    const { model, messages } = req.body
+    const selectedModel = models[model as ModelLabel]
+
+    // Unknown model label: end the stream cleanly rather than erroring.
+    if (!selectedModel) {
+      res.write('data: [DONE]\n\n')
+      res.end()
+      return
+    }
+
+    const response = await fetch('https://api.moonshot.ai/v1/chat/completions', {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        'Authorization': `Bearer ${process.env.MOONSHOT_API_KEY}`
+      },
+      body: JSON.stringify({
+        model: selectedModel,
+        messages,
+        stream: true
+      })
+    })
+    const reader = response.body?.getReader()
+    const decoder = new TextDecoder()
+    // Holds a partial SSE payload when a JSON object is split across chunks.
+    let brokenLine = ''
+    if (reader) {
+      while (true) {
+        const { done, value } = await reader.read()
+        if (done) {
+          break
+        }
+
+        // { stream: true } keeps multi-byte UTF-8 sequences that straddle
+        // chunk boundaries from being decoded as replacement characters.
+        let chunk = decoder.decode(value, { stream: true })
+        if (brokenLine) {
+          chunk = brokenLine + chunk
+          brokenLine = ''
+        }
+
+        const lines = chunk.split("data: ")
+        const parsedLines = lines
+          .filter(line => line !== "" && line !== "[DONE]")
+          .filter(l => {
+            try {
+              JSON.parse(l)
+              return true
+            } catch (err) {
+              // Incomplete JSON: stash it and retry once the next chunk arrives.
+              if (!l.includes('[DONE]')) {
+                brokenLine = brokenLine + l
+              }
+              return false
+            }
+          })
+          .map(l => JSON.parse(l))
+
+        for (const parsedLine of parsedLines) {
+          const { choices } = parsedLine
+          const { delta } = choices[0]
+          const { content } = delta
+          if (content) {
+            res.write(`data: ${JSON.stringify(content)}\n\n`)
+          }
+        }
+      }
+    }
+
+    res.write('data: [DONE]\n\n')
+    res.end()
+  } catch (err) {
+    console.log('error in Kimi chat: ', err)
+    res.write('data: [DONE]\n\n')
+    res.end()
+  }
+})