From 8474f9ae5836f717723977d89ab4601f9262500e Mon Sep 17 00:00:00 2001
From: Ubuntu
Date: Wed, 22 Jan 2025 22:31:39 +0000
Subject: [PATCH 1/4] fixed deepseek-1 model name, use uv

---
 chat_with_deepseek_r1_locally/.gitignore     |  1 +
 chat_with_deepseek_r1_locally/README.md      |  8 +++++---
 chat_with_deepseek_r1_locally/pyproject.toml | 14 ++++++++++++++
 3 files changed, 20 insertions(+), 3 deletions(-)
 create mode 100644 chat_with_deepseek_r1_locally/pyproject.toml

diff --git a/chat_with_deepseek_r1_locally/.gitignore b/chat_with_deepseek_r1_locally/.gitignore
index e12ba56..1818415 100644
--- a/chat_with_deepseek_r1_locally/.gitignore
+++ b/chat_with_deepseek_r1_locally/.gitignore
@@ -3,3 +3,4 @@ __pycache__/
 .web
 *.py[cod]
 assets/external/
+.venv
diff --git a/chat_with_deepseek_r1_locally/README.md b/chat_with_deepseek_r1_locally/README.md
index eaf9df4..d09f283 100644
--- a/chat_with_deepseek_r1_locally/README.md
+++ b/chat_with_deepseek_r1_locally/README.md
@@ -21,15 +21,17 @@ cd reflex-llm-examples/chat_with_deepseek_r1_locally
 ```
 
 ### 2. Install Dependencies
-Install the required dependencies:
+Install `uv` and the required dependencies:
 ```bash
-pip install -r requirements.txt
+curl -LsSf https://astral.sh/uv/install.sh | sh
+exec bash
+uv venv && source .venv/bin/activate && uv pip sync pyproject.toml
 ```
 
 ### 3. Pull and Run DeepSeek-r1 Using Ollama
 Download and set up the DeepSeek-r1 model locally:
 ```bash
-ollama pull deepseek-r1:1.5
+ollama pull deepseek-r1:1.5b
 ```
 
 ### 4. Run the Reflex App
diff --git a/chat_with_deepseek_r1_locally/pyproject.toml b/chat_with_deepseek_r1_locally/pyproject.toml
new file mode 100644
index 0000000..5ad122a
--- /dev/null
+++ b/chat_with_deepseek_r1_locally/pyproject.toml
@@ -0,0 +1,14 @@
+[project]
+name = "chat-w-deepseek-r1-locally"
+version = "0.1.0"
+description = "Chat with DeepSeek-r1 locally"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = [
+    "llama-index>=0.12.12",
+    "llama-index-embeddings-huggingface>=0.5.1",
+    "llama-index-llms-ollama>=0.5.0",
+    "ollama>=0.4.7",
+    "reflex>=0.6.8",
+    "unzip>=1.0.0",
+]

From 175a70cfa9d883f43e80973e3b0680056ee5dfb1 Mon Sep 17 00:00:00 2001
From: Amit Arora
Date: Wed, 22 Jan 2025 22:33:35 +0000
Subject: [PATCH 2/4] remove requirements.txt as we use uv now

---
 chat_with_deepseek_r1_locally/requirements.txt | 5 -----
 1 file changed, 5 deletions(-)
 delete mode 100644 chat_with_deepseek_r1_locally/requirements.txt

diff --git a/chat_with_deepseek_r1_locally/requirements.txt b/chat_with_deepseek_r1_locally/requirements.txt
deleted file mode 100644
index 91700cb..0000000
--- a/chat_with_deepseek_r1_locally/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-reflex>=0.6.7
-ollama==0.4.5
-llama_index
-llama-index-embeddings-huggingface
-llama-index-llms-ollama

From af9c583e72fa984875e5c39382ac900a8a55674f Mon Sep 17 00:00:00 2001
From: Amit Arora
Date: Thu, 23 Jan 2025 02:14:57 +0000
Subject: [PATCH 3/4] add pydantic

---
 chat_with_deepseek_r1_locally/pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/chat_with_deepseek_r1_locally/pyproject.toml b/chat_with_deepseek_r1_locally/pyproject.toml
index 5ad122a..14592c8 100644
--- a/chat_with_deepseek_r1_locally/pyproject.toml
+++ b/chat_with_deepseek_r1_locally/pyproject.toml
@@ -9,6 +9,7 @@ dependencies = [
     "llama-index-embeddings-huggingface>=0.5.1",
     "llama-index-llms-ollama>=0.5.0",
     "ollama>=0.4.7",
+    "pydantic>=2.10.5",
     "reflex>=0.6.8",
     "unzip>=1.0.0",
 ]

From 844fe6f3932bb8e74827a6d1810c78704573e1e5 Mon Sep 17 00:00:00 2001
From: Amit Arora
Date: Thu, 23 Jan 2025 02:16:30 +0000
Subject: [PATCH 4/4] add step in readme to install unzip

---
 chat_with_deepseek_r1_locally/README.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/chat_with_deepseek_r1_locally/README.md b/chat_with_deepseek_r1_locally/README.md
index d09f283..9af0aca 100644
--- a/chat_with_deepseek_r1_locally/README.md
+++ b/chat_with_deepseek_r1_locally/README.md
@@ -28,6 +28,11 @@ exec bash
 uv venv && source .venv/bin/activate && uv pip sync pyproject.toml
 ```
 
+You might have to install `unzip` as well.
+```bash
+sudo apt-get install unzip -y
+```
+
 ### 3. Pull and Run DeepSeek-r1 Using Ollama
 Download and set up the DeepSeek-r1 model locally:
 ```bash