diff --git a/chat_with_deepseek_r1_locally/.gitignore b/chat_with_deepseek_r1_locally/.gitignore
index e12ba56..1818415 100644
--- a/chat_with_deepseek_r1_locally/.gitignore
+++ b/chat_with_deepseek_r1_locally/.gitignore
@@ -3,3 +3,4 @@ __pycache__/
 .web
 *.py[cod]
 assets/external/
+.venv
diff --git a/chat_with_deepseek_r1_locally/README.md b/chat_with_deepseek_r1_locally/README.md
index eaf9df4..9af0aca 100644
--- a/chat_with_deepseek_r1_locally/README.md
+++ b/chat_with_deepseek_r1_locally/README.md
@@ -21,15 +21,22 @@ cd reflex-llm-examples/chat_with_deepseek_r1_locally
 ```
 
 ### 2. Install Dependencies
-Install the required dependencies:
+Install `uv` and the required dependencies:
 ```bash
-pip install -r requirements.txt
+curl -LsSf https://astral.sh/uv/install.sh | sh
+exec bash
+uv venv && source .venv/bin/activate && uv pip sync pyproject.toml
 ```
 
+You might have to install `unzip` as well.
+```bash
+sudo apt-get install unzip -y
+```
+
 ### 3. Pull and Run DeepSeek-r1 Using Ollama
 Download and set up the DeepSeek-r1 model locally:
 ```bash
-ollama pull deepseek-r1:1.5
+ollama pull deepseek-r1:1.5b
 ```
 
 ### 4. Run the Reflex App
diff --git a/chat_with_deepseek_r1_locally/pyproject.toml b/chat_with_deepseek_r1_locally/pyproject.toml
new file mode 100644
index 0000000..14592c8
--- /dev/null
+++ b/chat_with_deepseek_r1_locally/pyproject.toml
@@ -0,0 +1,15 @@
+[project]
+name = "chat-w-deepseek-r1-locally"
+version = "0.1.0"
+description = "Chat with DeepSeek-r1 locally"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = [
+    "llama-index>=0.12.12",
+    "llama-index-embeddings-huggingface>=0.5.1",
+    "llama-index-llms-ollama>=0.5.0",
+    "ollama>=0.4.7",
+    "pydantic>=2.10.5",
+    "reflex>=0.6.8",
+    "unzip>=1.0.0",
+]
diff --git a/chat_with_deepseek_r1_locally/requirements.txt b/chat_with_deepseek_r1_locally/requirements.txt
deleted file mode 100644
index 91700cb..0000000
--- a/chat_with_deepseek_r1_locally/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-reflex>=0.6.7
-ollama==0.4.5
-llama_index
-llama-index-embeddings-huggingface
-llama-index-llms-ollama