-import time
-
 import pytest
 import requests
+import time
 from test_runner import (
-    run,
-    start_server_if_needed,
+    start_server,
     stop_server,
     wait_for_websocket_download_success_event,
 )

-
-class TestApiEngineUninstall:
+class TestApiEngine:

     @pytest.fixture(autouse=True)
     def setup_and_teardown(self):
         # Setup
-        start_server_if_needed()
+        success = start_server()
+        if not success:
+            raise Exception("Failed to start server")

         yield

         # Teardown
         stop_server()
+
+    # engines get
+    def test_engines_get_llamacpp_should_be_successful(self):
+        response = requests.get("http://localhost:3928/engines/llama-cpp")
+        assert response.status_code == 200
+
+    # engines install
+    def test_engines_install_llamacpp_specific_version_and_variant(self):
+        data = {"version": "v0.1.35-27.10.24", "variant": "linux-amd64-avx-cuda-11-7"}
+        response = requests.post(
+            "http://localhost:3928/v1/engines/llama-cpp/install", json=data
+        )
+        assert response.status_code == 200

+    def test_engines_install_llamacpp_specific_version_and_null_variant(self):
+        data = {"version": "v0.1.35-27.10.24"}
+        response = requests.post(
+            "http://localhost:3928/v1/engines/llama-cpp/install", json=data
+        )
+        assert response.status_code == 200
+
+    # engines uninstall
     @pytest.mark.asyncio
-    async def test_engines_uninstall_llamacpp_should_be_successful(self):
+    async def test_engines_install_uninstall_llamacpp_should_be_successful(self):
         response = requests.post("http://localhost:3928/v1/engines/llama-cpp/install")
         assert response.status_code == 200
         await wait_for_websocket_download_success_event(timeout=None)
@@ -33,7 +53,7 @@ async def test_engines_uninstall_llamacpp_should_be_successful(self):
         assert response.status_code == 200

     @pytest.mark.asyncio
-    async def test_engines_uninstall_llamacpp_with_only_version_should_be_failed(self):
+    async def test_engines_install_uninstall_llamacpp_with_only_version_should_be_failed(self):
         # install first
         data = {"variant": "mac-arm64"}
         install_response = requests.post(
@@ -50,7 +70,7 @@ async def test_engines_uninstall_llamacpp_with_only_version_should_be_failed(sel
         assert response.json()["message"] == "No variant provided"

     @pytest.mark.asyncio
-    async def test_engines_uninstall_llamacpp_with_variant_should_be_successful(self):
+    async def test_engines_install_uninstall_llamacpp_with_variant_should_be_successful(self):
         # install first
         data = {"variant": "mac-arm64"}
         install_response = requests.post(
@@ -62,7 +82,7 @@ async def test_engines_uninstall_llamacpp_with_variant_should_be_successful(self
         response = requests.delete("http://127.0.0.1:3928/v1/engines/llama-cpp/install")
         assert response.status_code == 200

-    def test_engines_uninstall_llamacpp_with_specific_variant_and_version_should_be_successful(
+    def test_engines_install_uninstall_llamacpp_with_specific_variant_and_version_should_be_successful(
         self,
     ):
         data = {"variant": "mac-arm64", "version": "v0.1.35"}
@@ -76,3 +96,5 @@ def test_engines_uninstall_llamacpp_with_specific_variant_and_version_should_be_
7696 "http://localhost:3928/v1/engines/llama-cpp/install" , json = data
7797 )
7898 assert response .status_code == 200
99+
100+