Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/cortex-cpp-quality-gate.yml
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,7 @@ jobs:
if: runner.os == 'Linux'
run: |
cd engine
mkdir -p ~/.config/cortexcpp/
echo "huggingFaceToken: ${{ secrets.HUGGINGFACE_TOKEN_READ }}" > ~/.config/cortexcpp/.cortexrc
echo "gitHubToken: ${{ secrets.PAT_SERVICE_ACCOUNT }}" >> ~/.config/cortexcpp/.cortexrc
# ./build/cortex
Expand All @@ -175,6 +176,7 @@ jobs:
if: runner.os == 'Linux'
run: |
cd engine
mkdir -p ~/.config/cortexcpp/
echo "apiServerPort: 3928" > ~/.config/cortexcpp/.cortexrc
echo "huggingFaceToken: ${{ secrets.HUGGINGFACE_TOKEN_READ }}" >> ~/.config/cortexcpp/.cortexrc
echo "gitHubToken: ${{ secrets.PAT_SERVICE_ACCOUNT }}" >> ~/.config/cortexcpp/.cortexrc
Expand Down Expand Up @@ -453,6 +455,7 @@ jobs:
if: runner.os == 'Linux'
run: |
cd engine
mkdir -p ~/.config/cortexcpp/
echo "gitHubToken: ${{ secrets.GITHUB_TOKEN }}" > ~/.config/cortexcpp/.cortexrc
# ./build/cortex
cat ~/.config/cortexcpp/.cortexrc
Expand All @@ -477,6 +480,7 @@ jobs:
if: runner.os == 'Linux'
run: |
cd engine
mkdir -p ~/.config/cortexcpp/
echo "apiServerPort: 3928" > ~/.config/cortexcpp/.cortexrc
echo "gitHubToken: ${{ secrets.GITHUB_TOKEN }}" > ~/.config/cortexcpp/.cortexrc
# ./build/cortex
Expand Down
1 change: 0 additions & 1 deletion docs/docs/architecture/cortexrc.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ Example of the `.cortexrc` file:
```
logFolderPath: /home/<user>/cortexcpp
logLlamaCppPath: ./logs/cortex.log
logTensorrtLLMPath: ./logs/cortex.log
logOnnxPath: ./logs/cortex.log
dataFolderPath: /home/<user>/cortexcpp
maxLogLines: 100000
Expand Down
63 changes: 37 additions & 26 deletions engine/cli/command_line_parser.cc
Original file line number Diff line number Diff line change
Expand Up @@ -437,7 +437,7 @@ void CommandLineParser::SetupConfigsCommands() {

auto is_empty = true;
for (const auto& [key, value] : config_update_opts_) {
if (!value.empty()) {
if (!value.empty() || CONFIGURATIONS.at(key).allow_empty) {
is_empty = false;
break;
}
Expand Down Expand Up @@ -656,36 +656,47 @@ void CommandLineParser::SetupHardwareCommands() {
void CommandLineParser::SetupSystemCommands() {
auto start_cmd = app_.add_subcommand("start", "Start the API server");
start_cmd->group(kSystemGroup);
cml_data_.port = std::stoi(cml_data_.config.apiServerPort);
start_cmd->add_option("-p, --port", cml_data_.port, "Server port to listen");
start_cmd->add_option("--loglevel", cml_data_.log_level,
"Set up log level for server, accepted TRACE, DEBUG, "
"INFO, WARN, ERROR");
if (cml_data_.log_level != "INFO" && cml_data_.log_level != "TRACE" &&
cml_data_.log_level != "DEBUG" && cml_data_.log_level != "WARN" &&
cml_data_.log_level != "ERROR") {
CLI_LOG("Invalid log level: " << cml_data_.log_level
<< ", Set Loglevel to INFO");
cml_data_.log_level = "INFO";

// Add options dynamically
std::vector<std::pair<std::string, std::string>> option_names = {
{"logspath", "The directory where logs are stored"},
{"logsllama", "The directory where llama-cpp engine logs are stored"},
{"logsonnx", "The directory where onnx engine logs are stored"},
{"datapath", "The directory for storing data"},
{"loglines", "Log size limit"},
{"host", "The host IP for the API server"},
{"port", "The port used by the API server"},
{"hf-token", "HuggingFace authentication token"},
{"gh-agent", "Github user agent"},
{"gh-token", "Github authentication token"},
{"cors", "Cross-Origin Resource Sharing"},
{"origins", "Lists allowed origins for CORS requests"},
{"proxy-url", "Proxy URL"},
{"verify-proxy", "SSL verification for client proxy connections"},
{"verify-proxy-host", "SSL verification for host proxy connections"},
{"proxy-username", "Proxy username"},
{"proxy-password", "Proxy password"},
{"no-proxy", "Specifies exceptions for proxy usage"},
{"verify-ssl-peer", "SSL/TLS verification for peer connections"},
{"verify-ssl-host", "SSL/TLS verification for host connections"},
{"ssl-cert-path", "Path to SSL certificates"},
{"ssl-key-path", "Path to SSL and keys"},
{"loglevel", "Log level"}};
cml_data_.server_start_options["loglevel"] = "INFO";
for (const auto& option_name : option_names) {
start_cmd->add_option(
"--" + std::get<0>(option_name),
cml_data_.server_start_options[std::get<0>(option_name)],
std::get<1>(option_name));
}

start_cmd->callback([this] {
if (std::exchange(executed_, true))
return;
if (cml_data_.port != stoi(cml_data_.config.apiServerPort)) {
CTL_INF("apiServerPort changed from " << cml_data_.config.apiServerPort
<< " to " << cml_data_.port);
auto config_path = file_manager_utils::GetConfigurationPath();
cml_data_.config.apiServerPort = std::to_string(cml_data_.port);
auto result =
config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
cml_data_.config, config_path.string());
if (result.has_error()) {
CLI_LOG("Error update " << config_path.string() << result.error());
}
}

commands::ServerStartCmd ssc;
ssc.Exec(cml_data_.config.apiServerHost,
std::stoi(cml_data_.config.apiServerPort), cml_data_.log_level);
ssc.Exec(cml_data_.server_start_options["loglevel"],
cml_data_.server_start_options, cml_data_.config);
});

auto stop_cmd = app_.add_subcommand("stop", "Stop the API server");
Expand Down
3 changes: 1 addition & 2 deletions engine/cli/command_line_parser.h
Original file line number Diff line number Diff line change
Expand Up @@ -67,13 +67,12 @@ class CommandLineParser {
bool display_gpu_mode = false;
bool display_available_model = false;
std::string filter = "";
std::string log_level = "INFO";

bool show_menu = false;

int port;
config_yaml_utils::CortexConfig config;
std::unordered_map<std::string, std::string> model_update_options;
std::unordered_map<std::string, std::string> server_start_options;
std::string model_src;
};
CmlData cml_data_;
Expand Down
2 changes: 1 addition & 1 deletion engine/cli/commands/config_upd_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ void commands::ConfigUpdCmd::Exec(

auto non_null_opts = std::unordered_map<std::string, std::string>();
for (const auto& [key, value] : options) {
if (value.empty()) {
if (value.empty() && !CONFIGURATIONS.at(key).allow_empty) {
continue;
}
non_null_opts[key] = value;
Expand Down
189 changes: 178 additions & 11 deletions engine/cli/commands/server_start_cmd.cc
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ bool ServerStartCmd::Exec(const std::string& host, int port,
si.cb = sizeof(si);
ZeroMemory(&pi, sizeof(pi));
std::wstring params = L"--start-server";
params += L" --config_file_path \"" +
params += L" --config_file_path \"" +
file_manager_utils::GetConfigurationPath().wstring() + L"\"";
params += L" --data_folder_path \"" +
file_manager_utils::GetCortexDataPath().wstring() + L"\"";
Expand All @@ -80,17 +80,17 @@ bool ServerStartCmd::Exec(const std::string& host, int port,
mutable_cmds.push_back(L'\0');
// Create child process
if (!CreateProcess(
NULL, // No module name (use command line)
NULL, // No module name (use command line)
mutable_cmds
.data(), // Command line (replace with your actual executable)
NULL, // Process handle not inheritable
NULL, // Thread handle not inheritable
FALSE, // Set handle inheritance
CREATE_NO_WINDOW, // No new console
NULL, // Use parent's environment block
NULL, // Use parent's starting directory
&si, // Pointer to STARTUPINFO structure
&pi)) // Pointer to PROCESS_INFORMATION structure
.data(), // Command line (replace with your actual executable)
NULL, // Process handle not inheritable
NULL, // Thread handle not inheritable
FALSE, // Set handle inheritance
CREATE_NO_WINDOW, // No new console
NULL, // Use parent's environment block
NULL, // Use parent's starting directory
&si, // Pointer to STARTUPINFO structure
&pi)) // Pointer to PROCESS_INFORMATION structure
{
std::cout << "Could not start server: " << GetLastError() << std::endl;
return false;
Expand Down Expand Up @@ -136,4 +136,171 @@ bool ServerStartCmd::Exec(const std::string& host, int port,
#endif
return true;
}

/// Applies CLI `cortex start` option overrides to the configuration,
/// persists it, then delegates to the host/port `Exec` overload.
/// @param log_level optional log level forwarded to the server process.
/// @param options   map of CLI option name -> raw string value; empty
///                  values mean "not supplied" and are skipped.
/// @param data      in/out configuration that is mutated and saved.
/// @return true when the server was started, false on failure.
bool ServerStartCmd::Exec(
    const std::optional<std::string>& log_level,
    const std::unordered_map<std::string, std::string>& options,
    CortexConfig& data) {
  // Fold any non-empty CLI overrides into the in-memory configuration.
  for (const auto& [key, value] : options) {
    if (!value.empty()) {
      UpdateConfig(data, key, value);
    }
  }

  // Persist the (possibly updated) configuration so the spawned server and
  // later CLI invocations observe the same settings. Failure to save is
  // logged but not fatal: the in-memory values are still usable below.
  auto config_path = file_manager_utils::GetConfigurationPath();
  auto result =
      config_yaml_utils::CortexConfigMgr::GetInstance().DumpYamlConfig(
          data, config_path.string());
  if (result.has_error()) {
    CTL_WRN("Error update " << config_path.string() << result.error());
  }

  // apiServerPort may come straight from user input (--port); std::stoi
  // throws std::invalid_argument/std::out_of_range on malformed text, which
  // would otherwise crash the CLI with an uncaught exception.
  int port = 0;
  try {
    port = std::stoi(data.apiServerPort);
  } catch (const std::exception& e) {
    CTL_WRN("Invalid apiServerPort '" << data.apiServerPort
                                      << "': " << e.what());
    return false;
  }
  return Exec(data.apiServerHost, port, log_level);
}

/// Routes one CLI option (`key`, `value`) to the matching CortexConfig
/// field setter; unknown keys are logged and ignored.
void ServerStartCmd::UpdateConfig(CortexConfig& data, const std::string& key,
                                  const std::string& value) {
  // Dispatch table mapping option names to field setters.
  // NOTE: deliberately NOT `static` — several handlers capture `this`, and a
  // function-local static would freeze the `this` of the first caller,
  // leaving any later ServerStartCmd instance dispatching through a stale
  // (potentially dangling) pointer. Rebuilding the table per call is cheap
  // for a CLI invocation.
  const std::unordered_map<
      std::string, std::function<void(CortexConfig&, const std::string&,
                                      const std::string&)>>
      updaters = {
          {"logspath",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.logFolderPath = v;
           }},
          {"logsllama",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.logLlamaCppPath = v;
           }},
          {"logsonnx",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.logOnnxPath = v;
           }},
          {"loglines",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateNumericField(k, v, [&data](float f) {
               data.maxLogLines = static_cast<int>(f);
             });
           }},
          {"host",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.apiServerHost = v;
           }},
          {"port",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.apiServerPort = v;
           }},
          {"hf-token",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.huggingFaceToken = v;
           }},
          {"gh-agent",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.gitHubUserAgent = v;
           }},
          {"gh-token",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.gitHubToken = v;
           }},
          {"cors",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateBooleanField(k, v, [&data](bool b) { data.enableCors = b; });
           }},
          {"origins",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateVectorField(k, v,
                               [&data](const std::vector<std::string>& orgs) {
                                 data.allowedOrigins = orgs;
                               });
           }},
          {"proxy-url",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.proxyUrl = v;
           }},
          {"verify-proxy",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateBooleanField(k, v,
                                [&data](bool b) { data.verifyProxySsl = b; });
           }},
          {"verify-proxy-host",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateBooleanField(
                 k, v, [&data](bool b) { data.verifyProxyHostSsl = b; });
           }},
          {"proxy-username",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.proxyUsername = v;
           }},
          {"proxy-password",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.proxyPassword = v;
           }},
          {"no-proxy",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.noProxy = v;
           }},
          {"verify-ssl-peer",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateBooleanField(k, v,
                                [&data](bool b) { data.verifyPeerSsl = b; });
           }},
          {"verify-ssl-host",
           [this](CortexConfig& data, const std::string& k,
                  const std::string& v) {
             UpdateBooleanField(k, v,
                                [&data](bool b) { data.verifyHostSsl = b; });
           }},
          {"ssl-cert-path",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.sslCertPath = v;
           }},
          {"ssl-key-path",
           [](CortexConfig& data, const std::string&, const std::string& v) {
             data.sslKeyPath = v;
           }},
      };

  if (auto it = updaters.find(key); it != updaters.end()) {
    it->second(data, key, value);
    // Never echo credentials to the log; confirm the key only.
    const bool sensitive =
        key == "hf-token" || key == "gh-token" || key == "proxy-password";
    CTL_INF("Updated " << key << " to: " << (sensitive ? "<redacted>" : value));
  } else {
    CTL_WRN("Warning: Unknown configuration key '" << key << "' ignored.");
  }
}

/// Splits the comma-separated `value` into individual entries and hands the
/// resulting list to `setter`. A trailing comma does not produce a trailing
/// empty entry; an empty `value` yields an empty list. `key` is unused and
/// kept only for signature symmetry with the other Update*Field helpers.
void ServerStartCmd::UpdateVectorField(
    const std::string& key, const std::string& value,
    std::function<void(const std::vector<std::string>&)> setter) {
  std::vector<std::string> entries;
  std::string::size_type pos = 0;
  while (pos < value.size()) {
    const auto comma = value.find(',', pos);
    if (comma == std::string::npos) {
      entries.push_back(value.substr(pos));
      pos = value.size();
    } else {
      entries.push_back(value.substr(pos, comma - pos));
      pos = comma + 1;
    }
  }
  setter(entries);
}

/// Parses `value` as a float and forwards it to `setter`; on a parse
/// failure the error is logged and the target field is left untouched.
void ServerStartCmd::UpdateNumericField(const std::string& key,
                                        const std::string& value,
                                        std::function<void(float)> setter) {
  try {
    setter(std::stof(value));
  } catch (const std::exception& e) {
    CLI_LOG("Failed to parse numeric value for " << key << ": " << e.what());
  }
}

/// Interprets `value` as a boolean and forwards it to `setter`. Only the
/// exact strings "true" and "1" map to true; anything else (including
/// "TRUE" or "yes") maps to false. `key` is unused and kept for signature
/// symmetry with the other Update*Field helpers.
void ServerStartCmd::UpdateBooleanField(const std::string& key,
                                        const std::string& value,
                                        std::function<void(bool)> setter) {
  const bool enabled = (value == "true") || (value == "1");
  setter(enabled);
}

}; // namespace commands
20 changes: 20 additions & 0 deletions engine/cli/commands/server_start_cmd.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,13 @@

#include <optional>
#include <string>
#include "utils/config_yaml_utils.h"
#include "utils/curl_utils.h"
#include "utils/logging_utils.h"
#include "utils/url_parser.h"

namespace commands {
using CortexConfig = config_yaml_utils::CortexConfig;

inline bool IsServerAlive(const std::string& host, int port) {
auto url = url_parser::Url{
Expand All @@ -26,5 +28,23 @@ class ServerStartCmd {
public:
bool Exec(const std::string& host, int port,
const std::optional<std::string>& log_level = std::nullopt);

bool Exec(const std::optional<std::string>& log_level,
const std::unordered_map<std::string, std::string>& options,
CortexConfig& data);

private:
void UpdateConfig(CortexConfig& data, const std::string& key,
const std::string& value);

void UpdateVectorField(
const std::string& key, const std::string& value,
std::function<void(const std::vector<std::string>&)> setter);

void UpdateNumericField(const std::string& key, const std::string& value,
std::function<void(float)> setter);

void UpdateBooleanField(const std::string& key, const std::string& value,
std::function<void(bool)> setter);
};
} // namespace commands
Loading
Loading