Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 627677c

Browse files
committed
add conditional prompt for role
1 parent 1f1564c commit 627677c

File tree

1 file changed

+12
-1
lines changed

1 file changed

+12
-1
lines changed

controllers/llamaCPP.cc

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,18 @@ void llamaCPP::chatCompletion(
   const Json::Value &messages = (*jsonBody)["messages"];
   for (const auto &message : messages) {
-    std::string role = message["role"].asString();
+    std::string input_role = message["role"].asString();
+    std::string role;
+    if (input_role == "user") {
+      role = user_prompt;
+    } else if (input_role == "assistant") {
+      role = ai_prompt;
+    } else if (input_role == "system") {
+      role = system_prompt;
+    } else {
+      role = input_role;
+    }
+
     std::string content = message["content"].asString();
     formatted_output += role + ": " + content + "\n";
   }

0 commit comments

Comments
 (0)