This repository was archived by the owner on Jul 4, 2025. It is now read-only.
File tree Expand file tree Collapse file tree 1 file changed +8
-3
lines changed
Expand file tree Collapse file tree 1 file changed +8
-3
lines changed Original file line number Diff line number Diff line change @@ -107,7 +107,7 @@ void llamaCPP::chatCompletion(
107107 }
108108
109109 std::string content = message["content"].asString();
110- formatted_output += role + ": " + content + "\n";
110+ formatted_output += role + content + "\n";
111111 }
112112 formatted_output += "assistant:";
113113
@@ -116,8 +116,7 @@ void llamaCPP::chatCompletion(
116116 stopWords.push_back(stop_word.asString());
117117 }
118118 // specify default stop words
119- stopWords.push_back("user:");
120- stopWords.push_back("### USER:");
119+ stopWords.push_back(user_prompt);
121120 data[" stop" ] = stopWords;
122121 }
123122
@@ -224,6 +223,12 @@ void llamaCPP::loadModel(
224223 }
225224
226225 params.cont_batching = (*jsonBody)["cont_batching"].asBool();
226+
227+ // Set up prompt
228+ user_prompt = (*jsonBody)["user_prompt"].asString();
229+ ai_prompt = (*jsonBody)["ai_prompt"].asString();
230+ system_prompt = (*jsonBody)["system_prompt"].asString();
231+
227232 // params.n_threads = (*jsonBody)["n_threads"].asInt();
228233 // params.n_threads_batch = params.n_threads;
229234 }
You can’t perform that action at this time.
0 commit comments