2 files changed, +1 -5 lines

@@ -203,7 +203,7 @@ void llamaCPP::chatCompletion(
   data["presence_penalty"] = (*jsonBody).get("presence_penalty", 0).asFloat();
   const Json::Value &messages = (*jsonBody)["messages"];

-  if (!multi_modal) {
+  if (!llama.multimodal) {

     for (const auto &message : messages) {
       std::string input_role = message["role"].asString();
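The change above makes chatCompletion branch on the multimodal flag owned by the server context (`llama.multimodal`) rather than a separate controller-level `multi_modal` member. A minimal sketch of the single-source-of-truth idea; names beyond what the diff shows are illustrative, not the project's actual API:

```cpp
// Sketch only: the context that loads the model is the one place that knows
// whether a CLIP projector (mmproj) was loaded, so it owns the flag.
struct ServerContextSketch {
  bool multimodal = false;  // set while loading when an mmproj file is given
};

ServerContextSketch llama;  // mirrors the `llama` member used in the diff

void formatPromptSketch() {
  if (!llama.multimodal) {
    // text-only prompt formatting, as in the loop over `messages` above
  } else {
    // multimodal path: the request's images are handed to the image slots
  }
}
```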
@@ -407,7 +407,6 @@ void llamaCPP::unloadModel(
     llama_free_model(llama.model);
     llama.ctx = nullptr;
     llama.model = nullptr;
-    multi_modal = false;
     jsonResp["message"] = "Model unloaded successfully";
   }
   auto resp = nitro_utils::nitroHttpJsonResponse(jsonResp);
@@ -442,7 +441,6 @@ bool llamaCPP::loadModelImpl(const Json::Value &jsonBody) {
   if (!jsonBody["mmproj"].isNull()) {
     LOG_INFO << "MMPROJ FILE detected, multi-model enabled!";
     params.mmproj = jsonBody["mmproj"].asString();
-    multi_modal = true;
   }
   params.model = jsonBody["llama_model_path"].asString();
   params.n_gpu_layers = jsonBody.get("ngl", 100).asInt();
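For context on why the explicit `multi_modal = true/false` bookkeeping in loadModelImpl and unloadModel is no longer needed: the assumption behind this change is that the vendored llama.cpp server context records multimodal state itself while loading when `params.mmproj` is set. A hedged sketch of that assumption (the exact vendored implementation is not shown in this diff; only the `params.mmproj` / `params.model` field names come from it):

```cpp
#include <string>

// Assumed shape of the relevant pieces, for illustration only.
struct ParamsSketch {
  std::string mmproj;  // path to a CLIP projector file, empty if not multimodal
  std::string model;   // path to the base model file
};

struct ServerContextSketch {
  bool multimodal = false;

  bool loadModel(const ParamsSketch &params) {
    if (!params.mmproj.empty()) {
      multimodal = true;  // the context remembers it is multimodal
      // ... load the CLIP projector from params.mmproj ...
    }
    // ... load the base model from params.model ...
    return true;
  }

  void unload() {
    // Resetting the context clears its own state, so the controller does not
    // need to clear a duplicate flag of its own.
    multimodal = false;
  }
};
```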
@@ -732,7 +732,6 @@ struct llama_server_context {
       if (images_data != data.end() && images_data->is_array()) {
         for (const auto &img : *images_data) {
           std::string data_b64 = img["data"].get<std::string>();
-          LOG_INFO << data_b64;
           slot_image img_sl;
           img_sl.id =
               img.count("id") != 0 ? img["id"].get<int>() : slot->images.size();
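The dropped LOG_INFO above was printing the raw base64 image payload, which can run to hundreds of kilobytes per request and flood the log. If some trace of the image is still wanted, a hedged alternative sketch is to log only metadata once `img_sl.id` has been assigned; `img_sl`, `data_b64`, and `LOG_INFO` follow the names already used in the diff:

```cpp
// Sketch: log the image id and payload size instead of the payload itself.
LOG_INFO << "image slot " << img_sl.id << ": " << data_b64.size()
         << " base64 bytes received";
```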
@@ -1913,6 +1912,5 @@ class llamaCPP : public drogon::HttpController<llamaCPP> {
   bool caching_enabled;
   std::atomic<int> no_of_chats = 0;
   int clean_cache_threshold;
-  bool multi_modal = false;
 };
 }; // namespace inferences