Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit a87551d

Browse files
authored
Merge pull request #311 from janhq/310-bug-cannot-unload-model-on-nitro-022
310 bug cannot unload model on nitro 022
2 parents 1e47736 + be6ef92 commit a87551d

File tree

2 files changed

+5
-5
lines changed

2 files changed

+5
-5
lines changed

CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ add_executable(${PROJECT_NAME} main.cc)
 #
 # and comment out the following lines
 find_package(Drogon CONFIG REQUIRED)
-target_link_libraries(${PROJECT_NAME} PRIVATE Drogon::Drogon common llama llava
+target_link_libraries(${PROJECT_NAME} PRIVATE Drogon::Drogon common llava
     ${CMAKE_THREAD_LIBS_INIT})

# ##############################################################################

controllers/llamaCPP.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,10 @@
 #include <fstream>

 // External
+#include "clip.h"
 #include "common.h"
 #include "llama.h"

-#include "../../llama.cpp/examples/llava/clip.h"
-
 #include "stb_image.h"

 #ifndef NDEBUG
@@ -1538,8 +1537,9 @@ struct llama_server_context {
         "cache\n");
     kv_cache_clear();
   }
-  std::unique_lock<std::mutex> lock(mutex_tasks);
-  condition_tasks.wait(lock, [&] { return !queue_tasks.empty(); });
+  // TODO: Need to implement queueing using CV for better performance
+  // std::unique_lock<std::mutex> lock(mutex_tasks);
+  // condition_tasks.wait(lock, [&] { return !queue_tasks.empty(); });
 }

 for (llama_client_slot &slot : slots) {

0 commit comments

Comments (0)