
Commit 8c95f6b

Merge pull request #230 from tikikun/main
bug: Fix caching compatibility issues regarding system prompt
2 parents 9615282 + 61fc781


controllers/llamaCPP.cc

Lines changed: 9 additions & 2 deletions
@@ -186,15 +186,22 @@ void llamaCPP::chatCompletion(
       std::string role;
       if (input_role == "user") {
         role = user_prompt;
+        std::string content = message["content"].asString();
+        formatted_output += role + content;
       } else if (input_role == "assistant") {
         role = ai_prompt;
+        std::string content = message["content"].asString();
+        formatted_output += role + content;
       } else if (input_role == "system") {
         role = system_prompt;
+        std::string content = message["content"].asString();
+        formatted_output = role + content + formatted_output;
+
       } else {
         role = input_role;
+        std::string content = message["content"].asString();
+        formatted_output += role + content;
       }
-      std::string content = message["content"].asString();
-      formatted_output += role + content;
     }
     formatted_output += ai_prompt;
 
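For context on why this change helps prompt caching, here is a minimal, self-contained sketch of the formatting loop after the fix. A plain Message struct and illustrative prompt markers stand in for the controller's Json::Value messages and configured prompt templates, so names and values such as "USER: " below are assumptions for illustration, not the actual configuration. The key point is that the system branch now prepends instead of appends, so the system prompt always ends up at the start of formatted_output and the prompt prefix that llama.cpp caches stays stable across requests, regardless of where the system message appears in the request.

// Sketch only: plain structs replace the controller's Json::Value messages,
// and the prompt markers are made-up placeholders.
#include <iostream>
#include <string>
#include <vector>

struct Message {
  std::string role;     // "user", "assistant", "system", or anything else
  std::string content;
};

int main() {
  // Hypothetical prompt markers; the real values come from the model config.
  const std::string user_prompt = "USER: ";
  const std::string ai_prompt = "ASSISTANT: ";
  const std::string system_prompt = "SYSTEM: ";

  std::vector<Message> messages = {
      {"user", "Hello!"},
      {"system", "You are a helpful assistant."},  // system message not first
      {"assistant", "Hi, how can I help?"},
      {"user", "Tell me a joke."},
  };

  std::string formatted_output;
  for (const auto& message : messages) {
    if (message.role == "user") {
      formatted_output += user_prompt + message.content;
    } else if (message.role == "assistant") {
      formatted_output += ai_prompt + message.content;
    } else if (message.role == "system") {
      // Prepend instead of append, mirroring the diff above: the system
      // prompt always leads the formatted prompt, keeping the cached prefix
      // identical across turns.
      formatted_output = system_prompt + message.content + formatted_output;
    } else {
      formatted_output += message.role + message.content;
    }
  }
  formatted_output += ai_prompt;  // cue the model to respond

  std::cout << formatted_output << std::endl;
  return 0;
}

Running the sketch prints the system prompt first even though the system message arrives second in the list, which mirrors the behavior the diff introduces.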
