Skip to content

Commit 72c6bc3

Browse files
authored
llama : better rwkv chat template and add missing inputs.use_jinja setting (#14336)
* llama-cli : add missing `inputs.use_jinja` setting
  Signed-off-by: Molly Sophia <[email protected]>
* llama : better legacy chat template for rwkv
  Signed-off-by: Molly Sophia <[email protected]>
---------
Signed-off-by: Molly Sophia <[email protected]>
1 parent defe215 commit 72c6bc3

File tree

2 files changed

+12
-6
lines changed

2 files changed

+12
-6
lines changed

src/llama-chat.cpp

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -528,12 +528,17 @@ int32_t llm_chat_apply_template(
             }
         } else if (tmpl == LLM_CHAT_TEMPLATE_RWKV_WORLD) {
             // this template requires the model to have "\n\n" as EOT token
-            for (auto message : chat) {
-                std::string role(message->role);
-                if (role == "user") {
-                    ss << "User: " << message->content << "\n\nAssistant:";
-                } else {
-                    ss << message->content << "\n\n";
+            for (size_t i = 0; i < chat.size(); i++) {
+                std::string role(chat[i]->role);
+                if (role == "system") {
+                    ss << "System: " << trim(chat[i]->content) << "\n\n";
+                } else if (role == "user") {
+                    ss << "User: " << trim(chat[i]->content) << "\n\n";
+                    if (i == chat.size() - 1) {
+                        ss << "Assistant:";
+                    }
+                } else if (role == "assistant") {
+                    ss << "Assistant: " << trim(chat[i]->content) << "\n\n";
                 }
             }
         } else if (tmpl == LLM_CHAT_TEMPLATE_GRANITE) {

tools/main/main.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -292,6 +292,7 @@ int main(int argc, char ** argv) {

     if (!params.system_prompt.empty() || !params.prompt.empty()) {
         common_chat_templates_inputs inputs;
+        inputs.use_jinja = g_params->use_jinja;
         inputs.messages = chat_msgs;
         inputs.add_generation_prompt = !params.prompt.empty();

0 commit comments

Comments (0)