ExLLama.ChatTemplate.ChatML (LLama CPP Nif Wrapper v0.0.2)
based on: <https://github.com/chujiezheng/chat_templates/blob/main/chat_templates/chatml.jinja>
{% if messages[0]['role'] == 'system' %}
{% set offset = 1 %}
{% else %}
{% set offset = 0 %}
{% endif %}
{{ bos_token }}
{% for message in messages %}
{% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %}
{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}
{% endif %}
{{ '<|im_start|>' + message['role'] + '
' + message['content'].strip() + '<|im_end|>
' }}
{% if loop.last and message['role'] == 'user' and add_generation_prompt %}
{{ '<|im_start|>assistant
' }}
{% endif %}
{% endfor %}
```
Link to this section Summary
Link to this section Functions
Link to this function
extract_response(responses, model, options)
Link to this function