ExLLama.ChatTemplate.Vicuna (LLama CPP Nif Wrapper v0.0.2)

Based on: https://github.com/chujiezheng/chat_templates/blob/main/chat_templates/vicuna.jinja

````jinja
{% if messages[0]['role'] == 'system' %}
    {% set loop_messages = messages[1:] %}
    {% set system_message = messages[0]['content'].strip() + '\n\n' %}
{% else %}
    {% set loop_messages = messages %}
    {% set system_message = '' %}
{% endif %}

{{ bos_token + system_message }}
{% for message in loop_messages %}
    {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}
        {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}
    {% endif %}

    {% if message['role'] == 'user' %}
        {{ 'USER: ' + message['content'].strip() + '\n' }}
    {% elif message['role'] == 'assistant' %}
        {{ 'ASSISTANT: ' + message['content'].strip() + eos_token + '\n' }}
    {% endif %}

    {% if loop.last and message['role'] == 'user' and add_generation_prompt %}
        {{ 'ASSISTANT:' }}
    {% endif %}
{% endfor %}
````
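
Rendered, the template produces a Vicuna-style transcript. Assuming `bos_token` is `<s>`, `eos_token` is `</s>`, `add_generation_prompt` is true, and block tags emit no stray whitespace (e.g. rendered with Jinja's `trim_blocks`/`lstrip_blocks` options, as Hugging Face does for chat templates), a short thread renders as:

````
<s>You are a helpful assistant.

USER: Hello!
ASSISTANT: Hi there!</s>
USER: What does this module do?
ASSISTANT:
````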

Summary

Functions

extract_response(responses, model, options)

to_context(thread, model, options)
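
Neither function is documented in v0.0.2. As an illustration of the prompt shape involved (presumably what `to_context/3` assembles from a chat thread), here is a self-contained Elixir sketch that mirrors the template rules above. It is not the module's implementation; the `<s>`/`</s>` token strings and the message-map shape are assumptions.

````elixir
defmodule VicunaPromptSketch do
  @moduledoc "Illustrative only: mirrors the Vicuna chat template in plain Elixir."

  # Assumed token strings; the real values come from the loaded model.
  @bos "<s>"
  @eos "</s>"

  def render(messages, add_generation_prompt? \\ true) do
    # Peel off a leading system message, as the template does.
    {system, rest} =
      case messages do
        [%{role: :system, content: c} | rest] -> {String.trim(c) <> "\n\n", rest}
        rest -> {"", rest}
      end

    # Format each turn. (The real template additionally raises if roles
    # do not alternate user/assistant; that check is omitted here.)
    body =
      Enum.map_join(rest, fn
        %{role: :user, content: c} -> "USER: " <> String.trim(c) <> "\n"
        %{role: :assistant, content: c} -> "ASSISTANT: " <> String.trim(c) <> @eos <> "\n"
      end)

    # Open the assistant turn when the thread ends on a user message.
    tail =
      case {add_generation_prompt?, List.last(rest)} do
        {true, %{role: :user}} -> "ASSISTANT:"
        _ -> ""
      end

    @bos <> system <> body <> tail
  end
end
````

For example, `VicunaPromptSketch.render([%{role: :system, content: "You are helpful."}, %{role: :user, content: "Hi"}])` returns `"<s>You are helpful.\n\nUSER: Hi\nASSISTANT:"`, matching the shape of the rendered example above.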