ExLLama.ChatTemplate.Zephyr (LLama CPP Nif Wrapper v0.0.2)

Based on: https://github.com/chujiezheng/chat_templates/blob/main/chat_templates/zephyr.jinja

{# Zephyr chat template: renders a message list into the Zephyr prompt
   format "<|role|>\n<content></s>\n". NOTE(review): assumes the renderer
   trims standalone-tag lines (trim_blocks/lstrip_blocks, as HF chat
   templating does) — confirm against the host renderer. #}
{% if messages[0]['role'] == 'system' %}
  {# A leading system message shifts the expected user/assistant parity by one. #}
  {% set offset = 1 %}
{% else %}
  {% set offset = 0 %}
{% endif %}

{% for message in messages %}
  {# After the optional system message, roles must strictly alternate:
     'user' at indices where index0 % 2 == offset, 'assistant' at the
     opposite parity. Any other ordering raises. #}
  {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %}
      {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}
  {% endif %}

  {# Emit one turn: "<|role|>\n" + stripped content + eos_token + "\n". #}
  {{ '<|' + message['role'] + '|>
' + message['content'].strip() + eos_token + '
' }}

  {# When the last message is from the user and a generation prompt was
     requested, open an assistant turn so the model continues from
     "<|assistant|>\n". #}
  {% if loop.last and message['role'] == 'user' and add_generation_prompt %}
      {{ '<|assistant|>
' }}
  {% endif %}
{% endfor %}

Summary

Functions

extract_response(responses, model, options)

to_context(thread, model, options)