ExLLama.ChatTemplate.GemmaInstruct (LLama CPP Nif Wrapper v0.0.2)

based on: [gemma-it.jinja](https://github.com/chujiezheng/chat_templates/blob/main/chat_templates/gemma-it.jinja)

```jinja
{% if messages[0]['role'] == 'system' %}
  {% set loop_messages = messages[1:] %}
  {% set system_message = messages[0]['content'].strip() + '

' %}
{% else %}
  {% set loop_messages = messages %}
  {% set system_message = '' %}
{% endif %}

{% for message in loop_messages %}
  {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}
      {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}
  {% endif %}

  {% if loop.index0 == 0 %}
      {% set content = system_message + message['content'] %}
  {% else %}
      {% set content = message['content'] %}
  {% endif %}

  {% if (message['role'] == 'assistant') %}
      {% set role = 'model' %}
  {% else %}
      {% set role = message['role'] %}
  {% endif %}

  {{ '<start_of_turn>' + role + '
' + content.strip() + '<end_of_turn>
' }}

  {% if loop.last and message['role'] == 'user' and add_generation_prompt %}
      {{'<start_of_turn>model
'}}
  {% endif %}
{% endfor %}
```

Link to this section Summary

Link to this section Functions

Link to this function

compact(thread, acc \\ [])

Link to this function

extract_response(responses, model, options)

Link to this function

to_context(thread, model, options)