View Source HyperLLM.Conversation (hyper_llm v0.6.0)

HyperLLM.Conversation handles the lifecycle of a conversation, including starting, appending messages, and running the conversation.

## Example

A LiveView that sends messages to the chat and updates the chat with the response.

defmodule ChatLive do
  use Phoenix.LiveView

  # Mount with a fresh conversation. Params and session are unused,
  # so prefix them with `_` to avoid compiler warnings.
  def mount(_params, _session, socket) do
    {:ok,
    socket
    |> assign(conv: HyperLLM.Conversation.start(model: "openai/gpt-4o-mini"))}
  end

  # Append the user's message to the conversation, then schedule the
  # completion asynchronously so the event handler returns immediately.
  def handle_event("send_message", %{"message" => message}, socket) do
    conv = HyperLLM.Conversation.append(socket.assigns.conv, message)

    send(self(), :chat_completion)

    {:noreply, socket |> assign(conv: conv)}
  end

  # Run the conversation. A bare `with` (no `else`) would raise a
  # WithClauseError on `{:error, _}` and crash the LiveView, so handle
  # the error case explicitly and keep the current state.
  def handle_info(:chat_completion, socket) do
    case HyperLLM.Conversation.run(socket.assigns.conv) do
      {:ok, conv} ->
        {:noreply, socket |> assign(conv: conv)}

      {:error, _reason} ->
        {:noreply, socket}
    end
  end
end

Summary

Functions

Append a message to the conversation.

Run the conversation to get a response.

Start a new conversation.

Types

model_config()

@type model_config() :: Keyword.t()

t()

@type t() :: %HyperLLM.Conversation{model: term(), thread: term()}

Functions

append(conv, messages)

@spec append(t(), list()) :: t()
@spec append(t(), String.t()) :: t()
@spec append(t(), map()) :: t()

append(conv, role, content)

@spec append(t(), atom(), binary()) :: t()

Append a message to the conversation.

Example

iex> HyperLLM.Conversation.start(model: "openai/gpt-4o-mini") |> HyperLLM.Conversation.append(:user, "Hello")
%HyperLLM.Conversation{
  thread: [%{role: :user, content: "Hello"}],
  model: %HyperLLM.Model{
    provider: HyperLLM.Provider.OpenAI,
    model: "gpt-4o-mini",
    config: []
  }
}

run(conv)

@spec run(t()) :: {:ok, t()} | {:error, binary()}

Run the conversation to get a response.

Example

iex> HyperLLM.Conversation.start(model: "openai/gpt-4o-mini") |> HyperLLM.Conversation.append(:user, "Hello") |> HyperLLM.Conversation.run()
{:ok, %HyperLLM.Conversation{
  thread: [%{role: :user, content: "Hello"}, %{role: :assistant, content: "Hello, how can I help you today?"}],
  model: %HyperLLM.Model{
    provider: HyperLLM.Provider.OpenAI,
    model: "gpt-4o-mini",
    config: []
  }
}}

run!(conv)

@spec run!(t()) :: t()

start(model_config \\ [])

@spec start(model_config()) :: t()

Start a new conversation.

Examples

iex> HyperLLM.Conversation.start(model: "openai/gpt-4o-mini")
%HyperLLM.Conversation{
  thread: [],
  model: %HyperLLM.Model{
    provider: HyperLLM.Provider.OpenAI,
    model: "gpt-4o-mini",
    config: []
  }
}