Skip to content

Commit

Permalink
Rename chat_gpt to chat
Browse files — browse the repository at this point in the history
  • Loading branch information
michalwarda committed Feb 17, 2024
1 parent cb4fd53 commit 749a76e
Show file tree
Hide file tree
Showing 5 changed files with 165 additions and 164 deletions.
2 changes: 1 addition & 1 deletion apps/api/config/config.exs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ config :nx, :default_backend, EXLA.Backend
config :buildel, :deepgram, Buildel.Clients.Deepgram
config :buildel, :elevenlabs, Buildel.Clients.Elevenlabs
config :buildel, :webhook, Buildel.Clients.Webhook
config :buildel, :chat_gpt, Buildel.Clients.ChatGPT
config :buildel, :chat, Buildel.Clients.Chat
config :buildel, :vector_db, Buildel.VectorDB.EctoAdapter
config :buildel, :embeddings, Buildel.Clients.OpenAIEmbeddings
config :buildel, :file_loader, Buildel.FileLoaderNLMApiAdapter
Expand Down
2 changes: 1 addition & 1 deletion apps/api/config/test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ config :buildel, :secret_key_base, "secret_key_base"
config :buildel, :deepgram, Buildel.ClientMocks.Deepgram
config :buildel, :elevenlabs, Buildel.ClientMocks.Elevenlabs
config :buildel, :webhook, Buildel.ClientMocks.Webhook
config :buildel, :chat_gpt, Buildel.ClientMocks.ChatGPT
config :buildel, :chat, Buildel.ClientMocks.ChatGPT
config :buildel, :stream_timeout, 10
config :buildel, :vector_db, Buildel.ClientMocks.VectorDB.QdrantAdapter
config :buildel, :embeddings, Buildel.ClientMocks.Embeddings
Expand Down
6 changes: 3 additions & 3 deletions apps/api/lib/buildel/blocks/chat.ex
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ defmodule Buildel.Blocks.Chat do

with {:ok, messages} <- fill_messages(state),
{:ok, _, message} <-
chat_gpt().stream_chat(%{
chat().stream_chat(%{
context: %{messages: messages},
on_content: fn text_chunk ->
Buildel.BlockPubSub.broadcast_to_io(
Expand Down Expand Up @@ -491,7 +491,7 @@ defmodule Buildel.Blocks.Chat do
end
end

defp chat_gpt() do
Application.fetch_env!(:buildel, :chat_gpt)
defp chat() do
Application.fetch_env!(:buildel, :chat)
end
end
160 changes: 160 additions & 0 deletions apps/api/lib/buildel/clients/chat.ex
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,163 @@ defmodule Buildel.Clients.ChatBehaviour do
tools: list(any())
}) :: :ok
end

defmodule Buildel.Clients.Chat do
  @moduledoc """
  Streaming chat client implementing `Buildel.Clients.ChatBehaviour`.

  Dispatches to a provider-specific LangChain chat model (OpenAI, Azure,
  Mistral, Google) based on `:api_type`, streams deltas and tool calls back
  through the caller-supplied callbacks, and reports token statistics via
  `on_end` when the chain completes.
  """
  require Logger
  # NOTE(review): `Buildel.Langchain` vs `Buildel.LangChain` casing differs
  # between these aliases. Elixir module names are case-sensitive, so these
  # point at distinct namespaces — confirm both actually exist as written.
  alias Buildel.Langchain.ChatModels.ChatMistralAI
  alias Buildel.LangChain.ChatModels.ChatGoogleAI
  alias Buildel.Langchain.ChatGptTokenizer
  alias Buildel.Clients.ChatBehaviour
  alias Buildel.LangChain.Chains.LLMChain
  alias Buildel.LangChain.ChatModels.ChatOpenAI
  alias LangChain.Message
  alias LangChain.MessageDelta

  @behaviour ChatBehaviour

  @doc """
  Runs a streaming chat completion over `context.messages`.

  Required keys in `opts`: `:context`, `:on_content`, `:on_tool_content`,
  `:on_tool_call`, `:on_end`, `:on_error`, `:model`, `:tools`.
  Optional: `:api_type` (defaults to `"openai"`), `:endpoint`, `:api_key`,
  `:temperature`.

  Returns `{:ok, chain, message}` on success, or `{:error, reason}` after
  invoking `on_error` with the reason.
  """
  @impl ChatBehaviour
  def stream_chat(
        %{
          context: context,
          on_content: on_content,
          on_tool_content: on_tool_content,
          on_tool_call: on_tool_call,
          on_end: on_end,
          on_error: on_error,
          model: model,
          tools: tools
        } = opts
      ) do
    # Default to OpenAI when the caller does not specify a provider.
    opts =
      opts
      |> Map.put_new(:api_type, "openai")
      |> Map.put_new(:endpoint, "https://api.openai.com/v1/chat/completions")

    # Translate the app's plain-map message format into LangChain structs.
    messages =
      context.messages
      |> Enum.map(fn
        %{role: "assistant"} = message ->
          Message.new_assistant!(message.content)

        %{role: "system"} = message ->
          Message.new_system!(message.content)

        %{role: "user"} = message ->
          Message.new_user!(message.content)

        %{role: "tool"} = message ->
          Message.new_function!(message.tool_name, message.content)

        %{role: "tool_call"} = message ->
          Message.new_function_call!(message.tool_name, Jason.encode!(message.arguments))
      end)

    with {:ok, chain, message} <-
           LLMChain.new!(%{
             llm: get_llm(opts),
             custom_context: context
           })
           |> LLMChain.add_functions(tools |> Enum.map(& &1.function))
           |> LLMChain.add_messages(messages)
           |> LLMChain.run(
             while_needs_response: true,
             # Clause order matters: more specific message shapes must be
             # matched before the catch-all %Message{} clause.
             callback_fn: fn
               %MessageDelta{content: nil} ->
                 nil

               %MessageDelta{} = data ->
                 on_content.(data.content)

               %Message{function_name: nil} ->
                 nil

               # A completed tool execution: forward raw and formatted output.
               %Message{function_name: function_name, content: content, arguments: nil}
               when is_binary(function_name) and is_binary(content) ->
                 %{response_formatter: response_formatter} =
                   tools |> Enum.find(fn tool -> tool.function.name == function_name end)

                 on_tool_content.(function_name, content, response_formatter.(content))

               # The model requesting a tool call; unknown tools are logged
               # and ignored rather than crashing the stream.
               %Message{function_name: function_name, arguments: arguments}
               when is_binary(function_name) ->
                 case tools |> Enum.find(fn tool -> tool.function.name == function_name end) do
                   nil ->
                     Logger.debug("Tool not found: #{function_name}")
                     nil

                   %{call_formatter: call_formatter} ->
                     on_tool_call.(function_name, arguments, call_formatter.(arguments))
                 end

               %Message{} ->
                 nil

               {:error, reason} ->
                 on_error.(reason)
                 nil
             end
           ) do
      # NOTE(review): token counting always uses the GPT tokenizer even for
      # Mistral/Google models — statistics may be approximate for those.
      statistics =
        ChatGptTokenizer.init(model)
        |> ChatGptTokenizer.count_chain_tokens(%{
          functions: chain.functions,
          messages: chain.messages,
          input_messages: chain.custom_context.messages
        })

      on_end.(statistics)

      {:ok, chain, message}
    else
      {:error, "context_length_exceeded"} ->
        on_error.(:context_length_exceeded)
        {:error, :context_length_exceeded}

      {:error, reason} ->
        on_error.(reason)
        {:error, reason}
    end
  end

  # Builds the provider-specific LangChain chat model from the call opts.
  defp get_llm(%{api_type: "mistral"} = opts) do
    ChatMistralAI.new!(%{
      model: opts.model,
      temperature: opts.temperature,
      stream: true,
      api_key: opts.api_key,
      endpoint: opts.endpoint
    })
  end

  # OpenAI and Azure share the same client; only :api_type differs, and it
  # is forwarded as-is, so one guarded clause replaces two identical ones.
  defp get_llm(%{api_type: api_type} = opts) when api_type in ["openai", "azure"] do
    ChatOpenAI.new!(%{
      model: opts.model,
      temperature: opts.temperature,
      stream: true,
      api_key: opts.api_key,
      api_type: opts.api_type,
      endpoint: opts.endpoint
    })
  end

  defp get_llm(%{api_type: "google"} = opts) do
    ChatGoogleAI.new!(%{
      api_key: opts.api_key,
      model: opts.model,
      stream: true,
      temperature: opts.temperature,
      endpoint: opts.endpoint
    })
  end
end
159 changes: 0 additions & 159 deletions apps/api/lib/buildel/clients/chat_gpt.ex

This file was deleted.

0 comments on commit 749a76e

Please sign in to comment.