From 4972a6ef5e0e73d7985892bd8a3f4ff169198305 Mon Sep 17 00:00:00 2001
From: John Blackwell
Date: Mon, 24 Feb 2025 16:42:43 -0500
Subject: [PATCH] cleanup

---
 go/ai-proxy/api/openai/openai.go                  |  2 +-
 go/ai-proxy/main.go                               | 12 ++++--------
 go/ai-proxy/test/bedrock/bedrock_test.go          |  2 +-
 .../test/openai_standard/openai_standard_test.go  |  2 +-
 4 files changed, 7 insertions(+), 11 deletions(-)

diff --git a/go/ai-proxy/api/openai/openai.go b/go/ai-proxy/api/openai/openai.go
index bd8380852..1e8bbfcaa 100644
--- a/go/ai-proxy/api/openai/openai.go
+++ b/go/ai-proxy/api/openai/openai.go
@@ -15,7 +15,7 @@ type Endpoint string
 const (
 	EndpointChat            = "/openai/v1/chat/completions"
 	EndpointChatCompletions = "/v1/chat/completions"
-	EndpointEmbeddings      = "/v1/embeddings"
+	EndpointEmbeddings      = "/openai/v1/embeddings"
 )
 
 type ChatCompletionRequest struct {
diff --git a/go/ai-proxy/main.go b/go/ai-proxy/main.go
index 04288e5cd..9d5f86ac1 100644
--- a/go/ai-proxy/main.go
+++ b/go/ai-proxy/main.go
@@ -21,15 +21,11 @@ func main() {
 	router := mux.NewRouter()
 
 	p, err := proxy.NewOllamaTranslationProxy(args.Provider(), args.ProviderHost(), args.ProviderCredentials())
-	if err != nil {
-		if args.Provider() == api.ProviderBedrock {
-
-		} else {
-			klog.ErrorS(err, "Could not create proxy")
-			os.Exit(1)
-		}
-	} else {
+	if err == nil {
 		router.HandleFunc(ollama.EndpointChat, p.Proxy())
+	} else if args.Provider() != api.ProviderBedrock {
+		klog.ErrorS(err, "Could not create proxy")
+		os.Exit(1)
 	}
 
 	if args.OpenAICompatible() {
diff --git a/go/ai-proxy/test/bedrock/bedrock_test.go b/go/ai-proxy/test/bedrock/bedrock_test.go
index c5eac66ec..9ac70c001 100644
--- a/go/ai-proxy/test/bedrock/bedrock_test.go
+++ b/go/ai-proxy/test/bedrock/bedrock_test.go
@@ -152,7 +152,7 @@ func TestBedrockEmbeddingsProxy(t *testing.T) {
 		{
 			Name:     "embeddings request should return correct openai response",
 			Method:   "POST",
-			Endpoint: "/v1/embeddings",
+			Endpoint: "/openai/v1/embeddings",
 			Request: openai.EmbedRequest{
 				Model: "amazon.titan-embed-text-v2:0",
 				Input: "Hello from Titan embeddings test.",
diff --git a/go/ai-proxy/test/openai_standard/openai_standard_test.go b/go/ai-proxy/test/openai_standard/openai_standard_test.go
index c13f6bac4..66e2ef813 100644
--- a/go/ai-proxy/test/openai_standard/openai_standard_test.go
+++ b/go/ai-proxy/test/openai_standard/openai_standard_test.go
@@ -155,7 +155,7 @@ func TestOpenAIEmbeddingsProxy(t *testing.T) {
 		{
 			Name:     "chat request should return correct openai response",
 			Method:   "POST",
-			Endpoint: "/v1/embeddings",
+			Endpoint: "/openai/v1/embeddings",
 			Request: openai.EmbedRequest{
 				Model: "openai-embedding-model",
 				Input: "Hello from embeddings test.",