
Commit

cleanup
JohnBlackwell committed Feb 24, 2025
1 parent e9a7b4b commit 4972a6e
Showing 4 changed files with 7 additions and 11 deletions.
2 changes: 1 addition & 1 deletion go/ai-proxy/api/openai/openai.go
@@ -15,7 +15,7 @@ type Endpoint string
 const (
 	EndpointChat            = "/openai/v1/chat/completions"
 	EndpointChatCompletions = "/v1/chat/completions"
-	EndpointEmbeddings      = "/v1/embeddings"
+	EndpointEmbeddings      = "/openai/v1/embeddings"
 )
 
 type ChatCompletionRequest struct {
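
For context, here is a minimal, self-contained sketch of how the renamed constant could be wired into the gorilla/mux router that main.go already sets up. The handler below is a hypothetical stand-in for the proxy's real embeddings handler; only the path constants are taken from the diff above.

package main

import (
	"net/http"

	"github.com/gorilla/mux"
)

// Endpoint paths as they read after this commit (from api/openai/openai.go).
const (
	EndpointChat            = "/openai/v1/chat/completions"
	EndpointChatCompletions = "/v1/chat/completions"
	EndpointEmbeddings      = "/openai/v1/embeddings"
)

func main() {
	router := mux.NewRouter()

	// Hypothetical handler standing in for the proxy's embeddings handler.
	embeddings := func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}

	// With the change, the embeddings route shares the /openai prefix used by EndpointChat.
	router.HandleFunc(EndpointEmbeddings, embeddings)

	_ = http.ListenAndServe(":8080", router)
}
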
12 changes: 4 additions & 8 deletions go/ai-proxy/main.go
@@ -21,15 +21,11 @@ func main() {
 
 	router := mux.NewRouter()
 	p, err := proxy.NewOllamaTranslationProxy(args.Provider(), args.ProviderHost(), args.ProviderCredentials())
-	if err != nil {
-		if args.Provider() == api.ProviderBedrock {
-
-		} else {
-			klog.ErrorS(err, "Could not create proxy")
-			os.Exit(1)
-		}
-	} else {
+	if err == nil {
 		router.HandleFunc(ollama.EndpointChat, p.Proxy())
+	} else if args.Provider() != api.ProviderBedrock {
+		klog.ErrorS(err, "Could not create proxy")
+		os.Exit(1)
 	}
 
 	if args.OpenAICompatible() {
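
The rewrite above flattens the nested Bedrock special case into a single if / else-if chain: register the chat route when the proxy is created, silently tolerate a failure for the Bedrock provider, and exit for any other provider. A rough sketch of that flow, using hypothetical stand-ins (providerBedrock, newProxy, the /api/chat path) for the real args, api, proxy, and ollama packages:

package main

import (
	"errors"
	"net/http"
	"os"

	"github.com/gorilla/mux"
	"k8s.io/klog/v2"
)

// Hypothetical stand-ins for the repository's args/api/proxy packages.
const providerBedrock = "bedrock"

func newProxy(provider string) (http.HandlerFunc, error) {
	if provider == providerBedrock {
		return nil, errors.New("no Ollama-style chat proxy for bedrock")
	}
	return func(w http.ResponseWriter, r *http.Request) {}, nil
}

func main() {
	provider := providerBedrock
	router := mux.NewRouter()

	// Register the chat route on success, tolerate the error for Bedrock,
	// and treat any other failure as fatal.
	p, err := newProxy(provider)
	if err == nil {
		router.HandleFunc("/api/chat", p)
	} else if provider != providerBedrock {
		klog.ErrorS(err, "Could not create proxy")
		os.Exit(1)
	}

	_ = http.ListenAndServe(":8080", router)
}
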
2 changes: 1 addition & 1 deletion go/ai-proxy/test/bedrock/bedrock_test.go
@@ -152,7 +152,7 @@ func TestBedrockEmbeddingsProxy(t *testing.T) {
 		{
 			Name:     "embeddings request should return correct openai response",
 			Method:   "POST",
-			Endpoint: "/v1/embeddings",
+			Endpoint: "/openai/v1/embeddings",
 			Request: openai.EmbedRequest{
 				Model: "amazon.titan-embed-text-v2:0",
 				Input: "Hello from Titan embeddings test.",
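
The table entry above only changes the request path. As a rough illustration, a case like this could be exercised end to end with net/http/httptest; the embedRequest type and the in-process handler below are assumptions, not the repository's actual test harness:

package proxytest

import (
	"bytes"
	"encoding/json"
	"net/http"
	"net/http/httptest"
	"testing"
)

// embedRequest loosely mirrors the openai.EmbedRequest fields used in the table.
type embedRequest struct {
	Model string `json:"model"`
	Input string `json:"input"`
}

func TestEmbeddingsEndpointPath(t *testing.T) {
	// Hypothetical handler standing in for the proxy under test; it only
	// verifies that requests arrive on the updated /openai/v1/embeddings path.
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/openai/v1/embeddings" {
			http.NotFound(w, r)
			return
		}
		w.WriteHeader(http.StatusOK)
	})
	srv := httptest.NewServer(handler)
	defer srv.Close()

	body, err := json.Marshal(embedRequest{
		Model: "amazon.titan-embed-text-v2:0",
		Input: "Hello from Titan embeddings test.",
	})
	if err != nil {
		t.Fatalf("marshal request: %v", err)
	}
	resp, err := http.Post(srv.URL+"/openai/v1/embeddings", "application/json", bytes.NewReader(body))
	if err != nil {
		t.Fatalf("request failed: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		t.Fatalf("expected 200, got %d", resp.StatusCode)
	}
}
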
2 changes: 1 addition & 1 deletion go/ai-proxy/test/openai_standard/openai_standard_test.go
@@ -155,7 +155,7 @@ func TestOpenAIEmbeddingsProxy(t *testing.T) {
 		{
 			Name:     "chat request should return correct openai response",
 			Method:   "POST",
-			Endpoint: "/v1/embeddings",
+			Endpoint: "/openai/v1/embeddings",
 			Request: openai.EmbedRequest{
 				Model: "openai-embedding-model",
 				Input: "Hello from embeddings test.",
