From 0e5c88310496227e94da2446702180f2e7b68f25 Mon Sep 17 00:00:00 2001
From: sueko
Date: Mon, 30 Sep 2024 08:50:41 +0900
Subject: [PATCH] Update models in Ragas documentation: change gpt-3.5-turbo-16k to gpt-4o-mini and gpt-4 to gpt-4o

---
 docs/concepts/testset_generation.md               | 4 ++--
 docs/howtos/applications/compare_embeddings.md    | 4 ++--
 docs/howtos/applications/compare_llms.md          | 4 ++--
 docs/howtos/applications/use_prompt_adaptation.md | 4 ++--
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/docs/concepts/testset_generation.md b/docs/concepts/testset_generation.md
index e10ef5acc..fbe19e984 100644
--- a/docs/concepts/testset_generation.md
+++ b/docs/concepts/testset_generation.md
@@ -61,8 +61,8 @@ from langchain_openai import ChatOpenAI, OpenAIEmbeddings
 # documents = load your documents
 
 # generator with openai models
-generator_llm = ChatOpenAI(model="gpt-3.5-turbo-16k")
-critic_llm = ChatOpenAI(model="gpt-4")
+generator_llm = ChatOpenAI(model="gpt-4o-mini")
+critic_llm = ChatOpenAI(model="gpt-4o")
 embeddings = OpenAIEmbeddings()
 
 generator = TestsetGenerator.from_langchain(
diff --git a/docs/howtos/applications/compare_embeddings.md b/docs/howtos/applications/compare_embeddings.md
index 287c1c000..6902c68fc 100644
--- a/docs/howtos/applications/compare_embeddings.md
+++ b/docs/howtos/applications/compare_embeddings.md
@@ -34,8 +34,8 @@ query_space = "large language models"
 documents = loader.load_data(query=query_space, limit=100)
 
 # generator with openai models
-generator_llm = ChatOpenAI(model="gpt-3.5-turbo-16k")
-critic_llm = ChatOpenAI(model="gpt-4")
+generator_llm = ChatOpenAI(model="gpt-4o-mini")
+critic_llm = ChatOpenAI(model="gpt-4o")
 embeddings = OpenAIEmbeddings()
 
 generator = TestsetGenerator.from_langchain(
diff --git a/docs/howtos/applications/compare_llms.md b/docs/howtos/applications/compare_llms.md
index 087c9ed85..0d7d4b9d7 100644
--- a/docs/howtos/applications/compare_llms.md
+++ b/docs/howtos/applications/compare_llms.md
@@ -42,8 +42,8 @@ reader = SimpleDirectoryReader("./arxiv-papers/",num_files_limit=30)
 documents = reader.load_data()
 
 # generator with openai models
-generator_llm = ChatOpenAI(model="gpt-3.5-turbo-16k")
-critic_llm = ChatOpenAI(model="gpt-4")
+generator_llm = ChatOpenAI(model="gpt-4o-mini")
+critic_llm = ChatOpenAI(model="gpt-4o")
 embeddings = OpenAIEmbeddings()
 
 generator = TestsetGenerator.from_langchain(
diff --git a/docs/howtos/applications/use_prompt_adaptation.md b/docs/howtos/applications/use_prompt_adaptation.md
index b82429907..2bba411d8 100644
--- a/docs/howtos/applications/use_prompt_adaptation.md
+++ b/docs/howtos/applications/use_prompt_adaptation.md
@@ -127,8 +127,8 @@ from ragas.testset.evolutions import simple, reasoning, multi_context,conditiona
 from langchain_openai import ChatOpenAI, OpenAIEmbeddings
 
 # generator with openai models
-generator_llm = ChatOpenAI(model="gpt-3.5-turbo-16k")
-critic_llm = ChatOpenAI(model="gpt-4")
+generator_llm = ChatOpenAI(model="gpt-4o-mini")
+critic_llm = ChatOpenAI(model="gpt-4o")
 embeddings = OpenAIEmbeddings()
 
 generator = TestsetGenerator.from_langchain(
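For reference, the snippet shared by all four patched pages looks roughly like the sketch below when run end to end. Only the two `ChatOpenAI` model names come from this diff; the `TestsetGenerator` import path, the `generate_with_langchain_docs` call, and the stand-in `Document` list are assumptions based on the Ragas 0.1.x documentation these pages belong to, not part of the patch.

```python
from langchain_core.documents import Document
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from ragas.testset.generator import TestsetGenerator  # import path assumed from Ragas 0.1.x docs
from ragas.testset.evolutions import simple, reasoning, multi_context

# stand-in for "load your documents"; each page loads its own corpus instead
documents = [
    Document(
        page_content="Ragas is a library for evaluating RAG pipelines.",
        metadata={"filename": "ragas.md"},
    )
]

# generator with openai models (the model names updated by this patch)
generator_llm = ChatOpenAI(model="gpt-4o-mini")
critic_llm = ChatOpenAI(model="gpt-4o")
embeddings = OpenAIEmbeddings()

generator = TestsetGenerator.from_langchain(
    generator_llm,
    critic_llm,
    embeddings,
)

# generate a small synthetic test set from the loaded documents
testset = generator.generate_with_langchain_docs(
    documents,
    test_size=10,
    distributions={simple: 0.5, reasoning: 0.25, multi_context: 0.25},
)
print(testset.to_pandas().head())
```

Swapping gpt-3.5-turbo-16k for gpt-4o-mini and gpt-4 for gpt-4o only changes the two `ChatOpenAI` constructor arguments; the rest of the test set generation flow in the docs is unaffected.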