fix: better handle search query list in case some llms respond with extra words #5

Open · wants to merge 2 commits into base: main
46 changes: 34 additions & 12 deletions open_deep_researcher.ipynb
@@ -62,6 +62,8 @@
"import asyncio\n",
"import aiohttp\n",
"import json\n",
"import re",
"import ast",
"\n",
"# =======================\n",
"# Configuration Constants\n",
@@ -131,16 +133,26 @@
" ]\n",
" response = await call_openrouter_async(session, messages)\n",
" if response:\n",
" cleaned = response.strip()\n",
" # Remove triple backticks and language specifier if present\n",
" cleaned = re.sub(r\"```(?:\\w+)?\\n(.*?)\\n```\", r\"\\1\", cleaned, flags=re.DOTALL).strip()\n",
" try:\n",
" # Expect exactly a Python list (e.g., \"['query1', 'query2']\")\n",
" search_queries = eval(response)\n",
" if isinstance(search_queries, list):\n",
" return search_queries\n",
" else:\n",
" print(\"LLM did not return a list. Response:\", response)\n",
" return []\n",
" new_queries = eval(cleaned)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" except Exception as e:\n",
" print(\"Error parsing search queries:\", e, \"\\nResponse:\", response)\n",
" # Direct evaluation failed; try to extract the list part from the string\n",
" match = re.search(r\"(\\[.*\\])\", cleaned, re.DOTALL)\n",
" if match:\n",
" list_str = match.group(1)\n",
" try:\n",
" new_queries = eval(list_str)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" except Exception as e_inner:\n",
" print(\"Error parsing extracted list:\", e_inner, \"\\nExtracted text:\", list_str)\n",
" return []\n",
" print(\"Error parsing new search queries or no search queries at all:\", e, \"\\nResponse:\", response)\n",
" return []\n",
" return []\n",
"\n",
@@ -267,15 +279,25 @@
" cleaned = response.strip()\n",
" if cleaned == \"<done>\":\n",
" return \"<done>\"\n",
" # Remove triple backticks and language specifier if present\n",
" cleaned = re.sub(r\"```(?:\\w+)?\\n(.*?)\\n```\", r\"\\1\", cleaned, flags=re.DOTALL).strip()\n",
" try:\n",
" new_queries = eval(cleaned)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" else:\n",
" print(\"LLM did not return a list for new search queries. Response:\", response)\n",
" return []\n",
" except Exception as e:\n",
" print(\"Error parsing new search queries:\", e, \"\\nResponse:\", response)\n",
" # Direct evaluation failed; try to extract the list part from the string\n",
" match = re.search(r\"(\\[.*\\])\", cleaned, re.DOTALL)\n",
" if match:\n",
" list_str = match.group(1)\n",
" try:\n",
" new_queries = eval(list_str)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" except Exception as e_inner:\n",
" print(\"Error parsing extracted list:\", e_inner, \"\\nExtracted text:\", list_str)\n",
" return []\n",
" print(\"Error parsing new search queries or no search queries at all:\", e, \"\\nResponse:\", response)\n",
" return []\n",
" return []\n",
"\n",
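For readers skimming the diff above: the fix boils down to stripping a Markdown code fence from the model's reply, trying to parse it as a Python list, and, if that fails, pulling out the first bracketed span and retrying. Below is a minimal standalone sketch of that logic; the helper name `parse_query_list` is illustrative, and it substitutes `ast.literal_eval` for the notebook's `eval` call (the PR adds `import ast` already, so this is an assumed safer variant, not the code as merged).

```python
import ast
import re


def parse_query_list(response: str) -> list:
    """Illustrative parser for an LLM reply that should be a Python list of queries."""
    cleaned = response.strip()
    # Strip a ```lang ... ``` fence if the model wrapped its answer in one.
    cleaned = re.sub(r"```(?:\w+)?\n(.*?)\n```", r"\1", cleaned, flags=re.DOTALL).strip()
    try:
        queries = ast.literal_eval(cleaned)  # safer stand-in for eval()
        if isinstance(queries, list):
            return queries
    except (ValueError, SyntaxError):
        pass
    # The reply contained extra words: pull out the first [...] span and retry.
    match = re.search(r"(\[.*\])", cleaned, re.DOTALL)
    if match:
        try:
            queries = ast.literal_eval(match.group(1))
            if isinstance(queries, list):
                return queries
        except (ValueError, SyntaxError):
            pass
    return []
```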
45 changes: 34 additions & 11 deletions open_deep_researcher_gradio.ipynb
@@ -43,6 +43,8 @@
"import aiohttp\n",
"import gradio as gr\n",
"import json\n",
"import re",
"import ast",
"\n",
"# ---------------------------\n",
"# Configuration Constants\n",
@@ -100,15 +102,26 @@
" ]\n",
" response = await call_openrouter_async(session, messages)\n",
" if response:\n",
" cleaned = response.strip()\n",
" # Remove triple backticks and language specifier if present\n",
" cleaned = re.sub(r\"```(?:\\w+)?\\n(.*?)\\n```\", r\"\\1\", cleaned, flags=re.DOTALL).strip()\n",
" try:\n",
" search_queries = eval(response)\n",
" if isinstance(search_queries, list):\n",
" return search_queries\n",
" else:\n",
" print(\"LLM did not return a list. Response:\", response)\n",
" return []\n",
" new_queries = eval(cleaned)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" except Exception as e:\n",
" print(\"Error parsing search queries:\", e, \"\\nResponse:\", response)\n",
" # Direct evaluation failed; try to extract the list part from the string\n",
" match = re.search(r\"(\\[.*\\])\", cleaned, re.DOTALL)\n",
" if match:\n",
" list_str = match.group(1)\n",
" try:\n",
" new_queries = eval(list_str)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" except Exception as e_inner:\n",
" print(\"Error parsing extracted list:\", e_inner, \"\\nExtracted text:\", list_str)\n",
" return []\n",
" print(\"Error parsing new search queries or no search queries at all:\", e, \"\\nResponse:\", response)\n",
" return []\n",
" return []\n",
"\n",
@@ -208,15 +221,25 @@
" cleaned = response.strip()\n",
" if cleaned == \"<done>\":\n",
" return \"<done>\"\n",
" # Remove triple backticks and language specifier if present\n",
" cleaned = re.sub(r\"```(?:\\w+)?\\n(.*?)\\n```\", r\"\\1\", cleaned, flags=re.DOTALL).strip()\n",
" try:\n",
" new_queries = eval(cleaned)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" else:\n",
" print(\"LLM did not return a list for new search queries. Response:\", response)\n",
" return []\n",
" except Exception as e:\n",
" print(\"Error parsing new search queries:\", e, \"\\nResponse:\", response)\n",
" # Direct evaluation failed; try to extract the list part from the string\n",
" match = re.search(r\"(\\[.*\\])\", cleaned, re.DOTALL)\n",
" if match:\n",
" list_str = match.group(1)\n",
" try:\n",
" new_queries = eval(list_str)\n",
" if isinstance(new_queries, list):\n",
" return new_queries\n",
" except Exception as e_inner:\n",
" print(\"Error parsing extracted list:\", e_inner, \"\\nExtracted text:\", list_str)\n",
" return []\n",
" print(\"Error parsing new search queries or no search queries at all:\", e, \"\\nResponse:\", response)\n",
" return []\n",
" return []\n",
"\n",
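open_deep_researcher_gradio.ipynb receives the same two changes. As a quick illustration of the reply shapes the fallback is meant to absorb, here are a few invented inputs run through the sketch above:

```python
# Hypothetical LLM replies, exercised against the illustrative parse_query_list() sketched earlier.
print(parse_query_list("['solar panel efficiency 2024', 'perovskite cell costs']"))        # plain list
print(parse_query_list("```python\n['solar panel efficiency 2024', 'perovskite cell costs']\n```"))  # fenced
print(parse_query_list("Sure! Here are the queries: ['solar panel efficiency 2024', 'perovskite cell costs']"))  # extra words
print(parse_query_list("I could not think of any further queries."))                       # no list -> []
```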