Commit 9364e76 (1 parent: 91693c6)

* added tests
* updated pyproject
* fixed flake8

Showing 6 changed files with 167 additions and 6 deletions.
Empty file.
@@ -0,0 +1,29 @@
from unittest.mock import MagicMock, patch

import pytest


@pytest.fixture
def openai_llm_mock(request):
    content = request.param
    # Note: the patch target is where OpenAI is used, not where it is defined
    with patch("vision_agent.llm.llm.OpenAI") as mock:
        # Set up a mock response structure matching what the calling code expects
        mock_instance = mock.return_value
        mock_instance.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content=content))]
        )
        yield mock_instance


@pytest.fixture
def openai_lmm_mock(request):
    content = request.param
    # Note: the patch target is where OpenAI is used, not where it is defined
    with patch("vision_agent.lmm.lmm.OpenAI") as mock:
        # Set up a mock response structure matching what the calling code expects
        mock_instance = mock.return_value
        mock_instance.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content=content))]
        )
        yield mock_instance
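
The fixture comments call out a common unittest.mock pitfall: patch must target the name where it is looked up, not where it is defined. A minimal standalone sketch of the pattern, assuming vision_agent.llm.llm does `from openai import OpenAI` (which the patch target implies):

from unittest.mock import MagicMock, patch

# vision_agent.llm.llm binds the name OpenAI into its own namespace at
# import time, so that is the name to replace. Patching "openai.OpenAI"
# instead would leave the module's already-bound reference untouched and
# the real client would still be constructed.
with patch("vision_agent.llm.llm.OpenAI") as mock:
    mock.return_value.chat.completions.create.return_value = MagicMock(
        choices=[MagicMock(message=MagicMock(content="stubbed"))]
    )
    from vision_agent.llm.llm import OpenAILLM

    assert OpenAILLM().generate("hello") == "stubbed"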
@@ -0,0 +1,59 @@
import pytest

from vision_agent.llm.llm import OpenAILLM
from vision_agent.tools import CLIP
from vision_agent.tools.tools import GroundingDINO, GroundingSAM

from .fixtures import openai_llm_mock  # noqa: F401


@pytest.mark.parametrize(
    "openai_llm_mock", ["mocked response"], indirect=["openai_llm_mock"]
)
def test_generate_with_mock(openai_llm_mock):  # noqa: F811
    llm = OpenAILLM()
    response = llm.generate("test prompt")
    assert response == "mocked response"
    openai_llm_mock.chat.completions.create.assert_called_once_with(
        model="gpt-4-turbo-preview",
        messages=[{"role": "user", "content": "test prompt"}],
    )


@pytest.mark.parametrize(
    "openai_llm_mock",
    ['{"Parameters": {"prompt": "cat"}}'],
    indirect=["openai_llm_mock"],
)
def test_generate_classifier(openai_llm_mock):  # noqa: F811
    llm = OpenAILLM()
    prompt = "Can you generate a cat classifier?"
    classifier = llm.generate_classifier(prompt)
    assert isinstance(classifier, CLIP)
    assert classifier.prompt == "cat"


@pytest.mark.parametrize(
    "openai_llm_mock",
    ['{"Parameters": {"prompt": "cat"}}'],
    indirect=["openai_llm_mock"],
)
def test_generate_detector(openai_llm_mock):  # noqa: F811
    llm = OpenAILLM()
    prompt = "Can you generate a cat detector?"
    detector = llm.generate_detector(prompt)
    assert isinstance(detector, GroundingDINO)
    assert detector.prompt == "cat"


@pytest.mark.parametrize(
    "openai_llm_mock",
    ['{"Parameters": {"prompt": "cat"}}'],
    indirect=["openai_llm_mock"],
)
def test_generate_segmentor(openai_llm_mock):  # noqa: F811
    llm = OpenAILLM()
    prompt = "Can you generate a cat segmentor?"
    segmentor = llm.generate_segmentor(prompt)
    assert isinstance(segmentor, GroundingSAM)
    assert segmentor.prompt == "cat"
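
These tests lean on pytest's indirect parametrization: with `indirect=["openai_llm_mock"]`, the parametrized value is delivered to the fixture as `request.param` instead of being passed straight to the test function. A self-contained sketch of the mechanism (the fixture and test names here are illustrative):

import pytest


@pytest.fixture
def reply(request):
    # Under indirect parametrization, the parametrize value arrives
    # here as request.param; the test receives the fixture's return.
    return request.param.upper()


@pytest.mark.parametrize("reply", ["mocked response"], indirect=["reply"])
def test_indirect(reply):
    assert reply == "MOCKED RESPONSE"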
@@ -0,0 +1,72 @@
import tempfile

import pytest
from PIL import Image

from vision_agent.lmm.lmm import OpenAILMM
from vision_agent.tools import CLIP, GroundingDINO, GroundingSAM

from .fixtures import openai_lmm_mock  # noqa: F401


def create_temp_image(image_format="jpeg"):
    # Write a small solid-red image to a named temp file and return its path
    temp_file = tempfile.NamedTemporaryFile(suffix=f".{image_format}", delete=False)
    image = Image.new("RGB", (100, 100), color=(255, 0, 0))
    image.save(temp_file, format=image_format)
    temp_file.seek(0)
    return temp_file.name


@pytest.mark.parametrize(
    "openai_lmm_mock", ["mocked response"], indirect=["openai_lmm_mock"]
)
def test_generate_with_mock(openai_lmm_mock):  # noqa: F811
    temp_image = create_temp_image()
    lmm = OpenAILMM()
    response = lmm.generate("test prompt", image=temp_image)
    assert response == "mocked response"
    assert (
        "image_url"
        in openai_lmm_mock.chat.completions.create.call_args.kwargs["messages"][0][
            "content"
        ][1]
    )


@pytest.mark.parametrize(
    "openai_lmm_mock",
    ['{"Parameters": {"prompt": "cat"}}'],
    indirect=["openai_lmm_mock"],
)
def test_generate_classifier(openai_lmm_mock):  # noqa: F811
    lmm = OpenAILMM()
    prompt = "Can you generate a cat classifier?"
    classifier = lmm.generate_classifier(prompt)
    assert isinstance(classifier, CLIP)
    assert classifier.prompt == "cat"


@pytest.mark.parametrize(
    "openai_lmm_mock",
    ['{"Parameters": {"prompt": "cat"}}'],
    indirect=["openai_lmm_mock"],
)
def test_generate_detector(openai_lmm_mock):  # noqa: F811
    lmm = OpenAILMM()
    prompt = "Can you generate a cat detector?"
    detector = lmm.generate_detector(prompt)
    assert isinstance(detector, GroundingDINO)
    assert detector.prompt == "cat"


@pytest.mark.parametrize(
    "openai_lmm_mock",
    ['{"Parameters": {"prompt": "cat"}}'],
    indirect=["openai_lmm_mock"],
)
def test_generate_segmentor(openai_lmm_mock):  # noqa: F811
    lmm = OpenAILMM()
    prompt = "Can you generate a cat segmentor?"
    segmentor = lmm.generate_segmentor(prompt)
    assert isinstance(segmentor, GroundingSAM)
    assert segmentor.prompt == "cat"
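
The `image_url` assertion above checks the shape of the multimodal message. For reference, a sketch of the content layout the OpenAI chat completions API accepts for inline vision input; the helper name is hypothetical, and OpenAILMM's actual construction may differ in detail:

import base64


def build_image_message(prompt: str, image_path: str) -> dict:
    # Encode the image as a base64 data URL, the inline format the
    # OpenAI chat completions API accepts for vision inputs.
    with open(image_path, "rb") as f:
        encoded = base64.b64encode(f.read()).decode("utf-8")
    # content[0] carries the text, content[1] carries the image_url part,
    # which is what the test's call_args inspection looks for.
    return {
        "role": "user",
        "content": [
            {"type": "text", "text": prompt},
            {
                "type": "image_url",
                "image_url": {"url": f"data:image/jpeg;base64,{encoded}"},
            },
        ],
    }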