[Issue #3685] save search get opportunity id #3704
base: main
Changes from 22 commits
First file: the opportunity search service.

```diff
@@ -154,7 +154,7 @@ def _add_aggregations(builder: search.SearchQueryBuilder) -> None:
     builder.aggregation_terms("agency", _adjust_field_name("agency_code"), size=1000)


-def _get_search_request(params: SearchOpportunityParams) -> dict:
+def _get_search_request(params: SearchOpportunityParams, aggregation: bool = True) -> dict:
     builder = search.SearchQueryBuilder()

     # Make sure total hit count gets counted for more than 10k records
@@ -176,8 +176,9 @@ def _get_search_request(params: SearchOpportunityParams) -> dict:
     # Filters
     _add_search_filters(builder, params.filters)

-    # Aggregations / Facet / Filter Counts
-    _add_aggregations(builder)
+    if aggregation:
+        # Aggregations / Facet / Filter Counts
+        _add_aggregations(builder)

     return builder.build()

@@ -186,7 +187,6 @@ def search_opportunities(
     search_client: search.SearchClient, raw_search_params: dict
 ) -> Tuple[Sequence[dict], dict, PaginationInfo]:
     search_params = SearchOpportunityParams.model_validate(raw_search_params)
-
     search_request = _get_search_request(search_params)

     index_alias = get_search_config().opportunity_search_index_alias
@@ -213,3 +213,31 @@ def search_opportunities(
     records = SCHEMA.load(response.records, many=True)

     return records, response.aggregations, pagination_info
+
+
+def search_opportunities_id(search_client: search.SearchClient, search_query: dict) -> list:
+    # Override pagination when calling opensearch
+    updated_search_query = {
+        **search_query,
+        "pagination": {
+            "order_by": "post_date",
+            "page_offset": 1,
+            "page_size": 1000,
+            "sort_direction": "descending",
+        },
+    }
```
Reviewer: Could you move the definition of the static pagination values to a constant at the top of the file? There is also a very slightly briefer syntax you can use here.

Author: Done
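The reviewer's attached suggestion block did not survive in this thread, but a minimal sketch of the constant-based approach might look like the following. The constant name and the `dict(...)` keyword-override syntax are assumptions, not necessarily the reviewer's exact suggestion:

```python
# Hypothetical constant name; defined once at the top of the module.
OPPORTUNITY_ID_SEARCH_PAGINATION = {
    "order_by": "post_date",
    "page_offset": 1,
    "page_size": 1000,
    "sort_direction": "descending",
}


def search_opportunities_id(search_client: search.SearchClient, search_query: dict) -> list:
    # dict(mapping, key=value) copies the mapping and overrides one key,
    # which is slightly briefer than building a literal with ** unpacking.
    updated_search_query = dict(search_query, pagination=OPPORTUNITY_ID_SEARCH_PAGINATION)
    ...
```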
```diff
+
+    search_params = SearchOpportunityParams.model_validate(updated_search_query)
+
+    search_request = _get_search_request(search_params, False)
+
+    index_alias = get_search_config().opportunity_search_index_alias
+    logger.info(
+        "Querying search index alias %s", index_alias, extra={"search_index_alias": index_alias}
+    )
+
+    response = search_client.search(
+        index_alias, search_request, includes=["opportunity_id"], excludes=["attachments"]
+    )
```
Reviewer: This entire chunk is duplicated across both this new implementation and the existing one (with just the aggregation flag different). Could you move it into some shared bit of code they both use? That way if we need to make changes, they affect both without us missing one.

Author: Good point. Created a helper function, done.
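The helper the author added is not visible in this diff view. A sketch of what factoring out the duplicated validate/build/log/search sequence could look like, where the helper name and the keyword defaults are assumptions:

```python
def _execute_search(
    search_client: search.SearchClient,
    search_params: SearchOpportunityParams,
    aggregation: bool = True,
    includes: list | None = None,
    excludes: list | None = None,
):
    """Build and run the opportunity search query against the current index alias."""
    search_request = _get_search_request(search_params, aggregation)

    index_alias = get_search_config().opportunity_search_index_alias
    logger.info(
        "Querying search index alias %s", index_alias, extra={"search_index_alias": index_alias}
    )

    return search_client.search(index_alias, search_request, includes=includes, excludes=excludes)
```

With something like this in place, `search_opportunities_id` reduces to validating the overridden query, calling the helper with `aggregation=False`, and projecting out the IDs, while `search_opportunities` keeps its aggregation and pagination handling.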
```diff
+
+    return [opp["opportunity_id"] for opp in response.records]
```
Second file: the saved-search route tests.

```diff
@@ -1,8 +1,15 @@
+import uuid
+
 import pytest

+from src.api.opportunities_v1.opportunity_schemas import OpportunityV1Schema
 from src.constants.lookup_constants import FundingInstrument
 from src.db.models.user_models import UserSavedSearch
 from tests.src.api.opportunities_v1.conftest import get_search_request
+from tests.src.api.opportunities_v1.test_opportunity_route_search import (
+    NASA_INNOVATIONS,
+    NASA_SUPERSONIC,
+)
```
Reviewer: Don't import across unit tests like this - if the setup data has any shared use, move it to the conftest files that are already shared between the unit tests. Effectively, when we're talking about test setup, it should only go in one of three places.

Author: I did not think creating common test data in the conftest was appropriate, so I created reusable opportunities for the test module.
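A sketch of what module-level reusable records could look like, assuming an `OpportunityFactory` exists in `tests.src.db.models.factories` (the factory name and fields here are illustrative, not the PR's actual code):

```python
from tests.src.db.models.factories import OpportunityFactory  # assumed factory

# Built (not persisted) records that tests in this module can index into the
# search cluster and assert against, instead of importing them from another test file.
NASA_INNOVATIONS = OpportunityFactory.build(agency_code="NASA")
NASA_SUPERSONIC = OpportunityFactory.build(agency_code="NASA")
```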
```diff
 from tests.src.db.models.factories import UserFactory
```
```diff
@@ -71,14 +78,35 @@ def test_user_save_search_post_invalid_request(client, user, user_auth_token, db
     assert len(saved_searches) == 0


-def test_user_save_search_post(client, user, user_auth_token, enable_factory_create, db_session):
+def test_user_save_search_post(
+    client,
+    opportunity_index,
+    opportunity_index_alias,
+    search_client,
+    user,
+    user_auth_token,
+    enable_factory_create,
+    db_session,
+    monkeypatch,
+):
     # Test data
     search_name = "Test Search"
     search_query = get_search_request(
         funding_instrument_one_of=[FundingInstrument.GRANT],
-        agency_one_of=["LOC"],
+        agency_one_of=["NASA"],
     )

+    # Load into the search index
+    schema = OpportunityV1Schema()
+    json_records = [schema.dump(opp) for opp in [NASA_INNOVATIONS, NASA_SUPERSONIC]]
+    search_client.bulk_upsert(opportunity_index, json_records, "opportunity_id")
+
+    # Swap the search index alias
+    alias = f"test-user_save_search-index-alias-{uuid.uuid4().int}"
+    monkeypatch.setenv("OPPORTUNITY_SEARCH_INDEX_ALIAS", alias)
+
+    search_client.swap_alias_index(opportunity_index, alias)
```
Reviewer: I'd recommend putting anything like this into a utility/fixture - this is something we'll likely want in several unit tests, and we shouldn't need to copy-paste chunks of code for every one. We do already have a fixture for setting up the search index alias, but it's at the session level (so shared across all tests); it might need adjusting to also support other scopes.

Author: I did not want to change the existing session-scoped fixture, as it is used in other session-scoped fixtures, and creating another function-scoped fixture looked like overkill, which is why I did it this way. Do you think we should create a function-scoped fixture for the opportunity alias?

Reviewer: Yeah, a function-scoped fixture would be fine - a module- or class-level one might also be worth setting up, since we're usually fine with test setup for a whole file/class being reused across everything in that file/class (it's easier to follow and the tests run faster).

Author: Done
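A function-scoped version of the alias fixture discussed here might look something like the following. The fixture name is hypothetical and the wiring to the existing `search_client` / `opportunity_index` fixtures is an assumption based on the test body above:

```python
import uuid

import pytest


@pytest.fixture
def opportunity_test_index_alias(search_client, opportunity_index, monkeypatch):
    # Point the app at a unique alias for this test, then map it to the test index,
    # mirroring the inline setup in the test above.
    alias = f"test-opportunity-index-alias-{uuid.uuid4().int}"
    monkeypatch.setenv("OPPORTUNITY_SEARCH_INDEX_ALIAS", alias)
    search_client.swap_alias_index(opportunity_index, alias)
    return alias
```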
```diff
+
     # Make the request to save a search
     response = client.post(
         f"/v1/users/{user.user_id}/saved-searches",
@@ -88,18 +116,23 @@ def test_user_save_search_post(client, user, user_auth_token, enable_factory_cre

     assert response.status_code == 200
     assert response.json["message"] == "Success"

     # Verify the search was saved in the database
     saved_search = db_session.query(UserSavedSearch).one()

     assert saved_search.user_id == user.user_id
     assert saved_search.name == search_name
     assert saved_search.search_query == {
         "format": "json",
-        "filters": {"agency": {"one_of": ["LOC"]}, "funding_instrument": {"one_of": ["grant"]}},
+        "filters": {"agency": {"one_of": ["NASA"]}, "funding_instrument": {"one_of": ["grant"]}},
         "pagination": {
             "order_by": "opportunity_id",
             "page_size": 25,
             "page_offset": 1,
             "sort_direction": "ascending",
         },
     }
+    # Verify pagination for the query was overwritten: searched_opportunity_ids should be ordered by "post_date"
+    assert saved_search.searched_opportunity_ids == [
+        NASA_SUPERSONIC.opportunity_id,
+        NASA_INNOVATIONS.opportunity_id,
+    ]
```
Reviewer: I want to aim for as little logic as possible outside the service files - that makes the code more reusable (like how the search logic can be reused here). I'd just make `create_saved_search` fetch the opportunity IDs as the first thing it does.

Author: Done
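A sketch of the shape the reviewer is describing, with the ID fetch as the first step of `create_saved_search`. The function signature and the persistence details are assumptions, grounded only in the `UserSavedSearch` fields asserted in the test above:

```python
def create_saved_search(search_client, db_session, user_id, json_data: dict) -> UserSavedSearch:
    # Fetch the matching opportunity IDs first, so the route handler stays thin
    # and all the logic lives in the service layer.
    searched_opportunity_ids = search_opportunities_id(search_client, json_data["search_query"])

    saved_search = UserSavedSearch(
        user_id=user_id,
        name=json_data["name"],
        search_query=json_data["search_query"],
        searched_opportunity_ids=searched_opportunity_ids,
    )
    db_session.add(saved_search)

    return saved_search
```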