From 0eebd81d1a8fba806f8865510b7c9454e9c63031 Mon Sep 17 00:00:00 2001
From: downiec <42552189+downiec@users.noreply.github.com>
Date: Wed, 8 Jan 2025 14:25:20 -0800
Subject: [PATCH 1/7] Update version number, add version changelog (to be
updated with the changes that will be included in this update)
---
frontend/package.json | 2 +-
frontend/public/changelog/v1.2.2.md | 3 +++
frontend/public/messages/metagrid_messages.md | 2 +-
.../src/components/Messaging/messageDisplayData.ts | 12 +++++++++++-
4 files changed, 16 insertions(+), 3 deletions(-)
create mode 100644 frontend/public/changelog/v1.2.2.md
diff --git a/frontend/package.json b/frontend/package.json
index 0f4920a12..463038f54 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "frontend",
- "version": "1.2.1",
+ "version": "1.2.2",
"private": true,
"scripts": {
"build:local": "env-cmd -f .envs/.react react-scripts build",
diff --git a/frontend/public/changelog/v1.2.2.md b/frontend/public/changelog/v1.2.2.md
new file mode 100644
index 000000000..0b3ef5dd9
--- /dev/null
+++ b/frontend/public/changelog/v1.2.2.md
@@ -0,0 +1,3 @@
+## Summary
+
+1. Minor bugfixes.
diff --git a/frontend/public/messages/metagrid_messages.md b/frontend/public/messages/metagrid_messages.md
index c504c1a83..602aeffda 100644
--- a/frontend/public/messages/metagrid_messages.md
+++ b/frontend/public/messages/metagrid_messages.md
@@ -1,4 +1,4 @@
-# Welcome to the Metagrid Release v1.2.0
+# Welcome to the Metagrid Release v1.2.x
To view the latest documentation and FAQ, please visit this page:
[https://esgf.github.io/esgf-user-support/metagrid.html](https://esgf.github.io/esgf-user-support/metagrid.html)
diff --git a/frontend/src/components/Messaging/messageDisplayData.ts b/frontend/src/components/Messaging/messageDisplayData.ts
index ae035f2bf..15cf63d40 100644
--- a/frontend/src/components/Messaging/messageDisplayData.ts
+++ b/frontend/src/components/Messaging/messageDisplayData.ts
@@ -5,6 +5,7 @@ export const rightDrawerMessages: MarkdownMessage[] = [
];
export const rightDrawerChanges: MarkdownMessage[] = [
+ { title: 'V1.2.2', fileName: 'changelog/v1.2.2.md' },
{ title: 'V1.2.1', fileName: 'changelog/v1.2.1.md' },
{ title: 'V1.2.0', fileName: 'changelog/v1.2.0.md' },
{ title: 'V1.1.3-pre', fileName: 'changelog/v1.1.3-pre.md' },
@@ -17,9 +18,18 @@ export const rightDrawerChanges: MarkdownMessage[] = [
];
const startupMessages: StartPopupData = {
- messageToShow: 'v1.2.1', // This is the version number that appears in the footer
+ messageToShow: 'v1.2.2', // This is the version number that appears in the footer
defaultMessageId: 'welcome',
messageData: [
+ {
+ messageId: 'v1.2.2',
+ template: MessageTemplates.ChangeLog,
+ style: { minWidth: '700px' },
+ data: {
+ changesFile: 'changelog/v1.2.2.md',
+ version: '1.2.2',
+ },
+ },
{
messageId: 'v1.2.1',
template: MessageTemplates.ChangeLog,
From aff5ab532bf6d8b8fffad196a92783ce945366e0 Mon Sep 17 00:00:00 2001
From: Carlos Downie <42552189+downiec@users.noreply.github.com>
Date: Thu, 9 Jan 2025 14:30:59 -0800
Subject: [PATCH 2/7] Updated documentation references to config variables
(#706)
* Updated documentation, specifically references to the configuration parameters which have changed over time, to use the latest variable names currently used in configuration. Updated requirements to use the latest Read the Docs theme for Sphinx
* Update version number, add version changelog (to be updated with the changes that will be included in this update)
---
.../getting_started_production.md | 31 +++++++------------
docs/requirements.txt | 2 ++
frontend/.envs/.react | 4 ---
3 files changed, 13 insertions(+), 24 deletions(-)
diff --git a/docs/docs/contributors/getting_started_production.md b/docs/docs/contributors/getting_started_production.md
index c44d09f33..547de7f6e 100644
--- a/docs/docs/contributors/getting_started_production.md
+++ b/docs/docs/contributors/getting_started_production.md
@@ -32,20 +32,20 @@ NOTE: You can easily generate a secret key with Python using this command:
| ---------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| =========== TREAFIK CONFIG ============= |
| `DOMAIN_NAME` | The domain linked to the server hosting the Metagrid site. | | string | `DOMAIN_NAME=esgf-dev1.llnl.gov`
Local environment:
`DOMAIN_NAME=localhost` |
+| `DOMAIN_SUBDIRECTORY` | The domain subdirectory that is proxied to the Django site (e.g. _esgf-dev1.llnl.gov/metagrid-backend_). Omit backslash and match backend rules' `PathPrefix` in `traefik.yml`. | | string | `DOMAIN_SUBDIRECTORY=metagrid-backend` |
| `PUBLIC_URL` | **OPTIONAL** The domain subdirectory that is used to serve the front-end. Leave blank if you want users to access the app from the domain directly. | | string | `DOMAIN_SUBDIRECTORY=metagrid` |
-| `DOMAIN_SUBDIRECTORY` | **OPTIONAL** The domain subdirectory that is proxied to the Django site (e.g. _esgf-dev1.llnl.gov/metagrid-backend_). Omit backslash and match backend rules' `PathPrefix` in `traefik.yml`. | | string | `DOMAIN_SUBDIRECTORY=metagrid-backend` |
| =========== BACKEND CONFIG ============= |
-| `DJANGO_SECRET_KEY` | A secret key for a particular Django installation. This is used to provide cryptographic signing, and should be set to a unique, unpredictable value. | [Link](https://docs.djangoproject.com/en/3.0/ref/settings/#secret-key) | string | `DJANGO_SECRET_KEY=YAFKApvifkIFTw0DDNQQdHI34kyQdyWH89acWTogCfm4SGRz2x` |
+| `DJANGO_SECRET_KEY` | A secret key for a particular Django installation. This is used to provide cryptographic signing, and should be set to a unique, unpredictable value. | [Link](https://docs.djangoproject.com/en/3.0/ref/settings/#secret-key) | string | `DJANGO_SECRET_KEY=example_secret_key_gCfm4SGRz2x` |
| `DJANGO_ADMIN_URL` | The url to access the Django Admin page. It should be set to a unique, unpredictable value (not `admin/`). Take note of this value in order to access the admin page later on. For example with the settings shown here you would go to: https://esgf-dev1.llnl.gov/metagrid-backend/example_admin_url_87261847395 to access the admin site. Then you would use the admin credentials created when creating a django superuser (explained further below). | | string | `DJANGO_ADMIN_URL=example_admin_url_87261847395` |
| `DJANGO_ALLOWED_HOSTS` | A list of strings representing the host/domain names that this Django site can serve. This is a security measure to prevent HTTP Host header attacks, which are possible even under many seemingly-safe web server configurations. | [Link](https://docs.djangoproject.com/en/3.0/ref/settings/#allowed-hosts) | array of strings | `DJANGO_ALLOWED_HOSTS=esgf-dev1.llnl.gov`
Local environment:
`DJANGO_ALLOWED_HOSTS=localhost` |
| `KEYCLOAK_URL` | The url of your hosted Keycloak server, it must end with `/auth`. | [Link](https://django-allauth.readthedocs.io/en/latest/providers.html#keycloak) | string | `KEYCLOAK_URL=https://keycloak.metagrid.com/auth` |
| `KEYCLOAK_REALM` | The name of the Keycloak realm you want to use. | [Link](https://django-allauth.readthedocs.io/en/latest/providers.html#keycloak) | string | `KEYCLOAK_REALM=esgf` |
| `KEYCLOAK_CLIENT_ID` | The id for the Keycloak client, which is the entity that can request Keycloak to authenticate a user. | | string | `KEYCLOAK_CLIENT_ID=metagrid-backend` |
| ========== FRONTEND CONFIG ============= |
-| `REACT_APP_METAGRID_URL` | The URL for the MetaGrid API used to query projects, users, etc. | | string | `REACT_APP_METAGRID_API_URL=https://esgf-dev1.llnl/metagrid-backend`
Local environment:
`REACT_APP_METAGRID_API_URL=http://localhost:8000` |
+| `REACT_APP_METAGRID_API_URL` | The URL for the MetaGrid API used to query projects, users, etc. | | string | `REACT_APP_METAGRID_API_URL=https://esgf-dev1.llnl/metagrid-backend`
Local environment:
`REACT_APP_METAGRID_API_URL=http://localhost:8000` |
| `REACT_APP_WGET_API_URL` | The URL for the ESGF wget API to generate a wget script for downloading selected datasets. | [Link](https://github.com/ESGF/esgf-wget) | string | `REACT_APP_WGET_API_URL=https://pcmdi8vm.llnl.gov/wget` |
-| `REACT_APP_ESGF_NODE_URL` | The URL for the ESGF Search API node used to query datasets, files, and facets. | [Link](https://github.com/ESGF/esgf.github.io/wiki/ESGF_Search_REST_API) | string | `REACT_APP_ESGF_NODE_URL=https://esgf-node.llnl.gov` |
-| `REACT_APP_ESGF_NODE_STATUS` | The URL for the ESGF node status API node used to query node status. | | string | `REACT_APP_ESGF_NODE_STATUS_URL=https://aims4.llnl.gov/prometheus/api/v1/query?query=probe_success%7Bjob%3D%22http_2xx%22%2C+target%3D~%22.%2Athredds.%2A%22%7D` |
+| `REACT_APP_SEARCH_URL` | The URL for the ESGF Search API node used to query datasets, files, and facets. | [Link](https://github.com/ESGF/esgf.github.io/wiki/ESGF_Search_REST_API) | string | `REACT_APP_SEARCH_URL=https://esgf-node.llnl.gov` |
+| `REACT_APP_ESGF_NODE_STATUS_URL` | The URL for the ESGF node status API node used to query node status. | | string | `REACT_APP_ESGF_NODE_STATUS_URL=https://aims4.llnl.gov/prometheus/api/v1/query?query=probe_success%7Bjob%3D%22http_2xx%22%2C+target%3D~%22.%2Athredds.%2A%22%7D` |
| `REACT_APP_KEYCLOAK_URL` | The url of your hosted Keycloak server, it must end with `/auth`. | | string | `REACT_APP_KEYCLOAK_URL=https://keycloak.metagrid.com/auth` |
| `REACT_APP_KEYCLOAK_REALM` | The name of the Keycloak realm you want to use. | | string | `REACT_APP_KEYCLOAK_REALM=esgf` |
| `REACT_APP_KEYCLOAK_CLIENT_ID` | The id for the Keycloak client, which is an entity that can request Keycloak to authenticate a user. | | string | `REACT_APP_KEYCLOAK_CLIENT_ID=frontend` |
@@ -53,12 +53,11 @@ NOTE: You can easily generate a secret key with Python using this command:
| `REACT_APP_HOTJAR_SV` | **OPTIONAL**
The snippet version of the Tracking Code you are using. This is only needed if Hotjar ever updates the Tracking Code and needs to discontinue older ones. Knowing which version your site includes allows Hotjar team to contact you and inform you accordingly. | [Link](https://github.com/abdalla/react-hotjar) | number | `REACT_APP_HOTJAR_SV=6` |
| `REACT_APP_GOOGLE_ANALYTICS_TRACKING_ID` | **OPTIONAL**
Google Analytics tracking id. | [Link](https://github.com/react-ga/react-ga#api) | string | `REACT_APP_GOOGLE_ANALYTICS_TRACKING_ID=UA-000000-01` |
-### Example Production Configuration - v1.0.10
+### Example Production Configuration - v1.2.2
```
# =====================TREAFIK CONFIG=====================
-
DOMAIN_NAME=esgf-dev1.llnl.gov
PUBLIC_URL= # Not used, should this be deprecated
@@ -118,14 +117,7 @@ REACT_APP_GLOBUS_NODES=aims3.llnl.gov,esgf-data1.llnl.gov,esgf-data2.llnl.gov
# https://github.com/ESGF/esgf-wget
-REACT_APP_GLOBUS_SCRIPT_URL=https://greyworm1-rh7.llnl.gov/globusscript
-
-# ESGF Search API
-# https://esgf.github.io/esg-search/ESGF_Search_RESTful_API.html
-
-#REACT_APP_WGET_API_URL=https://esgf-fedtest.llnl.gov/esg-search/wget
-REACT_APP_WGET_API_URL=https://greyworm1-rh7.llnl.gov/wget
-#REACT_APP_WGET_API_URL=https://esgf-node.llnl.gov/esg-search/wget
+REACT_APP_WGET_API_URL=https://esgf-node.llnl.gov/esg-search/wget
# ESGF Search API
@@ -154,7 +146,7 @@ REACT_APP_DJANGO_LOGIN_URL=http://esgf-dev1.llnl.gov/metagrid-backend/login/glob
REACT_APP_DJANGO_LOGOUT_URL=http://esgf-dev1.llnl.gov/metagrid-backend/proxy/globus-logout/
# Authentication Method - switch to keycloak or globus
-REACT_APP_AUTHENTICATION_METHOD=keycloak
+REACT_APP_AUTHENTICATION_METHOD=globus
# https://docs.djangoproject.com/en/4.2/ref/settings/#logout-redirect-url
DJANGO_LOGIN_REDIRECT_URL=http://esgf-dev1.llnl.gov/search
@@ -162,8 +154,7 @@ DJANGO_LOGOUT_REDIRECT_URL=http://esgf-dev1.llnl.gov/search
# https://app.globus.org/settings/developers/registration/confidential_client
# Generate these at Globus, this is a confidential client
-GLOBUS_CLIENT_KEY=25e75a79-7d31-41bd-b1df-0668f7a42d91
-#c111e306-ad45-49ef-af54-6b107ab592ff
+GLOBUS_CLIENT_KEY=
GLOBUS_CLIENT_SECRET=
@@ -172,8 +163,8 @@ GLOBUS_CLIENT_SECRET=
# https://github.com/abdalla/react-hotjar
-REACT_APP_HOTJAR_ID=2079136
-REACT_APP_HOTJAR_SV=6
+REACT_APP_HOTJAR_ID=1234567
+REACT_APP_HOTJAR_SV=1
# react-ga
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 44d40820b..fec67370c 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,2 +1,4 @@
mkdocs==1.4.2 # https://www.mkdocs.org/
mdx_truly_sane_lists==1.3 # https://github.com/radude/mdx_truly_sane_lists
+sphinx==4.3.2 # https://www.sphinx-doc.org/
+sphinx_rtd_theme==1.0.0 # https://github.com/readthedocs/sphinx_rtd_theme
diff --git a/frontend/.envs/.react b/frontend/.envs/.react
index 64f0af1c3..ee8aa79d6 100644
--- a/frontend/.envs/.react
+++ b/frontend/.envs/.react
@@ -3,10 +3,6 @@
# Redirect the frontend to home page when old subdirectory is used (optional)
REACT_APP_PREVIOUS_URL=metagrid
-# MetaGrid API
-# https://github.com/aims-group/metagrid/tree/master/backend
-REACT_APP_METAGRID_API_URL=
-
# Authentication Method
REACT_APP_AUTHENTICATION_METHOD=globus
From e7683b39d1bd9735c3601f3ac7f4c16889dda006 Mon Sep 17 00:00:00 2001
From: Carlos Downie <42552189+downiec@users.noreply.github.com>
Date: Thu, 9 Jan 2025 14:32:17 -0800
Subject: [PATCH 3/7] Fixed the pagination so that it will reset to page one
when doing a new search (#705)
---
.../src/components/Globus/DatasetDownload.tsx | 21 ++++++++-----------
1 file changed, 9 insertions(+), 12 deletions(-)
diff --git a/frontend/src/components/Globus/DatasetDownload.tsx b/frontend/src/components/Globus/DatasetDownload.tsx
index b6a557440..7e8c23731 100644
--- a/frontend/src/components/Globus/DatasetDownload.tsx
+++ b/frontend/src/components/Globus/DatasetDownload.tsx
@@ -176,6 +176,8 @@ const DatasetDownloadForm: React.FC> = () => {
}
);
+ const [searchResultsPage, setSearchResultsPage] = React.useState(1);
+
function redirectToNewURL(newUrl: string): void {
setTimeout(() => {
window.location.replace(newUrl);
@@ -532,6 +534,7 @@ const DatasetDownloadForm: React.FC> = () => {
}
}
setGlobusEndpoints(mappedEndpoints);
+ setSearchResultsPage(1);
} else {
setEndpointSearchValue('');
setGlobusEndpoints([]);
@@ -1085,18 +1088,12 @@ const DatasetDownloadForm: React.FC> = () => {
data-testid="globusEndpointSearchResults"
loading={loadingEndpointSearchResults}
size="small"
- pagination={
- globusEndpoints &&
- globusEndpoints.length > COLLECTION_SEARCH_PAGE_SIZE
- ? {
- pageSize: COLLECTION_SEARCH_PAGE_SIZE,
- position: ['bottomRight'],
- }
- : {
- pageSize: COLLECTION_SEARCH_PAGE_SIZE,
- position: ['none'],
- }
- }
+ pagination={{
+ current: searchResultsPage,
+ pageSize: COLLECTION_SEARCH_PAGE_SIZE,
+ onChange: (page) => setSearchResultsPage(page),
+ position: ['bottomRight'],
+ }}
dataSource={globusEndpoints?.map((endpoint) => {
return { ...endpoint, key: endpoint.id };
})}
From e5a281228c5e680502d48981695ca36903bcf03a Mon Sep 17 00:00:00 2001
From: Carlos Downie <42552189+downiec@users.noreply.github.com>
Date: Wed, 15 Jan 2025 11:32:22 -0800
Subject: [PATCH 4/7] update for obs4MIPs (#711)
Co-authored-by: Sasha Ames
---
backend/metagrid/initial_projects_data.py | 41 ++++++++++++-----------
1 file changed, 22 insertions(+), 19 deletions(-)
diff --git a/backend/metagrid/initial_projects_data.py b/backend/metagrid/initial_projects_data.py
index 45eb18cd7..20afc8d26 100644
--- a/backend/metagrid/initial_projects_data.py
+++ b/backend/metagrid/initial_projects_data.py
@@ -209,23 +209,20 @@
"realm",
"data_node",
],
- GROUPS[1]: ["source_id"],
+ GROUPS[1]: ["source_id"
+ "institution_id",
+ "region",
+ "source_type",
+],
GROUPS[4]: [
"variable",
"variable_long_name",
"cf_standard_name",
],
- GROUPS[5]: [
- "institute",
- "time_frequency",
- ],
- GROUPS[6]: [
- "institution_id",
+ GROUPS[3]: [
"frequency",
"grid_label",
"nominal_resolution",
- "region",
- "source_type",
"variant_label",
],
},
@@ -256,6 +253,22 @@
],
},
},
+ {
+ "name": "DRCDP",
+ "description" : "Downscaled CMIP data for North America",
+ "facets_by_group" :{
+ "Search Properties": [
+ "downscaling_source_id",
+ "institution_id",
+ "driving_source_id",
+ "driving_experiment_id",
+ "source_id",
+ "version_realization",
+ "variable_id",
+ "driving_activity_id"
+ ]
+ }
+ },
{
"name": "All (except CMIP6)",
"description": "Cross project search for all projects except CMIP6.",
@@ -282,16 +295,6 @@
"cf_standard_name",
"driving_model",
],
- "ISIMIP-FT": [
- "impact_model",
- "sector",
- "social_forcing",
- "co2_forcing",
- "irrigation_forcing",
- "crop",
- "pft",
- "vegetation",
- ],
"CORDEX": [
"domain",
"rcm_name",
From 9aeb52da1e3b81239532144a610f94753c958b54 Mon Sep 17 00:00:00 2001
From: Carlos Downie <42552189+downiec@users.noreply.github.com>
Date: Wed, 15 Jan 2025 16:04:15 -0800
Subject: [PATCH 5/7] Updated the settings file to autoformat on save to
hopefully reduce linting errors. Fixed linting errors for the
initial_projects_data update. Fixed the status tooltip to correctly display
the node's status. Updated test fixtures to accurately reflect the node
status response (the node names included the https:// in front and that's why
status tooltip wasn't finding them). (#714)
---
.vscode/settings.json | 3 +-
backend/metagrid/initial_projects_data.py | 134 +++++++++++-------
.../components/NodeStatus/StatusToolTip.tsx | 10 +-
frontend/src/test/mock/fixtures.ts | 8 +-
4 files changed, 97 insertions(+), 58 deletions(-)
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 2ad2432c3..77b977236 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -77,5 +77,6 @@
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
- "python.analysis.extraPaths": ["backend/venv/bin/python"]
+ "python.analysis.extraPaths": ["backend/venv/bin/python"],
+ "editor.formatOnSave": true
}
diff --git a/backend/metagrid/initial_projects_data.py b/backend/metagrid/initial_projects_data.py
index 20afc8d26..9bf14b09f 100644
--- a/backend/metagrid/initial_projects_data.py
+++ b/backend/metagrid/initial_projects_data.py
@@ -38,7 +38,19 @@
"name": "CMIP6",
"full_name": "Coupled Model Intercomparison Project Phase 6",
"project_url": "https://pcmdi.llnl.gov/CMIP6/",
- "description": "The Coupled Model Intercomparison Project, which began in 1995 under the auspices of the World Climate Research Programme (WCRP), is now in its sixth phase (CMIP6). CMIP6 coordinates somewhat independent model intercomparison activities and their experiments which have adopted a common infrastructure for collecting, organizing, and distributing output from models performing common sets of experiments. The simulation data produced by models under previous phases of CMIP have been used in thousands of research papers (some of which are listed here), and the multi-model results provide some perspective on errors and uncertainty in model simulations. This information has proved invaluable in preparing high profile reports assessing our understanding of climate and climate change (e.g., the IPCC Assessment Reports).",
+ "description": (
+ "The Coupled Model Intercomparison Project, which began in 1995 under the "
+ "auspices of the World Climate Research Programme (WCRP), is now in its sixth "
+ "phase (CMIP6). CMIP6 coordinates somewhat independent model intercomparison "
+ "activities and their experiments which have adopted a common infrastructure "
+ "for collecting, organizing, and distributing output from models performing "
+ "common sets of experiments. The simulation data produced by models under "
+ "previous phases of CMIP have been used in thousands of research papers (some "
+ "of which are listed here), and the multi-model results provide some perspective "
+ "on errors and uncertainty in model simulations. This information has proved "
+ "invaluable in preparing high profile reports assessing our understanding of "
+ "climate and climate change (e.g., the IPCC Assessment Reports)."
+ ),
"facets_by_group": {
GROUPS[0]: ["activity_id", "data_node"],
GROUPS[1]: [
@@ -48,13 +60,8 @@
"experiment_id",
"sub_experiment_id",
],
- GROUPS[2]: [
- "nominal_resolution",
- ],
- GROUPS[3]: [
- "variant_label",
- "grid_label",
- ],
+ GROUPS[2]: ["nominal_resolution"],
+ GROUPS[3]: ["variant_label", "grid_label"],
GROUPS[4]: [
"table_id",
"frequency",
@@ -68,7 +75,9 @@
"name": "CMIP6Plus",
"full_name": "Coupled Model Intercomparison Project Phase 6 Plus",
"project_url": "https://wcrp-cmip.org/cmip-overview/",
- "description": "Follow-up to CMIP6 with closely matching configuration. Description of CMIP6 Plus needed.",
+ "description": (
+ "Follow-up to CMIP6 with closely matching configuration. Description of CMIP6 Plus needed."
+ ),
"facets_by_group": {
GROUPS[0]: ["activity_id", "data_node"],
GROUPS[1]: [
@@ -78,13 +87,8 @@
"experiment_id",
"sub_experiment_id",
],
- GROUPS[2]: [
- "nominal_resolution",
- ],
- GROUPS[3]: [
- "variant_label",
- "grid_label",
- ],
+ GROUPS[2]: ["nominal_resolution"],
+ GROUPS[3]: ["variant_label", "grid_label"],
GROUPS[4]: [
"table_id",
"frequency",
@@ -98,7 +102,21 @@
"name": "CMIP5",
"full_name": "Coupled Model Intercomparison Project Phase 5",
"project_url": "https://pcmdi.llnl.gov/mips/cmip5/",
- "description": "Under the World Climate Research Programme (WCRP) the Working Group on Coupled Modelling (WGCM) established the Coupled Model Intercomparison Project (CMIP) as a standard experimental protocol for studying the output of coupled atmosphere-ocean general circulation models (AOGCMs). CMIP provides a community-based infrastructure in support of climate model diagnosis, validation, intercomparison, documentation and data access. This framework enables a diverse community of scientists to analyze GCMs in a systematic fashion, a process which serves to facilitate model improvement. Virtually the entire international climate modeling community has participated in this project since its inception in 1995. The Program for Climate Model Diagnosis and Intercomparison (PCMDI) archives much of the CMIP data and provides other support for CMIP. PCMDI's CMIP effort is funded by the Regional and Global Climate Modeling (RGCM) Program of the Climate and Environmental Sciences Division of the U.S. Department of Energy's Office of Science, Biological and Environmental Research (BER) program.",
+ "description": (
+ "Under the World Climate Research Programme (WCRP) the Working Group on Coupled "
+ "Modelling (WGCM) established the Coupled Model Intercomparison Project (CMIP) as "
+ "a standard experimental protocol for studying the output of coupled atmosphere-ocean "
+ "general circulation models (AOGCMs). CMIP provides a community-based infrastructure "
+ "in support of climate model diagnosis, validation, intercomparison, documentation and "
+ "data access. This framework enables a diverse community of scientists to analyze GCMs "
+ "in a systematic fashion, a process which serves to facilitate model improvement. Virtually "
+ "the entire international climate modeling community has participated in this project since "
+ "its inception in 1995. The Program for Climate Model Diagnosis and Intercomparison (PCMDI) "
+ "archives much of the CMIP data and provides other support for CMIP. PCMDI's CMIP effort is "
+ "funded by the Regional and Global Climate Modeling (RGCM) Program of the Climate and Environmental "
+ "Sciences Division of the U.S. Department of Energy's Office of Science, Biological and Environmental "
+ "Research (BER) program."
+ ),
"facets_by_group": {
GROUPS[0]: [
"project",
@@ -107,10 +125,7 @@
"model",
"data_node",
],
- GROUPS[1]: [
- "experiment",
- "experiment_family",
- ],
+ GROUPS[1]: ["experiment", "experiment_family"],
GROUPS[4]: [
"time_frequency",
"realm",
@@ -126,7 +141,13 @@
"name": "E3SM",
"full_name": "Energy Exascale Earth System Model",
"project_url": "https://e3sm.org/",
- "description": "The Energy Exascale Earth System Model (E3SM), formerly known as Accelerated Climate Modeling for Energy (ACME) project is an ongoing, state-of-the-science Earth system modeling, simulation, and prediction project, sponsored by the U.S. Department of Energy's (DOE's) Office of Biological and Environmental Research (BER), that optimizes the use of DOE laboratory computational resources to meet the science needs of the nation and the mission needs of DOE.",
+ "description": (
+ "The Energy Exascale Earth System Model (E3SM), formerly known as Accelerated Climate Modeling for "
+ "Energy (ACME) project is an ongoing, state-of-the-science Earth system modeling, simulation, and "
+ "prediction project, sponsored by the U.S. Department of Energy's (DOE's) Office of Biological and "
+ "Environmental Research (BER), that optimizes the use of DOE laboratory computational resources to "
+ "meet the science needs of the nation and the mission needs of DOE."
+ ),
"facets_by_group": {
GROUPS[0]: ["data_node"],
GROUPS[1]: [
@@ -156,26 +177,33 @@
"name": "CMIP3",
"full_name": "Coupled Model Intercomparison Project Phase 3",
"project_url": "https://pcmdi.llnl.gov/mips/cmip3/",
- "description": "n response to a proposed activity of the World Climate Research Programme's (WCRP's) Working Group on Coupled Modelling (WGCM), PCMDI volunteered to collect model output contributed by leading modeling centers around the world. Climate model output from simulations of the past, present and future climate was collected by PCMDI mostly during the years 2005 and 2006, and this archived data constitutes phase 3 of the Coupled Model Intercomparison Project (CMIP3). In part, the WGCM organized this activity to enable those outside the major modeling centers to perform research of relevance to climate scientists preparing the Fourth Asssessment Report (AR4) of the Intergovernmental Panel on Climate Change (IPCC). The IPCC was established by the World Meteorological Organization and the United Nations Environmental Program to assess scientific information on climate change. The IPCC publishes reports that summarize the state of the science.",
+ "description": (
+ "In response to a proposed activity of the World Climate Research Programme's (WCRP's) Working Group on "
+ "Coupled Modelling (WGCM), PCMDI volunteered to collect model output contributed by leading modeling centers "
+ "around the world. Climate model output from simulations of the past, present and future climate was collected "
+ "by PCMDI mostly during the years 2005 and 2006, and this archived data constitutes phase 3 of the Coupled Model "
+ "Intercomparison Project (CMIP3). In part, the WGCM organized this activity to enable those outside the major "
+ "modeling centers to perform research of relevance to climate scientists preparing the Fourth Assessment Report "
+ "(AR4) of the Intergovernmental Panel on Climate Change (IPCC). The IPCC was established by the World Meteorological "
+ "Organization and the United Nations Environmental Program to assess scientific information on climate change. The "
+ "IPCC publishes reports that summarize the state of the science."
+ ),
"facets_by_group": {
- GROUPS[0]: [
- "model",
- "experiment",
- "institute",
- ],
- GROUPS[4]: [
- "variable",
- "realm",
- "time_frequency",
- "ensemble",
- ],
+ GROUPS[0]: ["model", "experiment", "institute"],
+ GROUPS[4]: ["variable", "realm", "time_frequency", "ensemble"],
},
},
{
"name": "input4MIPs",
"full_name": "input datasets for Model Intercomparison Projects",
"project_url": "https://pcmdi.llnl.gov/mips/input4MIPs/",
- "description": "input4MIPS (input datasets for Model Intercomparison Projects) is an activity to make available via ESGF the boundary condition and forcing datasets needed for CMIP6. Various datasets are needed for the pre-industrial control (piControl), AMIP, and historical simulations, and additional datasets are needed for many of the CMIP6-endorsed model intercomparison projects (MIPs) experiments. Earlier versions of many of these datasets were used in the 5th Coupled Model Intercomparison Project (CMIP5).",
+ "description": (
+ "input4MIPS (input datasets for Model Intercomparison Projects) is an activity to make available via ESGF the "
+ "boundary condition and forcing datasets needed for CMIP6. Various datasets are needed for the pre-industrial "
+ "control (piControl), AMIP, and historical simulations, and additional datasets are needed for many of the "
+ "CMIP6-endorsed model intercomparison projects (MIPs) experiments. Earlier versions of many of these datasets "
+ "were used in the 5th Coupled Model Intercomparison Project (CMIP5)."
+ ),
"facets_by_group": {
GROUPS[0]: [
"mip_era",
@@ -202,18 +230,19 @@
"name": "obs4MIPs",
"full_name": "observations for Model Intercomparison Projects",
"project_url": "https://pcmdi.github.io/obs4MIPs/",
- "description": "Obs4MIPs (Observations for Model Intercomparisons Project) is an activity to make observational products more accessible for climate model intercomparisons via the same searchable distributed system used to serve and disseminate the rapidly expanding set of simulations made available for community research.",
+ "description": (
+ "Obs4MIPs (Observations for Model Intercomparisons Project) is an activity to make observational products more "
+ "accessible for climate model intercomparisons via the same searchable distributed system used to serve and "
+ "disseminate the rapidly expanding set of simulations made available for community research."
+ ),
"facets_by_group": {
- GROUPS[0]: [
- "product",
- "realm",
- "data_node",
- ],
- GROUPS[1]: ["source_id"
+ GROUPS[0]: ["product", "realm", "data_node"],
+ GROUPS[1]: [
+ "source_id",
"institution_id",
"region",
"source_type",
-],
+ ],
GROUPS[4]: [
"variable",
"variable_long_name",
@@ -230,7 +259,14 @@
{
"name": "CREATE-IP",
"full_name": "Collaborative REAnalysis Technical Environment",
- "description": "The Collaborative REAnalysis Technical Environment (CREATE) is a NASA Climate Model Data Services (CDS) project to collect all available global reanalysis data into one centralized location on NASA’s NCCS Advanced Data Analytics Platform (ADAPT), standardizing data formats, providing analytic capabilities, visualization analysis capabilities, and overall improved access to multiple reanalysis datasets. The CREATE project encompasses two efforts - CREATE-IP and CREATE-V. CREATE-IP is the project that collects and formats the reanalyses data. The list of variables currently available in CREATE-IP is growing over time so please check back frequently.",
+ "description": (
+ "The Collaborative REAnalysis Technical Environment (CREATE) is a NASA Climate Model Data Services (CDS) project to "
+ "collect all available global reanalysis data into one centralized location on NASA’s NCCS Advanced Data Analytics "
+ "Platform (ADAPT), standardizing data formats, providing analytic capabilities, visualization analysis capabilities, "
+ "and overall improved access to multiple reanalysis datasets. The CREATE project encompasses two efforts - CREATE-IP "
+ "and CREATE-V. CREATE-IP is the project that collects and formats the reanalyses data. The list of variables currently "
+ "available in CREATE-IP is growing over time so please check back frequently."
+ ),
"project_url": "https://reanalyses.org/",
"facets_by_group": {
GROUPS[0]: [
@@ -255,9 +291,9 @@
},
{
"name": "DRCDP",
- "description" : "Downscaled CMIP data for North America",
- "facets_by_group" :{
- "Search Properties": [
+ "description": "Downscaled CMIP data for North America",
+ "facets_by_group": {
+ "Search Properties": [
"downscaling_source_id",
"institution_id",
"driving_source_id",
@@ -265,9 +301,9 @@
"source_id",
"version_realization",
"variable_id",
- "driving_activity_id"
+ "driving_activity_id",
]
- }
+ },
},
{
"name": "All (except CMIP6)",
diff --git a/frontend/src/components/NodeStatus/StatusToolTip.tsx b/frontend/src/components/NodeStatus/StatusToolTip.tsx
index b1980f29d..595e80767 100644
--- a/frontend/src/components/NodeStatus/StatusToolTip.tsx
+++ b/frontend/src/components/NodeStatus/StatusToolTip.tsx
@@ -19,8 +19,8 @@ const StatusToolTip: React.FC> = ({
children,
}) => {
if (nodeStatus) {
- const node = (nodeStatus.find(
- (obj) => obj.name === dataNode
+ const node = (nodeStatus.find((obj) =>
+ obj.name.includes(dataNode)
) as unknown) as NodeStatusElement;
/* istanbul ignore else*/
@@ -107,7 +107,8 @@ const StatusToolTip: React.FC> = ({
title={
<>
Could not fetch status. Please contact support or try again later.
- Data Node:{dataNode}
+ Data Node:
+ {dataNode}
>
}
>
@@ -123,7 +124,8 @@ const StatusToolTip: React.FC> = ({
title={
<>
Could not fetch status. Please contact support or try again later.
- Data Node:{dataNode}
+ Data Node:
+ {dataNode}
>
}
>
diff --git a/frontend/src/test/mock/fixtures.ts b/frontend/src/test/mock/fixtures.ts
index 5bfa27002..03699b936 100644
--- a/frontend/src/test/mock/fixtures.ts
+++ b/frontend/src/test/mock/fixtures.ts
@@ -259,7 +259,7 @@ export const rawNodeStatusFixture = (
{
metric: {
__name__: 'probe_success',
- instance: 'aims3.llnl.gov',
+ instance: 'https://aims3.llnl.gov',
job: 'http_2xx',
target: 'https://aims3.llnl.gov/thredds/catalog/catalog.html',
},
@@ -268,7 +268,7 @@ export const rawNodeStatusFixture = (
{
metric: {
__name__: 'probe_success',
- instance: 'esgf1.dkrz.de',
+ instance: 'https://esgf1.dkrz.de',
job: 'http_2xx',
target: 'https://esgf1.dkrz.de/thredds/catalog/catalog.html',
},
@@ -282,13 +282,13 @@ export const rawNodeStatusFixture = (
};
export const parsedNodeStatusFixture = (): NodeStatusArray => [
{
- name: 'aims3.llnl.gov',
+ name: 'https://aims3.llnl.gov',
source: 'https://aims3.llnl.gov/thredds/catalog/catalog.html',
timestamp: 'Wed, 21 Oct 2020 21:23:50 GMT',
isOnline: true,
},
{
- name: 'esgf1.dkrz.de',
+ name: 'https://esgf1.dkrz.de',
source: 'https://esgf1.dkrz.de/thredds/catalog/catalog.html',
timestamp: 'Wed, 21 Oct 2020 21:23:50 GMT',
isOnline: false,
From b622c00adcf8d4a725c178e70ed4b9e139e834cf Mon Sep 17 00:00:00 2001
From: Carlos Downie <42552189+downiec@users.noreply.github.com>
Date: Wed, 15 Jan 2025 16:48:58 -0800
Subject: [PATCH 6/7] Added alert for the case that a collection search fails
in the manage collections popup (#710)
* Added alert for the case that a collection search fails in the collection management popup. Added a test to bring the coverage up to 100% for the search table component. Removed some unused code, the get endpoint function is not used.
* Minor linting error fixes.
* Fixed some backend tests based on the removed/unused functions
* Small fix to test.
---
backend/config/urls.py | 6 -
.../metagrid/api_proxy/tests/test_views.py | 8 -
backend/metagrid/api_proxy/views.py | 21 -
.../Globus/DatasetDownload.test.tsx | 515 +-----------------
.../src/components/Globus/DatasetDownload.tsx | 11 +
frontend/src/components/Search/Table.test.tsx | 23 +-
6 files changed, 63 insertions(+), 521 deletions(-)
diff --git a/backend/config/urls.py b/backend/config/urls.py
index 6f3c82b21..526c4fea2 100644
--- a/backend/config/urls.py
+++ b/backend/config/urls.py
@@ -16,7 +16,6 @@
from metagrid.api_proxy.views import (
do_citation,
do_globus_auth,
- do_globus_get_endpoint,
do_globus_logout,
do_globus_search_endpoints,
do_search,
@@ -58,11 +57,6 @@ class KeycloakLogin(SocialLoginView):
path("", include("social_django.urls", namespace="social")),
path("proxy/globus-logout/", do_globus_logout, name="globus-logout"),
path("proxy/globus-auth/", do_globus_auth, name="globus-auth"),
- path(
- "proxy/globus-get-endpoint/",
- do_globus_get_endpoint,
- name="globus-get-endpoint",
- ),
path(
"proxy/globus-search-endpoints/",
do_globus_search_endpoints,
diff --git a/backend/metagrid/api_proxy/tests/test_views.py b/backend/metagrid/api_proxy/tests/test_views.py
index 365a1d12b..9a256dcb0 100644
--- a/backend/metagrid/api_proxy/tests/test_views.py
+++ b/backend/metagrid/api_proxy/tests/test_views.py
@@ -23,14 +23,6 @@ def test_globus_auth_begin(self):
)
self.assertEqual(response.status_code, 302)
- def test_do_globus_get_endpoint(self):
- url = reverse("globus-get-endpoint")
-
- data = {"endpoint_id": "0247816e-cc0d-4e03-a509-10903f6dde11"}
- response = self.client.get(url, data)
- print(response.status_code)
- assert response.status_code == status.HTTP_200_OK
-
def test_do_globus_search_endpoints(self):
url = reverse("globus-search-endpoints")
diff --git a/backend/metagrid/api_proxy/views.py b/backend/metagrid/api_proxy/views.py
index 331d26396..963fc2b13 100644
--- a/backend/metagrid/api_proxy/views.py
+++ b/backend/metagrid/api_proxy/views.py
@@ -51,27 +51,6 @@ def do_globus_logout(request):
return redirect(homepage_url)
-@api_view()
-@permission_classes([])
-def do_globus_get_endpoint(request):
- endpoint_id = request.GET.get("endpoint_id", None)
- if request.user.is_authenticated:
- tc = load_transfer_client(request.user) # pragma: no cover
- else:
- client = globus_sdk.ConfidentialAppAuthClient(
- settings.SOCIAL_AUTH_GLOBUS_KEY, settings.SOCIAL_AUTH_GLOBUS_SECRET
- )
- token_response = client.oauth2_client_credentials_tokens()
- globus_transfer_data = token_response.by_resource_server[
- "transfer.api.globus.org"
- ]
- globus_transfer_token = globus_transfer_data["access_token"]
- authorizer = globus_sdk.AccessTokenAuthorizer(globus_transfer_token)
- tc = globus_sdk.TransferClient(authorizer=authorizer)
- endpoint = tc.get_endpoint(endpoint_id)
- return Response(endpoint.data)
-
-
@api_view()
@permission_classes([])
def do_globus_search_endpoints(request):
diff --git a/frontend/src/components/Globus/DatasetDownload.test.tsx b/frontend/src/components/Globus/DatasetDownload.test.tsx
index 581b6f8f6..d25db37cb 100644
--- a/frontend/src/components/Globus/DatasetDownload.test.tsx
+++ b/frontend/src/components/Globus/DatasetDownload.test.tsx
@@ -1,12 +1,11 @@
import React from 'react';
import userEvent from '@testing-library/user-event';
-import { act, waitFor, within, screen } from '@testing-library/react';
+import { act, within, screen } from '@testing-library/react';
import customRender from '../../test/custom-render';
import { rest, server } from '../../test/mock/server';
import { getSearchFromUrl } from '../../common/utils';
import { ActiveSearchQuery, RawSearchResults } from '../Search/types';
import {
- getRowName,
globusReadyNode,
makeCartItem,
mockConfig,
@@ -23,7 +22,6 @@ import {
globusEndpointFixture,
globusAccessTokenFixture,
globusTransferTokenFixture,
- userCartFixture,
} from '../../test/mock/fixtures';
import apiRoutes from '../../api/routes';
import DatasetDownloadForm, { GlobusGoals } from './DatasetDownload';
@@ -883,507 +881,54 @@ describe('DatasetDownload form tests', () => {
'Submitted: 11/30/2023, 3:10:00 PM'
);
});
-});
-
-xit('If endpoint URL is available, process it and continue to Transfer process', async () => {
- // Setting the tokens so that the sign-in step should be completed
- mockSaveValue(CartStateKeys.cartItemSelections, userCartFixture());
- mockSaveValue(GlobusStateKeys.accessToken, 'accessToken');
- mockSaveValue(GlobusStateKeys.transferToken, {
- id_token: '',
- resource_server: '',
- other_tokens: { refresh_token: 'something', transfer_token: 'something' },
- created_on: Math.floor(Date.now() / 1000),
- expires_in: Math.floor(Date.now() / 1000) + 100,
- access_token: '',
- refresh_expires_in: 0,
- refresh_token: 'something',
- scope:
- 'openid profile email offline_access urn:globus:auth:scope:transfer.api.globus.org:all',
- token_type: '',
- } as GlobusTokenResponse);
-
- const { getByTestId, getByRole, getByText, getAllByText } = customRender(
-
- );
-
- // Wait for results to load
- await waitFor(() =>
- expect(getByText('results found for', { exact: false })).toBeTruthy()
- );
-
- // Check first row renders and click the checkbox
- const firstRow = getByRole('row', {
- name: getRowName('plus', 'check', 'foo', '3', '1', '1', true),
- });
-
- // Check first row has add button and click it
- const addBtn = within(firstRow).getByRole('img', { name: 'plus' });
- expect(addBtn).toBeTruthy();
- await act(async () => {
- await user.click(addBtn);
- });
-
- // Check 'Added items(s) to the cart' message appears
- const addText = await waitFor(
- () => getAllByText('Added item(s) to your cart')[0]
- );
- expect(addText).toBeTruthy();
-
- // Set endpoint in url
- Object.defineProperty(window, 'location', {
- value: {
- assign: () => {},
- pathname: '/cart/items',
- href:
- 'https://localhost:3000/cart/items?endpoint=dummyEndpoint&label=dummy&path=nowhere&globfs=empty&endpointid=endpoint1',
- search:
- '?endpoint=dummyEndpoint&label=dummy&path=nowhere&globfs=empty&endpointid=endpoint1',
- replace: () => {},
- },
- });
-
- // Switch to the cart page
- const cartBtn = getByTestId('cartPageLink');
- await act(async () => {
- await user.click(cartBtn);
- });
-
- // A popup should come asking if user wishes to save endpoint as default
- const saveEndpointDialog = getByRole('dialog');
- expect(saveEndpointDialog).toBeTruthy();
- expect(saveEndpointDialog).toBeVisible();
- expect(saveEndpointDialog).toHaveTextContent(
- 'Do you want to save this endpoint as default?'
- );
-
- // Click Yes to save the endpoint as default
- const yesBtn = within(saveEndpointDialog).getByText('Yes');
- expect(yesBtn).toBeTruthy();
- await act(async () => {
- await user.click(yesBtn);
- });
-
- // Next step should be to start the Transfer
- const globusTransferDialog = getByRole('dialog');
- expect(globusTransferDialog).toBeTruthy();
-
- // Select the final transfer step in the dialog
- const transferStep = within(globusTransferDialog).getByText(
- 'Start Globus transfer.',
- {
- exact: false,
- }
- );
- // The transfer step should be the next step to perform
- expect(transferStep.innerHTML).toMatch(/-> {2}Start Globus transfer./i);
-
- // Click Yes to continue transfer steps
- const startBtn = within(globusTransferDialog).getByText('Yes');
- expect(startBtn).toBeTruthy();
- await act(async () => {
- await user.click(startBtn);
- });
-
- // Check 'Globus transfer task submitted successfully!' message appears
- const taskMsg = await waitFor(() =>
- getByText('Globus transfer task submitted successfully!', {
- exact: false,
- })
- );
- expect(taskMsg).toBeTruthy();
-
- // Clear all task items
- const submitHistory = getByText('Task Submit History', { exact: false });
- expect(submitHistory).toBeTruthy();
- const clearAllBtn = within(submitHistory).getByText('Clear All');
- expect(clearAllBtn).toBeTruthy();
- await act(async () => {
- await user.click(clearAllBtn);
- });
-});
-
-xit('If endpoint URL is set, and sign in tokens in URL, continue to select endpoint', async () => {
- // Setting the tokens so that the sign-in step should be completed
-
- const { getByTestId, getByRole, getByText, getAllByText } = customRender(
-
- );
-
- // Wait for results to load
- await waitFor(() =>
- expect(getByText('results found for', { exact: false })).toBeTruthy()
- );
-
- // Check first row renders and click the checkbox
- const firstRow = getByRole('row', {
- name: getRowName('plus', 'check', 'foo', '3', '1', '1', true),
- });
-
- // Check first row has add button and click it
- const addBtn = within(firstRow).getByRole('img', { name: 'plus' });
- expect(addBtn).toBeTruthy();
- await act(async () => {
- await user.click(addBtn);
- });
-
- // Check 'Added items(s) to the cart' message appears
- const addText = await waitFor(
- () => getAllByText('Added item(s) to your cart')[0]
- );
- expect(addText).toBeTruthy();
-
- // Switch to the cart page
- const cartBtn = getByTestId('cartPageLink');
- await act(async () => {
- await user.click(cartBtn);
- });
- // Select item for globus transfer
- const firstCheckBox = getByRole('checkbox');
- expect(firstCheckBox).toBeTruthy();
- await act(async () => {
- await user.click(firstCheckBox);
- });
-
- // Click Transfer button
- const globusTransferBtn = getByRole('button', {
- name: /download transfer/i,
- });
- expect(globusTransferBtn).toBeTruthy();
- await act(async () => {
- await user.click(globusTransferBtn);
- });
-
- // Get the transfer dialog popup component
- const popupModal = getByRole('dialog');
- expect(popupModal).toBeTruthy();
-
- // The dialog should be visible
- expect(popupModal).toBeVisible();
-
- // Select the sign-in step in the dialog
- const signInStep = within(popupModal).getByText(
- 'Redirect to obtain transfer permission from Globus',
- {
- exact: false,
- }
- );
- // It should have a -> symbol next to it to indicate it's the next step
- expect(signInStep.innerHTML).toMatch(
- '-> Redirect to obtain transfer permission from Globus.'
- );
-
- // Select the endpoint step in the dialog
- const selectEndpointStep = within(
- popupModal
- ).getByText('Redirect to select an endpoint in Globus', { exact: false });
- // It should NOT have a -> symbol next to it to indicate it's the next step
- expect(selectEndpointStep.innerHTML).toMatch(
- 'Redirect to select an endpoint in Globus.'
- );
-
- // Click Yes to start next transfer steps
- const yesBtn = getByText('Yes');
- expect(yesBtn).toBeTruthy();
- await act(async () => {
- await user.click(yesBtn);
- });
-
- // Expect the dialog to not be visible
- expect(popupModal).not.toBeVisible();
-});
-
-xit('Perform Transfer process when sign in tokens and endpoint are BOTH ready', async () => {
- // Setting the tokens so that the sign-in step should be completed
- mockSaveValue(CartStateKeys.cartItemSelections, userCartFixture());
- mockSaveValue(GlobusStateKeys.accessToken, 'accessToken');
- mockSaveValue(GlobusStateKeys.transferToken, {
- id_token: '',
- resource_server: '',
- other_tokens: { refresh_token: 'something', transfer_token: 'something' },
- created_on: Math.floor(Date.now() / 1000),
- expires_in: Math.floor(Date.now() / 1000) + 100,
- access_token: '',
- refresh_expires_in: 0,
- refresh_token: 'something',
- scope:
- 'openid profile email offline_access urn:globus:auth:scope:transfer.api.globus.org:all',
- token_type: '',
- } as GlobusTokenResponse);
-
- const { getByTestId, getByRole, getByText, getAllByText } = customRender(
-
- );
-
- // Wait for results to load
- await waitFor(() =>
- expect(getByText('results found for', { exact: false })).toBeTruthy()
- );
-
- // Check first row renders and click the checkbox
- const firstRow = getByRole('row', {
- name: getRowName('plus', 'check', 'foo', '3', '1', '1', true),
- });
-
- // Check first row has add button and click it
- const addBtn = within(firstRow).getByRole('img', { name: 'plus' });
- expect(addBtn).toBeTruthy();
- await act(async () => {
- await user.click(addBtn);
- });
-
- // Check 'Added items(s) to the cart' message appears
- const addText = await waitFor(
- () => getAllByText('Added item(s) to your cart')[0]
- );
- expect(addText).toBeTruthy();
-
- // Switch to the cart page
- const cartBtn = getByTestId('cartPageLink');
- await act(async () => {
- await user.click(cartBtn);
- });
-
- // Check 'Globus transfer task submitted successfully!' message appears
- const taskMsg = await waitFor(() =>
- getByText('Globus transfer task submitted successfully!', {
- exact: false,
- })
- );
- expect(taskMsg).toBeTruthy();
-
- // Clear all task items
- const submitHistory = getByText('Task Submit History', { exact: false });
- expect(submitHistory).toBeTruthy();
- const clearAllBtn = within(submitHistory).getByText('Clear All');
- expect(clearAllBtn).toBeTruthy();
- await act(async () => {
- await user.click(clearAllBtn);
- });
-});
-
-xdescribe('Testing globus transfer related failures', () => {
- beforeAll(() => {
- jest.spyOn(console, 'error').mockImplementation(jest.fn());
- tempStorageSetMock('pkce-pass', false);
- jest.resetModules();
- });
-
- it('Shows an error message if transfer task fails', async () => {
+ it('Shows an alert when a collection search fails in the manage collections form', async () => {
server.use(
- rest.post(apiRoutes.globusTransfer.path, (_req, res, ctx) =>
- res(ctx.status(404))
+ rest.get(apiRoutes.globusSearchEndpoints.path, (_req, res, ctx) =>
+ res(ctx.status(500))
)
);
- // Setting the tokens so that the sign-in step should be completed
- mockSaveValue(CartStateKeys.cartItemSelections, userCartFixture());
- mockSaveValue(GlobusStateKeys.accessToken, 'globusAccessToken');
- mockSaveValue(GlobusStateKeys.transferToken, {
- id_token: '',
- resource_server: '',
- other_tokens: { refresh_token: 'something', transfer_token: 'something' },
- created_on: Math.floor(Date.now() / 1000),
- expires_in: Math.floor(Date.now() / 1000) + 100,
- access_token: '',
- refresh_expires_in: 0,
- refresh_token: 'something',
- scope: 'openid profile email offline_access ',
- token_type: '',
- } as GlobusTokenResponse);
-
- const { getByTestId, getByRole, getByText, getAllByText } = customRender(
-
- );
-
- // Wait for results to load
- await waitFor(() =>
- expect(getByText('results found for', { exact: false })).toBeTruthy()
- );
-
- // Check first row renders and click the checkbox
- const firstRow = getByRole('row', {
- name: getRowName('plus', 'check', 'foo', '3', '1', '1', true),
- });
-
- // Check first row has add button and click it
- const addBtn = within(firstRow).getByRole('img', { name: 'plus' });
- expect(addBtn).toBeTruthy();
- await act(async () => {
- await user.click(addBtn);
- });
-
- // Check 'Added items(s) to the cart' message appears
- const addText = await waitFor(
- () => getAllByText('Added item(s) to your cart')[0]
- );
- expect(addText).toBeTruthy();
-
- // Switch to the cart page
- const cartBtn = getByTestId('cartPageLink');
- await act(async () => {
- await user.click(cartBtn);
+ await initializeComponentForTest({
+ ...defaultTestConfig,
+ savedEndpoints: [],
+ chosenEndpoint: null,
});
- // Check 'Globus transfer task failed' message appears
- const taskMsg = await waitFor(() =>
- getByText('Globus transfer task failed', {
- exact: false,
- })
- );
- expect(taskMsg).toBeTruthy();
- });
-
- // TODO: Figure a reliable way to mock the GlobusAuth.exchangeForAccessToken output values.
- /** Until that is done, this test will fail and will need to use istanbul ignore statements
- * for the mean time.
- */
- xit('Shows error message if url tokens are not valid for transfer', async () => {
- // Setting the tokens so that the sign-in step should be skipped
- mockSaveValue(CartStateKeys.cartItemSelections, userCartFixture());
-
- tempStorageSetMock('pkce-pass', false);
-
- const { getByTestId, getByRole, getByText, getAllByText } = customRender(
-
- );
-
- // Wait for results to load
- await waitFor(() =>
- expect(getByText('results found for', { exact: false })).toBeTruthy()
+ // Open download dropdown
+ const collectionDropdown = await screen.findByTestId(
+ 'searchCollectionInput'
);
-
- // Check first row renders and click the checkbox
- const firstRow = getByRole('row', {
- name: getRowName('plus', 'check', 'foo', '3', '1', '1', true),
- });
-
- // Check first row has add button and click it
- const addBtn = within(firstRow).getByRole('img', { name: 'plus' });
- expect(addBtn).toBeTruthy();
- await act(async () => {
- await user.click(addBtn);
- });
-
- // Check 'Added items(s) to the cart' message appears
- const addText = await waitFor(
- () => getAllByText('Added item(s) to your cart')[0]
+ const selectEndpoint = await within(collectionDropdown).findByRole(
+ 'combobox'
);
- expect(addText).toBeTruthy();
-
- // Set the tokens in the url
- Object.defineProperty(window, 'location', {
- value: {
- assign: () => {},
- pathname: '/cart/items',
- href:
- 'https://localhost:3000/cart/items?code=12kj3kjh4&state=testingTransferTokens',
- search: '?code=12kj3kjh4&state=testingTransferTokens',
- replace: () => {},
- },
- });
-
- tempStorageSetMock('pkce-pass', false);
+ await openDropdownList(user, selectEndpoint);
- // Switch to the cart page
- const cartBtn = getByTestId('cartPageLink');
+ // Select manage collections
+ const manageEndpointsBtn = await screen.findByText('Manage Collections');
+ expect(manageEndpointsBtn).toBeTruthy();
await act(async () => {
- await user.click(cartBtn);
+ await user.click(manageEndpointsBtn);
});
- const accessToken = await mockLoadValue(GlobusStateKeys.accessToken);
- const transferToken = await mockLoadValue(GlobusStateKeys.transferToken);
-
- expect(accessToken).toBeFalsy();
- expect(transferToken).toBeFalsy();
-
- // Check 'Error occurred when obtaining transfer permission!' message appears
- const taskMsg = await waitFor(
- () =>
- getAllByText('Error occured when obtaining transfer permissions.', {
- exact: false,
- })[0]
- );
- expect(taskMsg).toBeTruthy();
- });
-});
-
-xdescribe('Testing wget transfer related failures', () => {
- it('Wget transfer fails and failure message pops up.', async () => {
- server.use(
- rest.post(apiRoutes.wget.path, (_req, res, ctx) => res(ctx.status(404)))
- );
-
- const { getByTestId, getByRole, getByText, getAllByText } = customRender(
-
- );
-
- // Wait for results to load
- await waitFor(() =>
- expect(getByText('results found for', { exact: false })).toBeTruthy()
+ const manageCollectionsForm = await screen.findByTestId(
+ 'manageCollectionsForm'
);
+ expect(manageCollectionsForm).toBeTruthy();
- // Check first row renders and click the checkbox
- const firstRow = getByRole('row', {
- name: getRowName('plus', 'check', 'foo', '3', '1', '1', true),
- });
-
- // Check first row has add button and click it
- const addBtn = within(firstRow).getByRole('img', { name: 'plus' });
- expect(addBtn).toBeTruthy();
- await act(async () => {
- await user.click(addBtn);
- });
-
- // Check 'Added items(s) to the cart' message appears
- const addText = await waitFor(
- () => getAllByText('Added item(s) to your cart')[0]
+ // Type in endpoint search text
+ const endpointSearchInput = await screen.findByPlaceholderText(
+ 'Search for a Globus Collection'
);
- expect(addText).toBeTruthy();
-
- // Switch to the cart page
- const cartBtn = getByTestId('cartPageLink');
- await act(async () => {
- await user.click(cartBtn);
- });
-
- // Select item for globus transfer
- const firstCheckBox = getByRole('checkbox');
- expect(firstCheckBox).toBeTruthy();
- await act(async () => {
- await user.click(firstCheckBox);
- });
-
- // Open download dropdown
- const globusTransferDropdown = within(
- getByTestId('downloadTypeSelector')
- ).getByRole('combobox');
-
- await openDropdownList(user, globusTransferDropdown);
-
- // Select wget
- const wgetOption = getAllByText(/wget/i)[2];
- expect(wgetOption).toBeTruthy();
- await act(async () => {
- await user.click(wgetOption);
- });
-
- // Start wget download
- const downloadBtn = getByText('Download');
- expect(downloadBtn).toBeTruthy();
+ expect(endpointSearchInput).toBeTruthy();
await act(async () => {
- await user.click(downloadBtn);
+ await user.type(endpointSearchInput, 'lc public{enter}');
});
- // Expect error message to show
- await waitFor(() =>
- expect(
- getAllByText(
- 'The requested resource at the ESGF wget API service was invalid.',
- { exact: false }
- )
- ).toBeTruthy()
+ // Expect an alert to show up
+ const alertPopup = await screen.findByText(
+ 'An error occurred while searching for collections. Please try again later.'
);
+ expect(alertPopup).toBeTruthy();
});
});
diff --git a/frontend/src/components/Globus/DatasetDownload.tsx b/frontend/src/components/Globus/DatasetDownload.tsx
index 7e8c23731..c0ccef210 100644
--- a/frontend/src/components/Globus/DatasetDownload.tsx
+++ b/frontend/src/components/Globus/DatasetDownload.tsx
@@ -542,6 +542,17 @@ const DatasetDownloadForm: React.FC> = () => {
} catch (error) {
// eslint-disable-next-line no-console
console.error(error);
+ setAlertPopupState({
+ content:
+ 'An error occurred while searching for collections. Please try again later.',
+ onCancelAction: () => {
+ setAlertPopupState({ ...alertPopupState, show: false });
+ },
+ onOkAction: () => {
+ setAlertPopupState({ ...alertPopupState, show: false });
+ },
+ show: true,
+ });
} finally {
setLoadingEndpointSearchResults(false);
}
diff --git a/frontend/src/components/Search/Table.test.tsx b/frontend/src/components/Search/Table.test.tsx
index c8d71b610..72fd120fe 100644
--- a/frontend/src/components/Search/Table.test.tsx
+++ b/frontend/src/components/Search/Table.test.tsx
@@ -10,7 +10,7 @@ import apiRoutes from '../../api/routes';
import customRender from '../../test/custom-render';
import Table, { Props } from './Table';
import { QualityFlag } from './Tabs';
-import { getRowName } from '../../test/jestTestFunctions';
+import { getRowName, mockConfig } from '../../test/jestTestFunctions';
const user = userEvent.setup();
@@ -451,6 +451,27 @@ describe('test main table UI', () => {
);
expect(errorMsg).toBeTruthy();
});
+
+ it('does not render Globus Ready column when globusEnabledNodes is empty', async () => {
+ // Set names of the globus enabled nodes
+ mockConfig.globusEnabledNodes = [];
+
+ customRender();
+
+ // Check table exists
+ const table = await screen.findByRole('table');
+ expect(table).toBeTruthy();
+
+ // Check first row exists
+ const firstRow = await screen.findByRole('row', {
+ name: getRowName('plus', 'question', 'foo', '3', '1', '1'),
+ });
+ expect(firstRow).toBeTruthy();
+
+ // Check Globus Ready column does not exist
+ const globusReadyColumn = screen.queryByText('Globus Ready');
+ expect(globusReadyColumn).toBeNull();
+ });
});
describe('test QualityFlag', () => {
From 69e93ef3a61a764a5571ed03130066fde1fb9cb6 Mon Sep 17 00:00:00 2001
From: downiec <42552189+downiec@users.noreply.github.com>
Date: Wed, 22 Jan 2025 11:25:34 -0800
Subject: [PATCH 7/7] Minor update to update summary.
---
frontend/public/changelog/v1.2.2.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/frontend/public/changelog/v1.2.2.md b/frontend/public/changelog/v1.2.2.md
index 0b3ef5dd9..cba272a38 100644
--- a/frontend/public/changelog/v1.2.2.md
+++ b/frontend/public/changelog/v1.2.2.md
@@ -1,3 +1,4 @@
## Summary
-1. Minor bugfixes.
+1. Fixes issue with node status display icons.
+2. Additional bugfixes.