fix(all): multiple warnings fix #2318

Open · wants to merge 18 commits into dev
2 changes: 1 addition & 1 deletion antarest/core/filesystem_blueprint.py
@@ -203,7 +203,7 @@ async def from_path(cls, full_path: Path, *, details: bool = False) -> "FileInfo
 return obj


-async def _calc_details(full_path: t.Union[str, Path]) -> t.Tuple[int, int]:
+async def _calc_details(full_path: str | Path) -> t.Tuple[int, int]:
 """Calculate the number of files and the total size of a directory recursively."""

 full_path = Path(full_path)
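Most of the warnings addressed in this PR come from swapping t.Union[...] / t.Optional[...] annotations for the PEP 604 X | Y syntax, as in the change above. A minimal sketch of the equivalence, assuming Python 3.10+ (where | between types is valid at runtime, not only inside annotations — an assumption about the project's target version, not something stated in this diff):

```python
import typing as t
from pathlib import Path

# On Python 3.10+, both spellings describe the same union.
old_style = t.Union[str, Path]
new_style = str | Path

assert old_style == new_style
assert t.get_args(new_style) == (str, Path)

# X | None is likewise equivalent to t.Optional[X].
assert (str | None) == t.Optional[str]
```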
2 changes: 0 additions & 2 deletions antarest/core/filetransfer/main.py
@@ -12,8 +12,6 @@

 from typing import Optional

-from fastapi import APIRouter, FastAPI
-
 from antarest.core.application import AppBuildContext
 from antarest.core.config import Config
 from antarest.core.filetransfer.repository import FileDownloadRepository
2 changes: 0 additions & 2 deletions antarest/core/maintenance/main.py
@@ -12,8 +12,6 @@

 from typing import Optional

-from fastapi import APIRouter, FastAPI
-
 from antarest.core.application import AppBuildContext
 from antarest.core.config import Config
 from antarest.core.interfaces.cache import ICache
2 changes: 1 addition & 1 deletion antarest/core/permissions.py
@@ -20,7 +20,7 @@
 logger = logging.getLogger(__name__)


-permission_matrix: t.Dict[str, t.Dict[str, t.Sequence[t.Union[RoleType, PublicMode]]]] = {
+permission_matrix: t.Dict[str, t.Dict[str, t.Sequence[RoleType | PublicMode]]] = {
 StudyPermissionType.READ.value: {
 "roles": [
 RoleType.ADMIN,
2 changes: 1 addition & 1 deletion antarest/core/serialization/__init__.py
@@ -22,7 +22,7 @@
 # Since pydantic v2 is written in RUST it's way faster.


-def from_json(data: t.Union[str, bytes, bytearray]) -> t.Dict[str, t.Any]:
+def from_json(data: str | bytes | bytearray) -> t.Dict[str, t.Any]:
 return ADAPTER.validate_json(data)  # type: ignore
2 changes: 0 additions & 2 deletions antarest/core/tasks/main.py
@@ -12,8 +12,6 @@

 from typing import Optional

-from fastapi import APIRouter, FastAPI
-
 from antarest.core.application import AppBuildContext
 from antarest.core.config import Config
 from antarest.core.interfaces.eventbus import DummyEventBusService, IEventBus
6 changes: 3 additions & 3 deletions antarest/core/tasks/service.py
@@ -69,7 +69,7 @@ def add_worker_task(
 self,
 task_type: TaskType,
 task_queue: str,
-task_args: t.Dict[str, t.Union[int, float, bool, str]],
+task_args: t.Dict[str, int | float | bool | str],
 name: t.Optional[str],
 ref_id: t.Optional[str],
 request_params: RequestParameters,
@@ -178,7 +178,7 @@ def _create_worker_task(
 self,
 task_id: str,
 task_type: str,
-task_args: t.Dict[str, t.Union[int, float, bool, str]],
+task_args: t.Dict[str, int | float | bool | str],
 ) -> Task:
 task_result_wrapper: t.List[TaskResult] = []

@@ -227,7 +227,7 @@ def add_worker_task(
 self,
 task_type: TaskType,
 task_queue: str,
-task_args: t.Dict[str, t.Union[int, float, bool, str]],
+task_args: t.Dict[str, int | float | bool | str],
 name: t.Optional[str],
 ref_id: t.Optional[str],
 request_params: RequestParameters,
1 change: 0 additions & 1 deletion antarest/eventbus/main.py
@@ -12,7 +12,6 @@

 from typing import Optional

-from fastapi import APIRouter, FastAPI
 from redis import Redis

 from antarest.core.application import AppBuildContext
1 change: 0 additions & 1 deletion antarest/eventbus/service.py
@@ -14,7 +14,6 @@
 import logging
 import random
 import threading
-import time
 import uuid
 from typing import Awaitable, Callable, Dict, List, Optional

5 changes: 4 additions & 1 deletion antarest/fastapi_jwt_auth/auth_jwt.py
@@ -525,12 +525,15 @@ def _verify_and_get_jwt_in_cookies(
 if not isinstance(request, (Request, WebSocket)):
 raise TypeError("request must be an instance of 'Request' or 'WebSocket'")

+cookie = None
+cookie_key = None
+
 if type_token == "access":
 cookie_key = self._access_cookie_key
 cookie = request.cookies.get(cookie_key)
 if not isinstance(request, WebSocket):
 csrf_token = request.headers.get(self._access_csrf_header_name)
-if type_token == "refresh":
+elif type_token == "refresh":
 cookie_key = self._refresh_cookie_key
 cookie = request.cookies.get(cookie_key)
 if not isinstance(request, WebSocket):
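The auth_jwt.py change fixes two separate issues: cookie and cookie_key are now initialized before the branches, so they are always bound, and the second if becomes elif so the access and refresh cases are treated as mutually exclusive. A minimal reduction of the pattern, with hypothetical names rather than the library's actual attributes:

```python
def lookup_token_cookie(type_token: str, cookies: dict[str, str]) -> str | None:
    # Without these defaults, an unexpected type_token would leave both
    # names unbound and later use of them would raise UnboundLocalError.
    cookie = None
    cookie_key = None

    if type_token == "access":
        cookie_key = "access_token_cookie"  # hypothetical cookie names
        cookie = cookies.get(cookie_key)
    elif type_token == "refresh":
        # elif: the refresh test is skipped once the access branch matched,
        # instead of being re-evaluated unconditionally.
        cookie_key = "refresh_token_cookie"
        cookie = cookies.get(cookie_key)

    return cookie


print(lookup_token_cookie("access", {"access_token_cookie": "abc123"}))  # abc123
print(lookup_token_cookie("other", {}))  # None instead of a crash
```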
3 changes: 1 addition & 2 deletions antarest/front.py
@@ -20,9 +20,8 @@
 what are the API and websocket prefixes
 """

-import re
 from pathlib import Path
-from typing import Any, List, Optional, Sequence
+from typing import Any, List, Optional

 from fastapi import FastAPI
 from starlette.middleware.base import BaseHTTPMiddleware, DispatchFunction, RequestResponseEndpoint
2 changes: 0 additions & 2 deletions antarest/launcher/main.py
@@ -12,8 +12,6 @@

 from typing import Optional

-from fastapi import APIRouter, FastAPI
-
 from antarest.core.application import AppBuildContext
 from antarest.core.config import Config
 from antarest.core.filetransfer.service import FileTransferManager
2 changes: 1 addition & 1 deletion antarest/launcher/model.py
@@ -39,7 +39,7 @@ class LauncherParametersDTO(AntaresBaseModel):
 nb_cpu: t.Optional[int] = None
 post_processing: bool = False
 time_limit: int = 240 * 3600  # Default value set to 240 hours (in seconds)
-xpansion: t.Union[XpansionParametersDTO, bool, None] = None
+xpansion: XpansionParametersDTO | bool | None = None
 xpansion_r_version: bool = False
 archive_output: bool = True
 auto_unzip: bool = True
2 changes: 0 additions & 2 deletions antarest/matrixstore/main.py
@@ -12,8 +12,6 @@

 from typing import Optional

-from fastapi import APIRouter, FastAPI
-
 from antarest.core.application import AppBuildContext
 from antarest.core.config import Config
 from antarest.core.filetransfer.service import FileTransferManager
4 changes: 2 additions & 2 deletions antarest/matrixstore/model.py
@@ -237,8 +237,8 @@ class MatrixContent(AntaresBaseModel):
 """

 data: t.List[t.List[MatrixData]]
-index: t.List[t.Union[int, str]]
-columns: t.List[t.Union[int, str]]
+index: t.List[int | str]
+columns: t.List[int | str]


 class MatrixDataSetUpdateDTO(AntaresBaseModel):
2 changes: 1 addition & 1 deletion antarest/matrixstore/repository.py
@@ -181,7 +181,7 @@ def exists(self, matrix_hash: str) -> bool:
 matrix_file = self.bucket_dir.joinpath(f"{matrix_hash}.tsv")
 return matrix_file.exists()

-def save(self, content: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
+def save(self, content: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
 """
 Saves the content of a matrix as a TSV file in the bucket directory
 and returns its SHA256 hash.
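MatrixContentRepository.save now advertises that it accepts either a nested list or a NumPy array. Its body is not part of this diff; the sketch below only illustrates how such a dual-input signature is commonly normalized, using a hypothetical helper (not the project's implementation) that hashes the content and writes it as TSV:

```python
import hashlib
from pathlib import Path

import numpy as np
import numpy.typing as npt


def save_matrix(content: list[list[float]] | npt.NDArray[np.float64], bucket_dir: Path) -> str:
    # Normalize both accepted shapes to a single ndarray up front.
    array = np.asarray(content, dtype=np.float64)
    # Serialize to TSV and name the file after the content hash.
    tsv_bytes = "\n".join("\t".join(map(str, row)) for row in array.tolist()).encode()
    matrix_hash = hashlib.sha256(tsv_bytes).hexdigest()
    (bucket_dir / f"{matrix_hash}.tsv").write_bytes(tsv_bytes)
    return matrix_hash
```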
8 changes: 4 additions & 4 deletions antarest/matrixstore/service.py
@@ -72,7 +72,7 @@ def __init__(self, matrix_content_repository: MatrixContentRepository) -> None:
 self.matrix_content_repository = matrix_content_repository

 @abstractmethod
-def create(self, data: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
+def create(self, data: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
 raise NotImplementedError()

 @abstractmethod
@@ -87,7 +87,7 @@ def exists(self, matrix_id: str) -> bool:
 def delete(self, matrix_id: str) -> None:
 raise NotImplementedError()

-def get_matrix_id(self, matrix: t.Union[t.List[t.List[float]], str]) -> str:
+def get_matrix_id(self, matrix: t.List[t.List[float]] | str) -> str:
 """
 Get the matrix ID from a matrix or a matrix link.

@@ -114,7 +114,7 @@ def __init__(self, matrix_content_repository: MatrixContentRepository):
 super().__init__(matrix_content_repository=matrix_content_repository)

 @override
-def create(self, data: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
+def create(self, data: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
 return self.matrix_content_repository.save(data)

 @override
@@ -171,7 +171,7 @@ def _from_dto(dto: MatrixDTO) -> t.Tuple[Matrix, MatrixContent]:
 return matrix, content

 @override
-def create(self, data: t.Union[t.List[t.List[MatrixData]], npt.NDArray[np.float64]]) -> str:
+def create(self, data: t.List[t.List[MatrixData]] | npt.NDArray[np.float64]) -> str:
 """
 Creates a new matrix object with the specified data.

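The context lines above also show the @override decorator on the concrete create implementations. It is a static-typing aid only: it tells the type checker that the decorated method must shadow a method on a base class. A short sketch of the pattern with simplified stand-in classes (not the project's service classes), assuming the decorator comes from typing_extensions (or typing on Python 3.12+):

```python
from abc import ABC, abstractmethod

from typing_extensions import override


class MatrixServiceBase(ABC):
    @abstractmethod
    def create(self, data: list[list[float]]) -> str:
        raise NotImplementedError()


class InMemoryMatrixService(MatrixServiceBase):
    @override  # a type checker flags this if the base class has no matching method
    def create(self, data: list[list[float]]) -> str:
        rows = len(data)
        cols = len(data[0]) if data else 0
        return f"matrix-{rows}x{cols}"
```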
2 changes: 1 addition & 1 deletion antarest/study/business/adequacy_patch_management.py
@@ -117,7 +117,7 @@ def get_value(field_info: FieldInfo) -> Any:

 return parent.get(target_name, field_info["default_value"]) if is_in_version else None

-return AdequacyPatchFormFields.construct(**{name: get_value(info) for name, info in FIELDS_INFO.items()})
+return AdequacyPatchFormFields.model_construct(**{name: get_value(info) for name, info in FIELDS_INFO.items()})

 def set_field_values(self, study: Study, field_values: AdequacyPatchFormFields) -> None:
 """
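Several of the study-business changes are pydantic v2 migrations rather than typing changes: the deprecated v1-style Model.construct(...) and Model.schema() calls become Model.model_construct(...) and Model.model_json_schema(). A minimal illustration with a throwaway model (not one of the project's DTOs):

```python
from pydantic import BaseModel


class ExampleFormFields(BaseModel):
    threshold: float = 0.0
    enabled: bool = True


# model_construct() builds an instance without running validation,
# matching the behaviour of the deprecated construct().
fields = ExampleFormFields.model_construct(threshold=1.5, enabled=False)
assert fields.threshold == 1.5

# model_json_schema() replaces the deprecated .schema() helper.
schema = ExampleFormFields.model_json_schema()
assert schema["properties"]["threshold"]["type"] == "number"
```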
2 changes: 1 addition & 1 deletion antarest/study/business/advanced_parameters_management.py
@@ -240,7 +240,7 @@ def get_value(field_info: FieldInfo) -> Any:
 parent = seeds
 return parent.get(target_name, field_info["default_value"])

-return AdvancedParamsFormFields.construct(**{name: get_value(info) for name, info in FIELDS_INFO.items()})
+return AdvancedParamsFormFields.model_construct(**{name: get_value(info) for name, info in FIELDS_INFO.items()})

 def set_field_values(self, study: Study, field_values: AdvancedParamsFormFields) -> None:
 """
2 changes: 1 addition & 1 deletion antarest/study/business/aggregator_management.py
@@ -146,7 +146,7 @@ def __init__(
 self,
 study_path: Path,
 output_id: str,
-query_file: t.Union[MCIndAreasQueryFile, MCAllAreasQueryFile, MCIndLinksQueryFile, MCAllLinksQueryFile],
+query_file: MCIndAreasQueryFile | MCAllAreasQueryFile | MCIndLinksQueryFile | MCAllLinksQueryFile,
 frequency: MatrixFrequency,
 ids_to_consider: t.Sequence[str],
 columns_names: t.Sequence[str],
4 changes: 2 additions & 2 deletions antarest/study/business/allocation_management.py
@@ -210,7 +210,7 @@ def set_allocation_form_fields(

 updated_allocations = self.get_allocation_data(study, area_id)

-return AllocationFormFields.construct(
+return AllocationFormFields.model_construct(
 allocation=[
 AllocationField.construct(area_id=area, coefficient=value)
 for area, value in updated_allocations.items()
@@ -250,4 +250,4 @@ def get_allocation_matrix(self, study: Study, all_areas: List[AreaInfoDTO]) -> A
 col_idx = columns.index(prod_area)
 array[row_idx][col_idx] = coefficient

-return AllocationMatrix.construct(index=rows, columns=columns, data=array.tolist())
+return AllocationMatrix.model_construct(index=rows, columns=columns, data=array.tolist())
2 changes: 1 addition & 1 deletion antarest/study/business/area_management.py
@@ -414,7 +414,7 @@ def update_areas_props(

 @staticmethod
 def get_table_schema() -> JSON:
-return AreaOutput.schema()
+return AreaOutput.model_json_schema()

 def get_all_areas(self, study: RawStudy, area_type: t.Optional[AreaType] = None) -> t.List[AreaInfoDTO]:
 """
4 changes: 2 additions & 2 deletions antarest/study/business/areas/hydro_management.py
@@ -10,9 +10,9 @@
 #
 # This file is part of the Antares project.

-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List

-from pydantic import Field, model_validator
+from pydantic import Field

 from antarest.study.business.all_optional_meta import all_optional_model
 from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands
2 changes: 1 addition & 1 deletion antarest/study/business/areas/renewable_management.py
@@ -427,4 +427,4 @@ def update_renewables_props(

 @staticmethod
 def get_table_schema() -> JSON:
-return RenewableClusterOutput.schema()
+return RenewableClusterOutput.model_json_schema()
4 changes: 2 additions & 2 deletions antarest/study/business/areas/st_storage_management.py
@@ -584,7 +584,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus
 ]

 # Prepare and execute commands
-commands: t.List[t.Union[CreateSTStorage, ReplaceMatrix]] = [create_cluster_cmd]
+commands: t.List[CreateSTStorage | ReplaceMatrix] = [create_cluster_cmd]
 storage_service = self.storage_service.get_storage(study)
 command_context = self.storage_service.variant_study_service.command_factory.command_context
 for source_path, new_path in zip(source_paths, new_paths):
@@ -718,4 +718,4 @@ def validate_matrix(matrix_type: STStorageTimeSeries) -> STStorageMatrix:

 @staticmethod
 def get_table_schema() -> JSON:
-return STStorageOutput.schema()
+return STStorageOutput.model_json_schema()
4 changes: 2 additions & 2 deletions antarest/study/business/areas/thermal_management.py
@@ -282,7 +282,7 @@ def update_thermals_props(

 @staticmethod
 def get_table_schema() -> JSON:
-return ThermalClusterOutput.schema()
+return ThermalClusterOutput.model_json_schema()

 def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalClusterCreation) -> ThermalClusterOutput:
 """
@@ -464,7 +464,7 @@ def duplicate_cluster(
 new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/fuelCost")

 # Prepare and execute commands
-commands: t.List[t.Union[CreateCluster, ReplaceMatrix]] = [create_cluster_cmd]
+commands: t.List[CreateCluster | ReplaceMatrix] = [create_cluster_cmd]
 storage_service = self.storage_service.get_storage(study)
 command_context = self.storage_service.variant_study_service.command_factory.command_context
 for source_path, new_path in zip(source_paths, new_paths):
8 changes: 4 additions & 4 deletions antarest/study/business/binding_constraint_management.py
@@ -145,7 +145,7 @@ class ConstraintTerm(AntaresBaseModel):
 id: t.Optional[str] = None
 weight: t.Optional[float] = None
 offset: t.Optional[int] = None
-data: t.Optional[t.Union[LinkTerm, ClusterTerm]] = None
+data: t.Optional[LinkTerm | ClusterTerm] = None

 @field_validator("id")
 def id_to_lower(cls, v: t.Optional[str]) -> t.Optional[str]:
@@ -348,7 +348,7 @@ class ConstraintOutput870(ConstraintOutput830):

 # WARNING: Do not change the order of the following line, it is used to determine
 # the type of the output constraint in the FastAPI endpoint.
-ConstraintOutput = t.Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870]
+ConstraintOutput = ConstraintOutputBase | ConstraintOutput830 | ConstraintOutput870

 OPERATOR_MATRIX_FILE_MAP = {
 BindingConstraintOperator.EQUAL: ["{bc_id}_eq"],
@@ -1150,7 +1150,7 @@ def remove_constraint_term(

 @staticmethod
 def get_table_schema() -> JSON:
-return ConstraintOutput870.schema()
+return ConstraintOutput870.model_json_schema()


 def _replace_matrices_according_to_frequency_and_version(
@@ -1177,7 +1177,7 @@ def _replace_matrices_according_to_frequency_and_version(


 def check_attributes_coherence(
-data: t.Union[ConstraintCreation, ConstraintInput],
+data: ConstraintCreation | ConstraintInput,
 study_version: StudyVersion,
 operator: BindingConstraintOperator,
 ) -> None:
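The ordering comment above ConstraintOutput still applies after the rewrite: A | B | C evaluates to a types.UnionType whose arguments keep their declaration order, just like t.Union[A, B, C]. A small sketch of that property, assuming Python 3.10+ and using stand-in classes rather than the real constraint models:

```python
import types
import typing as t


class Base: ...
class V830(Base): ...
class V870(V830): ...


ConstraintLike = Base | V830 | V870  # runtime value is a types.UnionType

assert isinstance(ConstraintLike, types.UnionType)
assert t.get_args(ConstraintLike) == (Base, V830, V870)  # order preserved
assert t.get_args(ConstraintLike) == t.get_args(t.Union[Base, V830, V870])
```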