diff --git a/components/__init__.py b/components/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/infrastructure/__init__.py b/components/infrastructure/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/infrastructure/database/__init__.py b/components/infrastructure/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/infrastructure/database/db.py b/components/infrastructure/database/db.py deleted file mode 100644 index d5011126..00000000 --- a/components/infrastructure/database/db.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Database initialization -""" diff --git a/components/infrastructure/database/impl/mongo/__init__.py b/components/infrastructure/database/impl/mongo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/infrastructure/database/impl/mongo/db.py b/components/infrastructure/database/impl/mongo/db.py deleted file mode 100644 index 70dfe6e5..00000000 --- a/components/infrastructure/database/impl/mongo/db.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Database initialization -""" -import os - -from beanie import init_beanie -from motor.motor_asyncio import AsyncIOMotorClient - -# from nmdc_runtime.infrastructure.database.impl.mongo.models import User -from components.workflow.workflow.core import get_beanie_documents - - -async def mongo_beanie_init(app): - """Initialize database service""" - document_models = get_beanie_documents() - app.db = AsyncIOMotorClient( - host=os.getenv("MONGO_HOST"), - username=os.getenv("MONGO_USERNAME"), - password=os.getenv("MONGO_PASSWORD"), - )[os.getenv("MONGO_DBNAME")] - await init_beanie(app.db, document_models=document_models) diff --git a/components/infrastructure/database/impl/mongo/models/__init__.py b/components/infrastructure/database/impl/mongo/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/infrastructure/database/impl/mongo/models/user.py b/components/infrastructure/database/impl/mongo/models/user.py deleted file mode 100644 index 1e418697..00000000 --- a/components/infrastructure/database/impl/mongo/models/user.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -User models -""" -from typing import Optional, List - -from beanie import Document, Indexed -from pydantic import EmailStr - - -# # User database representation -# class User(Document): -# class DocumentMeta: -# collection_name = "users" - -# username: Indexed(str, unique=True) -# email: Indexed(EmailStr, unique=True) -# full_name: Optional[str] = None -# site_admin: Optional[List[str]] = [] -# disabled: Optional[bool] = False - -# class Config: -# schema_extra = { -# "username": "bob", -# "email": "test@test.com", -# "full_name": "test", -# "password": "test", -# "site_admin": ["test_site"], -# "created_date": "1/1/2020", -# } - - -# class UserQueries(IUserQueries): -# """Implementation of the User query interface""" - -# async def create(self, user: UserAuth) -> UserOut: - -# auth_user = await User.get(user.username) -# if not auth_user: -# auth_user = User( -# username=user.username, -# email=user.email, -# full_name=user.full_name, -# site_admin=user.site_admin, -# password=user.password, -# ) -# await auth_user.insert() - -# if not verify_password(user.password, auth_user.password): -# return False -# return UserOut(auth_user) - -# async def update(self, user: UserUpdate) -> UserOut: -# pass diff --git a/components/infrastructure/database/models/__init__.py 
b/components/infrastructure/database/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/infrastructure/database/models/user.py b/components/infrastructure/database/models/user.py deleted file mode 100644 index 75ff350b..00000000 --- a/components/infrastructure/database/models/user.py +++ /dev/null @@ -1,10 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from typing import List - -# from nmdc_runtime.domain.users.userSchema import ( -# UserAuth, -# UserUpdate, -# UserOut, -# ) diff --git a/components/nmdc_runtime/workflow/__init__.py b/components/nmdc_runtime/workflow/__init__.py new file mode 100644 index 00000000..b159da6e --- /dev/null +++ b/components/nmdc_runtime/workflow/__init__.py @@ -0,0 +1,2 @@ +from .src.core import init_workflow_service +from .src.spec import WorkflowCreate, WorkflowUpdate diff --git a/components/nmdc_runtime/workflow/spec.py b/components/nmdc_runtime/workflow/spec.py new file mode 100644 index 00000000..c6471978 --- /dev/null +++ b/components/nmdc_runtime/workflow/spec.py @@ -0,0 +1,167 @@ +from typing import Literal, Optional, Union + +from pydantic import BaseModel, Field + + +class Sequencing(BaseModel): + Name: str = "Sequencing" + Enabled: bool = False + Git_repo: str = "" + Version: str = "v1.0.0" + Activity: Literal[ + "metagenome_sequencing_activity_set" + ] = "metagenome_sequencing_activity_set" + Predecessor: str = "" + Input_prefix: str = "" + Inputs: list = [] + + +class ReadQcAnalysisInputs(BaseModel): + input_files: str = "Metagenome Raw Reads" + informed_by: str = "" + resource: str = "NERSC-Cori" + proj: str = "" + + +class ReadQcAnalysis(BaseModel): + Name: str = "Read QC Analysis" + Enabled: bool = True + Git_repo: str = " https://github.com/microbiomedata/ReadsQC" + Version: str = "v1.0.6" + WDL: str = "rqcfilter.wdl" + Activity: Literal["read_qc_analysis_activity_set"] = "read_qc_analysis_activity_set" + Predecessor: str = "Sequencing" + Input_prefix: str = "nmdc_rqcfilter" + ID_type: str = "mgrc" + Inputs: ReadQcAnalysisInputs = ReadQcAnalysisInputs() + + +class MetagenomeAnnotationInputs(BaseModel): + input_file: str = "Assembly Contigs" + imgap_project_id: str = "actid" + resource: str = "NERSC-Cori" + proj: str = "" + informed_by: str = "" + git_url: str = "https://github.com/microbiomedata/mg_annotation" + + +class MetagenomeAnnotation(BaseModel): + Name: str = "Metagenome Annotation" + Enabled: bool = True + Git_repo: str = "https://github.com/microbiomedata/mg_annotation" + Version: str = "v1.0.0-beta" + WDL: str = "annotation_full.wdl" + Activity: Literal[ + "metagenome_annotation_activity_set" + ] = "metagenome_annotation_activity_set" + Predecessor: str = "MetagenomeAssembly" + Input_prefix: str = "annotation" + ID_type: str = "mgann" + Inputs: MetagenomeAnnotationInputs = MetagenomeAnnotationInputs() + + +class MetagenomeAssemblyInputs(BaseModel): + input_file: str = "Filtered Sequencing Reads" + rename_contig_prefix: str = "actid" + resource: str = "NERSC-Cori" + proj: str = "" + informed_by: str = "" + git_url: str = "https://github.com/microbiomedata/meta_assembly" + + +class MetagenomeAssembly(BaseModel): + Name: str = "Metagenome Assembly" + Enabled: bool = True + Git_repo: str = "https://github.com/microbiomedata/metaAssembly" + Version: str = "v1.0.3-beta" + WDL: str = "jgi_assembly.wdl" + Activity: Literal[ + "metagenome_assembly_activity_set" + ] = "metagenome_assembly_activity_set" + Predecessor: str = "Read QC Analysis" + Input_prefix: str = 
"jgi_metaASM" + ID_type: str = "mgasm" + Inputs: MetagenomeAssemblyInputs = MetagenomeAssemblyInputs() + + +class MAGsInputs(BaseModel): + input_file: str = "Assembly Contigs" + contig_file: str = "Assembly Contigs" + gff_file: str = "Functional Annotation GFF" + cath_funfam_file: str = "CATH FunFams (Functional Families) Annotation GFF" + supfam_file: str = "SUPERFam Annotation GFF" + cog_file: str = "Clusters of Orthologous Groups (COG) Annotation GFF" + pfam_file: str = "Pfam Annotation GFF" + product_names_file: str = "Product names" + tigrfam_file: str = "TIGRFam Annotation GFF" + ec_file: str = "Annotation Enzyme Commission" + ko_file: str = "Annotation KEGG Orthology" + sam_file: str = "Assembly Coverage BAM" + smart_file: str = "SMART Annotation GFF" + proteins_file: str = "Annotation Amino Acid FASTA" + gene_phylogeny_file: str = "Gene Phylogeny" + resource: str = "NERSC-Cori" + proj: str = "" + informed_by: str = "" + git_url: str = "https://github.com/microbiomedata/metaMAGs" + url_root: str = "https://data.microbiomedata.org/data/" + + +class MAGs(BaseModel): + Name: str = "MAGs" + Enabled: bool = True + Git_repo: str = "https://github.com/microbiomedata/metaMAGs" + Version: str = "v1.0.4-beta" + WDL: str = "mbin_nmdc.wdl" + Activity: Literal["mags_activity_set"] = "mags_activity_set" + Predecessor: str = "Metagenome Annotation" + Input_prefix: str = "nmdc_mags" + ID_type: str = "mgmag" + Inputs: MAGsInputs = MAGsInputs() + + +class ReadBasedAnalysisInputs(BaseModel): + input_file: str = "Filtered Sequencing Reads" + prefix: str = "actid" + resource: str = "NERSC-Cori" + proj: str = "" + informed_by: str = "" + git_url: str = "https://github.com/microbiomedata/ReadbasedAnalysis" + url_root: str = "https://data.microbiomedata.org/data/" + + +class ReadBasedAnalysis(BaseModel): + Name: str = "Readbased Analysis" + Enabled: bool = True + Git_repo: str = "https://github.com/microbiomedata/ReadbasedAnalysis" + Version: str = "v1.0.2-beta" + WDL: str = "ReadbasedAnalysis.wdl" + Activity: Literal[ + "read_based_analysis_activity_set" + ] = "read_based_analysis_activity_set" + Predecessor: str = "Read QC Analysis" + Input_prefix: str = "nmdc_rba" + ID_type: str = "mgrba" + Inputs: ReadBasedAnalysisInputs = ReadBasedAnalysisInputs() + + +class WorkflowModel(BaseModel): + workflow: Union[ + ReadQcAnalysis, + MetagenomeAssembly, + MAGs, + ReadBasedAnalysis, + Sequencing, + MetagenomeAnnotation, + ] = Field(..., discriminator="Activity") + + +def get_all_workflows(): + return [ + ReadQcAnalysis(Inputs=ReadQcAnalysisInputs()), + MetagenomeAssembly(Inputs=MetagenomeAssemblyInputs()), + MetagenomeAnnotation(Inputs=MetagenomeAnnotationInputs()), + MAGs(Inputs=MAGsInputs()), + ReadBasedAnalysis(Inputs=ReadBasedAnalysisInputs()), + Sequencing(), + ] diff --git a/components/nmdc_runtime/workflow/src/__init__.py b/components/nmdc_runtime/workflow/src/__init__.py new file mode 100644 index 00000000..94480c2b --- /dev/null +++ b/components/nmdc_runtime/workflow/src/__init__.py @@ -0,0 +1,2 @@ +from .core import init_workflow_service +from .spec import WorkflowCreate, WorkflowUpdate diff --git a/components/nmdc_runtime/workflow/src/core.py b/components/nmdc_runtime/workflow/src/core.py new file mode 100644 index 00000000..853900f5 --- /dev/null +++ b/components/nmdc_runtime/workflow/src/core.py @@ -0,0 +1,17 @@ +from .spec import ResultWithErr, WorkflowQueriesABC, WorkflowUpdate +from .store import WorkflowQueries + + +class WorkflowService: + def __init__( + self, workflow_queries: 
WorkflowQueriesABC = WorkflowQueries() + ) -> None: + self.__queries = workflow_queries + + async def update(self, workflow: WorkflowUpdate) -> ResultWithErr[dict]: + result = await self.__queries.workflow_update(workflow) + return {"data": result}, None + + +def init_workflow_service() -> WorkflowService: + return WorkflowService() diff --git a/components/nmdc_runtime/workflow/src/spec.py b/components/nmdc_runtime/workflow/src/spec.py new file mode 100644 index 00000000..ae05139c --- /dev/null +++ b/components/nmdc_runtime/workflow/src/spec.py @@ -0,0 +1,162 @@ +"""Module""" +from abc import ABC +from typing import Literal, Optional, Tuple, TypedDict, TypeVar + +from pydantic import BaseModel, HttpUrl + +T = TypeVar("T") + +ResultWithErr = Tuple[T, Optional[Exception]] + + +class WorkflowModelBase(BaseModel): + """ + Workflow mapping class. + + Fields: + - `name` - Workflow document name. + + - `enabled` - boolean representing whether workflow is enabled or not + + - `git_repo` - Repository where workflow wdl is kept. + + - `version` - Version of the workflow being used + + - `wdl` - wdl file for workflow + + - `activity` - name of associated workflow activities + + - `predecessor` - workflow that is executed before this one. + + - `trigger` - the triggering data object + - `input_prefix` - what prefix is used for inputs + + - `inputs` - map of input names to locations + + Inherited from: + + - Pydantic BaseModel + """ + + name: str + enabled: bool + git_repo: HttpUrl + version: str + wdl: str + activity: str + predecessor: list[str] + trigger: str + input_prefix: str + inputs: dict[str, str] + + +class WorkflowModelReadQC(WorkflowModelBase): + name: Literal["Read QC"] + activity: Literal["nmdc:ReadQCAnalysisActivity"] + trigger: Literal["Metagenome Raw Reads"] + + +class WorkflowModelMetagenomeAssembly(WorkflowModelBase): + name: Literal["Read QC"] + activity: Literal["nmdc:MetagenomeAssemblyActivity"] + trigger: Literal["Filtered Sequencing Reads"] + + +class WorkflowModelMetagenomeAnnotation(WorkflowModelBase): + name: Literal["Read QC"] + activity: Literal["nmdc:MetagenomeAnnotationActivity"] + trigger: Literal["Assembly Contigs"] + + +class WorkflowModelMAGs(WorkflowModelBase): + name: Literal["MAGs"] + activity: Literal["nmdc:MAGsActivity"] + trigger: Literal["Functional Annotation GFF"] + + +class WorkflowModelReadbasedAnalysis(WorkflowModelBase): + name: Literal["ReadbasedAnalysis"] + activity: Literal["nmdc:ReadbasedAnalysisActivity"] + trigger: Literal["Filtered Sequencing Reads"] + + +class Workflow(WorkflowModelBase): + name: Literal[ + "Read QC", + "Metagenome Assembly", + "Metagenome Annotation", + "MAGs", + "Readbased Analysis", + ] + activity: Literal[ + "nmdc:ReadQCAnalysisActivity", + "nmdc:MetagenomeAssemblyActivity", + "nmdc:MetagenomeAnnotationActivity", + "nmdc:MAGsActivity", + "nmdc:ReadbasedAnalysisActivity", + ] + trigger: Literal[ + "Metagenome Raw Reads", + "Filtered Sequencing Reads", + "Assembly Contigs", + "Functional Annotation GFF", + "Filtered Sequencing Reads", + ] + + +class WorkflowCreate(TypedDict): + name: str + enabled: bool + git_repo: HttpUrl + version: str + wdl: str + activity: str + predecessor: Optional[str] + trigger: str + input_prefix: str + inputs: dict[str, str] + + +class WorkflowUpdate(TypedDict, total=False): + name: str + enabled: bool + git_repo: HttpUrl + version: str + wdl: str + activity: str + predecessor: Optional[str] + trigger: str + input_prefix: str + inputs: dict[str, str] + + +class WorkflowQueriesABC(ABC): + """Querying 
functions for workflows.
+
+    Extend this class to create and wrap your queries whether they be to a
+    database, file system, or any other kind of backend.
+    """
+
+    async def workflow_create(self, creation: WorkflowCreate) -> ResultWithErr[bool]:
+        """Add a new workflow.
+
+        Args:
+            creation (WorkflowCreate): a typed dictionary containing initial data for a
+            new workflow
+
+        Returns:
+            bool: a simple boolean indicating success or failure
+        """
+        raise NotImplementedError
+
+    async def workflow_update(self, update: WorkflowUpdate) -> ResultWithErr[bool]:
+        """Update an existing workflow.
+
+        Args:
+            update (WorkflowUpdate): a typed dictionary containing the fields to be
+            updated
+
+        Returns:
+            bool: a simple boolean indicating success or failure
+        """
+        raise NotImplementedError
diff --git a/components/nmdc_runtime/workflow/src/store.py b/components/nmdc_runtime/workflow/src/store.py
new file mode 100644
index 00000000..50095c41
--- /dev/null
+++ b/components/nmdc_runtime/workflow/src/store.py
@@ -0,0 +1,70 @@
+from typing import TYPE_CHECKING, Optional
+
+from beanie import Document, Indexed, Link
+from beanie.operators import Push
+from pymongo import TEXT, IndexModel
+from pymongo.errors import DuplicateKeyError
+
+from .spec import Workflow, WorkflowCreate, WorkflowQueriesABC, WorkflowUpdate
+
+
+class WorkflowCurrent(Document, Workflow):
+    @classmethod
+    async def by_name(cls, *, name: str) -> Optional["WorkflowCurrent"]:
+        return await cls.find_one(cls.name == name)
+
+    class Settings:
+        name = "workflow_current"
+        use_revision = True
+        indexes = [
+            IndexModel([("name", TEXT)], unique=True),
+        ]
+
+
+class WorkflowRevision(Document):
+    if TYPE_CHECKING:
+        name: str
+    else:
+        name: Indexed(str, unique=True)
+    revisions: list[Workflow]
+    current: Link[WorkflowCurrent]
+
+    class Settings:
+        name = "workflow_revisions"
+
+
+class WorkflowQueries:
+    async def workflow_create(self, creation: WorkflowCreate) -> str:
+        try:
+            workflow = WorkflowCurrent(**dict(creation))
+
+            revision = WorkflowRevision(
+                name=creation["name"], revisions=[], current=workflow
+            )
+
+            await workflow.save()
+            await revision.save()
+
+            return f"{creation['name']} added to the collection"
+
+        except DuplicateKeyError as e:
+            raise e
+
+    async def workflow_update(self, update: WorkflowUpdate) -> str:
+        try:
+            workflow_old = await WorkflowCurrent.find_one(
+                WorkflowCurrent.name == update["name"]
+            )
+            if not workflow_old:
+                return f"{update['name']} doesn't exist"
+
+            await WorkflowRevision.find_one(
+                WorkflowRevision.name == workflow_old.name
+            ).update(Push({WorkflowRevision.revisions: workflow_old.dict()}))
+
+            await workflow_old.set(dict(update))
+
+            return f"{update['name']} updated to latest revision"
+
+        except Exception as e:
+            raise e
diff --git a/components/nmdc_runtime/workflow_execution_activity/__init__.py b/components/nmdc_runtime/workflow_execution_activity/__init__.py
new file mode 100644
index 00000000..f91be83e
--- /dev/null
+++ b/components/nmdc_runtime/workflow_execution_activity/__init__.py
@@ -0,0 +1,7 @@
+"""Provides methods for interacting with NMDC Workflow Execution Activities.
+
+Workflow Execution Activities are a map of relevant data a user would like to have with
+regard to job execution or instantiation within their local system.
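# Illustrative sketch only (editor's aside, not part of this patch): a minimal,
# hypothetical way to drive the workflow service defined above in core.py and store.py.
# It assumes a reachable MongoDB (the connection URL and database name below are made
# up) and that Beanie is initialised with the WorkflowCurrent and WorkflowRevision
# documents; workflow_update then pushes the prior document onto workflow_revisions.
import asyncio

from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient

from components.nmdc_runtime.workflow import WorkflowUpdate, init_workflow_service
from components.nmdc_runtime.workflow.src.store import WorkflowCurrent, WorkflowRevision


async def bump_read_qc_version() -> None:
    client = AsyncIOMotorClient("mongodb://localhost:27017")  # hypothetical connection
    await init_beanie(
        database=client["nmdc"], document_models=[WorkflowCurrent, WorkflowRevision]
    )
    service = init_workflow_service()
    # WorkflowUpdate is total=False, so only the fields being changed are required.
    update: WorkflowUpdate = {"name": "Read QC", "version": "v1.0.7"}
    result, err = await service.update(update)
    print(result, err)


if __name__ == "__main__":
    asyncio.run(bump_read_qc_version())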
+""" +from .core import ActivityService, init_activity_service +from .spec import Database, WorkflowExecutionActivity diff --git a/components/nmdc_runtime/workflow_execution_activity/core.py b/components/nmdc_runtime/workflow_execution_activity/core.py new file mode 100644 index 00000000..f483e43f --- /dev/null +++ b/components/nmdc_runtime/workflow_execution_activity/core.py @@ -0,0 +1,121 @@ +"""Core functionality of the activity service module.""" +import functools +import json +import logging +import operator +from dataclasses import Field, fields +from typing import Any, Dict, TypedDict + +from beanie import Document +from components.nmdc_runtime.workflow.spec import (WorkflowModel, + get_all_workflows) +from nmdc_schema.nmdc import Database, DataObject, WorkflowExecutionActivity + +from .store import insert_activities + + +class ActiveActivities(TypedDict): + activities: list[WorkflowExecutionActivity] + workflow: WorkflowModel + + +flatten = lambda *n: ( + e for a in n for e in (flatten(*a) if isinstance(a, (tuple, list)) else (a,)) +) + + +def get_active_activities( + activities: Database, +) -> list[ActiveActivities]: + activity_fields: tuple[Field[Database.activity_set]] = fields(activities) + active_activities: list[ActiveActivities] = [] + for field in activity_fields: + if activities[field.name] and field.name != "data_object_set": + active_activities.append( + { + "activities": activities[field.name], + "workflow": WorkflowModel(workflow={"Activity": field.name}), + } + ) + + return active_activities + + +def add_relevant_info(workflow, activity): + workflow.Inputs.proj = activity.id + workflow.Inputs.informed_by = activity.was_informed_by + return workflow + + +def construct_job_config( + activity: WorkflowExecutionActivity, name: str +) -> WorkflowModel: + workflows = get_all_workflows() + next_workflows = list(filter(lambda wf: wf.Predecessor == name, workflows)) + relevant_info = [add_relevant_info(wf, activity) for wf in next_workflows] + return relevant_info + + +def container_job(activities, name): + jobs = [construct_job_config(activity, name) for activity in activities] + return jobs + + +def parse_data_objects(activity, data_objects: list[DataObject]): + new_activity = activity.dict() + for key in new_activity["Inputs"]: + for do in data_objects: + if new_activity["Inputs"][key] == str(do.data_object_type): + new_activity["Inputs"][key] = str(do.url) # I'm very upset about this + + return new_activity + + +class ActivityService: + """Repository for interacting with nmdc workflow execution activities.""" + + def create_jobs(self, activities, data_objects): + processed_activities = list( + flatten( + [ + container_job(aa["activities"], aa["workflow"].workflow.Name) + for aa in activities + ] + ) + ) + return [ + parse_data_objects(activity, data_objects) + for activity in processed_activities + ] + + async def add_activity_set(self, activities: Database, db): + """ + Store workflow activities. + + Parameters + ---------- + activities : Database + dictionary of fields for data object creation + + db: A database + service for interacting with data objects + + Returns + ------- + list[str] + IDs for all activities added to the collection + """ + insert_activities(activities, db) + active_activities = get_active_activities(activities) + return active_activities + + +def init_activity_service() -> ActivityService: + """ + Instantiate an activity service. 
+ + Returns + ------- + ActivityService + """ + return ActivityService() diff --git a/components/nmdc_runtime/workflow_execution_activity/spec.py b/components/nmdc_runtime/workflow_execution_activity/spec.py new file mode 100644 index 00000000..010d57bc --- /dev/null +++ b/components/nmdc_runtime/workflow_execution_activity/spec.py @@ -0,0 +1,36 @@ +"""Beans.""" +from abc import ABC, abstractmethod +from pydantic.dataclasses import dataclass +from datetime import datetime +from typing import Literal, Type +from typing_extensions import TypedDict + +from nmdc_schema.nmdc import ( + DataObject, + DataObjectId, + WorkflowExecutionActivity, + WorkflowExecutionActivityId, + Database, +) +from pydantic import BaseModel, HttpUrl +from typing_extensions import NotRequired + + +class ActivityQueriesABC(ABC): + @abstractmethod + async def create_activity(self, activity_set: Database) -> str: + """Beans.""" + raise NotImplementedError + + @abstractmethod + async def list_by_id( + self, identifiers: list[str] + ) -> list[WorkflowExecutionActivity]: + """Beans.""" + raise NotImplementedError + + +class DataObjectQueriesABC(ABC): + @abstractmethod + async def create_data_object(self, data_object_set: Database) -> str: + raise NotImplementedError diff --git a/components/nmdc_runtime/workflow_execution_activity/src/core.py b/components/nmdc_runtime/workflow_execution_activity/src/core.py new file mode 100644 index 00000000..a49e354a --- /dev/null +++ b/components/nmdc_runtime/workflow_execution_activity/src/core.py @@ -0,0 +1,100 @@ +from typing import Any, Dict + +from beanie import Document + +from .spec import ActivitySet, DataObject, WorkflowExecutionActivity +from .store import (ActivityQueries, DataObjectInDb, DataObjectQueries, + WorkflowExecutionActivityInDb) + + +class DataObjectService: + """Service for handling nmdc data objects in nmdc runtime.""" + + def __init__( + self, data_object_queries: DataObjectQueries = DataObjectQueries() + ) -> None: + self.__queries = data_object_queries + + async def create_data_object(self, data_object: DataObject) -> str: + """ + A function to create a new workflow job. + + Parameters + ---------- + data_object : DataObject + dictionary of fields for data object creation + + Returns + ------- + str + DataObject identifier + """ + return await self.__queries.create_data_object(data_object) + + +class ActivityService: + """Repository for interacting with nmdc workflow execution activities.""" + + def __init__( + self, + activity_queries: ActivityQueries = ActivityQueries(), + ) -> None: + """ + Workflow execution activity service. + + By default this class loads its query machinery using mongodb, but if a user wants to + use a different db or otherwise they should inherit and overload the ActivityQueries + methods. + + Parameters + ---------- + activity_queries : ActivityQueries + Queries for activity set collection.""" + self.__queries = activity_queries + + async def add_activity_set( + self, + activities: ActivitySet, + data_object_service: DataObjectService = DataObjectService(), + ) -> list[str]: + """ + Store workflow activities. 
+ + Parameters + ---------- + activities : ActivitySet + dictionary of fields for data object creation + + data_object_service : DataObjectService + service for interacting with data objects + + Returns + ------- + list[str] + IDs for all activities added to the collection + """ + _ = [ + await data_object_service.create_data_object(data_object) + for data_object in activities.data_object_set + ] + return [ + await self.__queries.create_activity(activity) + for activity in activities.activity_set + ] + + +def init_activity_service() -> ActivityService: + """ + Instantiates an activity service. + + Returns + ------- + ActivityService + """ + return ActivityService() + + +def init_beanie_documents() -> list: + """ + Returns beanie classes for mongodb.""" + return [WorkflowExecutionActivityInDb, DataObjectInDb] diff --git a/components/nmdc_runtime/workflow_execution_activity/src/spec.py b/components/nmdc_runtime/workflow_execution_activity/src/spec.py new file mode 100644 index 00000000..43af5028 --- /dev/null +++ b/components/nmdc_runtime/workflow_execution_activity/src/spec.py @@ -0,0 +1,129 @@ +"Beans" +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Annotated, Literal, Optional, TypedDict, Union + +from pydantic import BaseModel, Field, HttpUrl +from typing_extensions import NotRequired + + +class WorkflowExecutionActivity(TypedDict): + """Definition of workflow execution activities for 3rd party modules.""" + + has_input: list[str] + part_of: list[str] + git_url: HttpUrl + version: str + has_output: list[str] + input_read_bases: int + was_informed_by: str + was_associated_with: NotRequired[str] + id: str + name: str + started_at_time: datetime + ended_at_time: NotRequired[datetime] + type: Literal[ + "nmdc:ReadQCAnalysisActivity", + "nmdc:MetagenomeAssemblyActivity", + "nmdc:MetagenomeAnnotationActivity", + "nmdc:MAGsActivity", + "nmdc:ReadbasedAnalysisActivity", + ] + execution_resource: str + + +class WorkflowExecutionActivityBase(BaseModel): + """Represents an instance of an execution of a particular workflow.""" + + has_input: list[str] + part_of: list[str] + git_url: HttpUrl + version: str + has_output: list[str] + input_read_bases: int + was_informed_by: str + was_associated_with: Optional[str] + id: str + name: str + started_at_time: datetime + ended_at_time: datetime + execution_resource: str + + +class ReadQCActivityModel(WorkflowExecutionActivityBase): + type: Literal["nmdc:ReadQCAnalysisActivity"] + output_read_count: int + output_read_bases: int + input_read_count: int + input_read_bases: int + + +class MetagenomeSequencingAnalysisActivityModel(WorkflowExecutionActivityBase): + type: Literal["nmdc:MetagenomeSequencingAnalysisActivity"] + + +WorkflowExecutionActivityModel = Annotated[ + Union[ReadQCActivityModel, MetagenomeSequencingAnalysisActivityModel], + Field(discriminator="type"), +] + + +class DataObject(TypedDict, total=False): + """An object that primarily consists of symbols that represent information. + + Files, records, and omics data are examples of data objects.""" + + file_size_bytes: int + md5_checksum: str + data_object_type: str + compression_type: NotRequired[str] + was_generated_by: NotRequired[str] + url: HttpUrl + type: str + name: str + description: str + id: str + + +class DataObjectModel(BaseModel): + """An object that primarily consists of symbols that represent information. 
+ + Files, records, and omics data are examples of data objects.""" + + file_size_bytes: int + md5_checksum: str + data_object_type: str + compression_type: Optional[str] + was_generated_by: Optional[str] + url: HttpUrl + type: str + name: str + description: str + id: str + + +class ActivitySet(BaseModel): + """More thought.""" + + activity_set: list[WorkflowExecutionActivity] + data_object_set: list[DataObject] + + +class ActivityQueriesABC(ABC): + @abstractmethod + async def create_activity(self, activity: WorkflowExecutionActivity) -> str: + """Beans""" + raise NotImplementedError + + @abstractmethod + async def list_by_id( + self, identifiers: list[str] + ) -> list[WorkflowExecutionActivity]: + """Beans""" + raise NotImplementedError + + +class DataObjectQueriesABC(ABC): + @abstractmethod + async def create_data_object(self, data_object: DataObject) -> str: + raise NotImplementedError diff --git a/components/nmdc_runtime/workflow_execution_activity/src/store.py b/components/nmdc_runtime/workflow_execution_activity/src/store.py new file mode 100644 index 00000000..814af9e7 --- /dev/null +++ b/components/nmdc_runtime/workflow_execution_activity/src/store.py @@ -0,0 +1,82 @@ +from datetime import datetime +from typing import Dict, List, Literal, TypedDict, Union, cast + +from beanie import Document, Indexed, Link +from beanie.operators import In +from pydantic import ValidationError +from pymongo.errors import DuplicateKeyError + +from .spec import (ActivityQueriesABC, DataObject, DataObjectModel, + DataObjectQueriesABC, WorkflowExecutionActivity, + WorkflowExecutionActivityModel) + + +class DataObjectInDb(Document, DataObjectModel): + id: Indexed(str, unique=True) # type: ignore + md5_checksum: Indexed(str, unique=True) # type: ignore + + class Collection: + """Describe Collection""" + + name = "data_objects" + + +class WorkflowExecutionActivityInDb(Document): + activity: WorkflowExecutionActivityModel + type: Literal[ + "nmdc:ReadQCAnalysisActivity", + "nmdc:MetagenomeAssemblyActivity", + "nmdc:MetagenomeAnnotationActivity", + "nmdc:MAGsActivity", + "nmdc:ReadbasedAnalysisActivity", + ] + id: Indexed(str, unique=True) # type: ignore + data_objects: list[Link[DataObjectInDb]] + + class Collection: + name = "workflow_execution_activities" + + +class ActivityQueries(ActivityQueriesABC): + async def create_activity(self, activity: WorkflowExecutionActivity) -> str: + """Description""" + try: + new_activity: WorkflowExecutionActivityInDb = ( + WorkflowExecutionActivityInDb.parse_obj(activity) + ) + await new_activity.insert() + return new_activity.id + except DuplicateKeyError as error: + raise error + except ValidationError as error: + raise error + + async def list_by_id( + self, identifiers: list[str] + ) -> list[WorkflowExecutionActivity]: + """Beans""" + try: + activities: list[ + WorkflowExecutionActivityInDb + ] = await WorkflowExecutionActivityInDb.find( + In(WorkflowExecutionActivityInDb.id, identifiers) + ).to_list() + return [ + cast(WorkflowExecutionActivity, activity.dict()) + for activity in activities + ] + except ValidationError as error: + raise error + + +class DataObjectQueries(DataObjectQueriesABC): + async def create_data_object(self, data_object: DataObject) -> str: + """Beans""" + try: + new_object = DataObjectInDb(**data_object) + await new_object.insert() + return new_object.id + except DuplicateKeyError as error: + raise error + except ValidationError as error: + raise error diff --git a/components/nmdc_runtime/workflow_execution_activity/store.py 
b/components/nmdc_runtime/workflow_execution_activity/store.py new file mode 100644 index 00000000..3a571c16 --- /dev/null +++ b/components/nmdc_runtime/workflow_execution_activity/store.py @@ -0,0 +1,26 @@ +import json +from dataclasses import asdict, fields +from datetime import datetime +from typing import Dict, List, Literal, TypedDict, Union, cast + +import attrs +from motor.motor_asyncio import AsyncIOMotorDatabase +from nmdc_schema.nmdc import Database +from pydantic import ValidationError +from pymongo.database import Database as MongoDatabase +from pymongo.errors import DuplicateKeyError + + +def insert_activities(activities: Database, mdb: MongoDatabase) -> bool: + """Description.""" + activity_fields = fields(activities) + for field in activity_fields: + if activities[field.name]: + collection = mdb.get_collection(field.name) + collection.insert_many( + [ + json.loads(json.dumps(activity, default=lambda o: o.__dict__)) + for activity in activities[field.name] + ] + ) + return True diff --git a/components/workflow/LICENSE b/components/workflow/LICENSE deleted file mode 100644 index d6456956..00000000 --- a/components/workflow/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/components/workflow/__init__.py b/components/workflow/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/workflow/pyproject.toml b/components/workflow/pyproject.toml deleted file mode 100644 index 31dc50ad..00000000 --- a/components/workflow/pyproject.toml +++ /dev/null @@ -1,36 +0,0 @@ -[build-system] -requires = ["hatchling>=1.9.0"] -build-backend = "hatchling.build" - -[project] -name = "workflow_nmdc_runtime" -version = "0.0.1" -authors = [ - { name="Elais Player", email="elais@fastmail.com"} -] -license = { file="LICENSE" } -requires-python = ">=3.9" -classifiers = [ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: Apache License", - "Operating System :: OS Independent", -] -dependencies = [ - "beanie>=1.11.6", - "pydantic>=1.9.1", - "semver>=3.0.0.dev3" -] - -[project.urls] -Homepage = "https://github.com/microbiomedata/nmdc-runtime" -Tracker = "https://github.com/microbiomedata/nmdc-runtime/issues" -Source = "https://github.com/pypa/hatch" - -[tool.mypy] -disallow_untyped_defs = false -follow_imports = "normal" -ignore_missing_imports = true -pretty = true -show_column_numbers = true -warn_no_return = false -warn_unused_ignores = true diff --git a/components/workflow/requirements.txt b/components/workflow/requirements.txt deleted file mode 100644 index 6c92e648..00000000 --- a/components/workflow/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -beanie==1.11.6 -pydantic==1.9.1 -semver==3.0.0.dev3 diff --git a/components/workflow/workflow/__init__.py b/components/workflow/workflow/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/components/workflow/workflow/core.py b/components/workflow/workflow/core.py deleted file mode 100644 index f759b0d3..00000000 --- a/components/workflow/workflow/core.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Any, Dict - -from .spec import DataObject, ReadsQCSequencingActivity -from .store import ( - DataObjectInDb, - DataObjectQueries, - ReadsQCSequencingActivityInDb, - ReadsQCSequencingActivityQueries, -) - - -class DataObjectService: - """Service for handling nmdc data objects in nmdc runtime.""" - - def __init__( - self, data_object_queries: DataObjectQueries = DataObjectQueries() - ) -> None: - self.__queries = data_object_queries - - async def create_data_object(self, data_object: Dict[str, Any]) -> Dict[str, Any]: - """A function to create a new workflow job - - :param data_object: Dict[str, Any] dictionary of fields for data object creation - - :return stuff: Dict[str, Any] stuff - """ - new_object = DataObject.parse_obj(data_object) - result = await self.__queries.create_data_object(new_object) - return result.dict() - - async def by_id(self, id: str) -> Dict[str, Any]: - result = await self.__queries.by_id(id) - return result.dict() - - -class ReadsQCSequencingActivityService: - """Service for handling nmdc metagenome activities in nmdc runtime.""" - - def __init__( - self, - activity_queries: ReadsQCSequencingActivityQueries = ReadsQCSequencingActivityQueries(), - ) -> None: - self.__queries = activity_queries - - async def create_mgs_activity(self, mgs_activity: Dict[str, Any]) -> Dict[str, Any]: - new_activity = ReadsQCSequencingActivity.parse_obj(mgs_activity) - result = await self.__queries.create_activity(new_activity) - return result.dict() - - async def by_id(self, id: str) -> Dict[str, Any]: - result = await self.__queries.by_id(id) - return result.dict() - - -def get_beanie_documents(): - return [DataObjectInDb, 
ReadsQCSequencingActivityInDb] - - -def get_data_object_service(): - return DataObjectService() diff --git a/components/workflow/workflow/store.py b/components/workflow/workflow/store.py deleted file mode 100644 index a85fec36..00000000 --- a/components/workflow/workflow/store.py +++ /dev/null @@ -1,70 +0,0 @@ -from datetime import datetime -from typing import Dict, List, Union - -from beanie import Document, Indexed -from pydantic import DirectoryPath, HttpUrl, ValidationError - -from .spec import ( - DataObject, - ReadsQCSequencingActivity, - IDataObjectQueries, - IReadsQCSequencingActivityQueries, -) - - -class DataObjectInDb(Document, DataObject): - id: str - - # @classmethod - # def create(cls, data_object: DataObject) -> "DataObject" : - # return await cls.in - - class Collection: - name = "data_object_set" - - -class ReadsQCSequencingActivityInDb(Document, ReadsQCSequencingActivity): - id: str - - class Collection: - name = "read_QC_analysis_activity_set" - - -class DataObjectQueries(IDataObjectQueries): - async def create_data_object(self, data_object: DataObject) -> DataObject: - try: - new_object = DataObjectInDb(**data_object.dict()) - result = await new_object.insert() - return DataObject(**result.dict()) - except ValidationError as e: - raise ValidationError from e - - async def by_id(self, id: str) -> DataObject: - try: - new_object = await DataObjectInDb.find_one(DataObjectInDb.id == id) - return DataObjectInDb - except ValidationError as e: - raise ValidationError from e - - -class ReadsQCSequencingActivityQueries(IReadsQCSequencingActivityQueries): - async def create_activity( - self, metagenome_sequencing_activity: ReadsQCSequencingActivity - ) -> ReadsQCSequencingActivity: - try: - new_activity = ReadsQCSequencingActivityInDb( - **metagenome_sequencing_activity.dict() - ) - result = await new_activity.insert() - return ReadsQCSequencingActivity(**result.dict()) - except ValidationError as e: - raise ValidationError from e - - async def by_id(self, id: str) -> ReadsQCSequencingActivity: - try: - new_activity = await ReadsQCSequencingActivityInDb.find_one( - ReadsQCSequencingActivityInDb.id == id - ) - return ReadsQCSequencingActivity(**new_activity.dict()) - except ValidationError as e: - raise ValidationError from e diff --git a/guix-manifest.scm b/guix-manifest.scm index 379f8731..19068f2b 100644 --- a/guix-manifest.scm +++ b/guix-manifest.scm @@ -6,10 +6,12 @@ (gnu packages docker) (gnu packages jupyter) (gnu packages openstack) + (gnu packages rust) (gnu packages pkg-config) (gnu packages python) (gnu packages python-build) (gnu packages python-check) + (gnu packages python-compression) (gnu packages python-xyz) (guix download) (guix build-system python) @@ -87,20 +89,168 @@ (description "This package provides a language server for Jedi!") (license license:expat))) +(define-public python-pylsp-mypy + (package + (name "python-pylsp-mypy") + (version "0.6.3") + (source (origin + (method url-fetch) + (uri (pypi-uri "pylsp-mypy" version)) + (sha256 + (base32 + "1gf865dj9na7jyp1148k27jafwb6bg0rdg9kyv4x4ag8qdlgv9h6")))) + (build-system python-build-system) + (propagated-inputs (list python-lsp-server python-mypy python-toml)) + (native-inputs (list python-coverage python-pytest python-pytest-cov + python-tox)) + (home-page "https://github.com/python-lsp/pylsp-mypy") + (synopsis "Mypy linter for the Python LSP Server") + (description "Mypy linter for the Python LSP Server") + (license #f))) + +(define-public python-pep517 + (package + (name "python-pep517") + (version 
"0.13.0") + (source (origin + (method url-fetch) + (uri (pypi-uri "pep517" version)) + (sha256 + (base32 + "0nczh9pfcin7rlgzgmfw3snypwscp3a2cdr0v6ny2aqpbiy94sdf")))) + (build-system python-build-system) + (propagated-inputs (list python-importlib-metadata python-tomli + python-zipp)) + (home-page "https://github.com/pypa/pep517") + (synopsis "Wrappers to build Python packages using PEP 517 hooks") + (description "Wrappers to build Python packages using PEP 517 hooks") + (license #f))) + +(define-public python-build + (package + (name "python-build") + (version "0.9.0") + (source (origin + (method url-fetch) + (uri (pypi-uri "build" version)) + (sha256 + (base32 + "0g5w28ban6k9qywqwdqiqms3crg75rsvfphl4f4qkg8wi57741qs")))) + (build-system python-build-system) + (propagated-inputs (list python-colorama python-importlib-metadata + python-packaging python-pep517 python-tomli)) + (native-inputs (list python-filelock + python-pytest + python-pytest-cov + python-pytest-mock + python-pytest-rerunfailures + python-pytest-xdist + python-setuptools + python-toml + python-wheel)) + (home-page "") + (synopsis "A simple, correct PEP 517 build frontend") + (description + "This package provides a simple, correct PEP 517 build frontend") + (license license:expat))) + +(define-public python-pylsp-rope + (package + (name "python-pylsp-rope") + (version "0.1.10") + (source (origin + (method url-fetch) + (uri (pypi-uri "pylsp-rope" version)) + (sha256 + (base32 + "1mydh5fp2yz5rayrp3q2ff4y39881wla0sx09cx66bwjbzqh8qcy")))) + (build-system python-build-system) + (propagated-inputs (list python-lsp-server python-rope + python-typing-extensions)) + (native-inputs (list python-build python-pytest python-twine)) + (home-page "https://github.com/python-rope/pylsp-rope") + (synopsis + "Extended refactoring capabilities for Python LSP Server using Rope.") + (description + "Extended refactoring capabilities for Python LSP Server using Rope.") + (license license:expat))) + +(define-public python-types-setuptools + (package + (name "python-types-setuptools") + (version "65.5.0.3") + (source (origin + (method url-fetch) + (uri (pypi-uri "types-setuptools" version)) + (sha256 + (base32 + "1z59bap6vchjcb5kcsxzgdvdlp1aal3323awn9lxr8pjymqr2xhp")))) + (build-system python-build-system) + (home-page "https://github.com/python/typeshed") + (synopsis "Typing stubs for setuptools") + (description "Typing stubs for setuptools") + (license #f))) + +(define-public python-types-pkg-resources + (package + (name "python-types-pkg-resources") + (version "0.1.3") + (source (origin + (method url-fetch) + (uri (pypi-uri "types-pkg-resources" version)) + (sha256 + (base32 + "1blxmgxrcc2g5g6vqcrpknzzc9m7b4rmv7fr5xb478xy7n6rnjl3")))) + (build-system python-build-system) + (home-page "https://github.com/python/typeshed") + (synopsis "Typing stubs for pkg_resources") + (description "Typing stubs for pkg_resources") + (license #f))) + +(define-public python-lsp-black + (package + (name "python-lsp-black") + (version "1.2.1") + (source (origin + (method url-fetch) + (uri (pypi-uri "python-lsp-black" version)) + (sha256 + (base32 + "1sfckmajwgil4sqfmkgxmrp7rkz1ybwf5br6rj16msbplfrfmsnp")))) + (build-system python-build-system) + (propagated-inputs (list python-black python-lsp-server python-toml)) + (native-inputs (list python-flake8 + python-isort + python-mypy + python-pre-commit + python-pytest + python-types-pkg-resoures + python-types-setuptools + python-types-toml)) + (home-page "https://github.com/python-lsp/python-lsp-black") + (synopsis "Black 
plugin for the Python LSP Server") + (description "Black plugin for the Python LSP Server") + (license #f))) + (packages->manifest (list - gnu-make - pkg-config docker-compose + gnu-make jupyter + rust + pkg-config python python-black python-coverage - python-pytest + python-flake8 python-jedi - python-jedi-language-server python-ipython - python-autopep8 - python-flake8 + python-lsp-server + python-pydantic + python-pylsp-mypy + python-pyflakes + python-pytest + python-rope + python-mccabe python-virtualenv python-yapf)) diff --git a/nmdc_runtime/api/__init__.py b/nmdc_runtime/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/nmdc_runtime/api/boot/object_types.py b/nmdc_runtime/api/boot/object_types.py index cb8d6e45..93d9295d 100644 --- a/nmdc_runtime/api/boot/object_types.py +++ b/nmdc_runtime/api/boot/object_types.py @@ -56,24 +56,6 @@ "name": "metadata changesheet", "description": "Specification for changes to existing metadata", }, - { - "id": "readqc-in", - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "name": "metadata needed for Reads QC Workflow", - "description": "metadata, in the form of a nmdc:Database, needed for nmdc:ReadQcAnalysisActivity", - }, - { - "id": "mgasmb-in", - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "name": "metadata needed for Metagenome Assembly Workflow", - "description": "metadata, in the form of a nmdc:Database, needed for nmdc:MetagenomeAssembly", - }, - { - "id": "mgasmbgen-in", - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "name": "metadata needed for Metagenome Assembled Genomes", - "description": "metadata, in the form of a nmdc:Database, needed for nmdc:MetagenomeAssembledGenomes", - }, ] _raw.extend( @@ -82,7 +64,7 @@ "id": key, "created_at": datetime(2021, 9, 14, tzinfo=timezone.utc), "name": key, - "description": spec["description"], + # "description": spec["description"], } for key, spec in nmdc_jsonschema["properties"].items() if key.endswith("_set") diff --git a/nmdc_runtime/api/boot/triggers.py b/nmdc_runtime/api/boot/triggers.py index f6b16727..d1e72f9d 100644 --- a/nmdc_runtime/api/boot/triggers.py +++ b/nmdc_runtime/api/boot/triggers.py @@ -43,26 +43,6 @@ "object_type_id": "metadata-changesheet", "workflow_id": "apply-changesheet-1.0.0", }, - { - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "object_type_id": "readqc-in", - "workflow_id": "mgasmb-1.0.1", - }, - { - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "object_type_id": "readqc-in", - "workflow_id": "rba-1.0.1", - }, - { - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "object_type_id": "mgasmb-in", - "workflow_id": "mganno-1.0.1", - }, - { - "created_at": datetime(2022, 9, 27, tzinfo=timezone.utc), - "object_type_id": "mganno-in", - "workflow_id": "mgasmbgen-1.0.1", - }, ] diff --git a/nmdc_runtime/api/main.py b/nmdc_runtime/api/main.py index 5e2cc3f3..52cc8a5b 100644 --- a/nmdc_runtime/api/main.py +++ b/nmdc_runtime/api/main.py @@ -2,7 +2,6 @@ from importlib import import_module import uvicorn -from components.infrastructure.database.impl.mongo.db import mongo_beanie_init from fastapi import APIRouter, FastAPI from fastapi.middleware.cors import CORSMiddleware from nmdc_runtime.api.core.auth import get_password_hash @@ -325,10 +324,5 @@ async def ensure_indexes(): mdb[collection_name].create_index([(spec, 1)], name=spec, background=True) -# @app.on_event("startup") -# async def init_beanie(): -# await mongo_beanie_init(app) - - if __name__ == "__main__": 
uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/nmdc_runtime/api/v1/__init__.py b/nmdc_runtime/api/v1/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/nmdc_runtime/api/v1/outputs.py b/nmdc_runtime/api/v1/outputs.py index 9cf7774e..f97e8637 100644 --- a/nmdc_runtime/api/v1/outputs.py +++ b/nmdc_runtime/api/v1/outputs.py @@ -1,42 +1,28 @@ -"""Beans.""" -import json -from typing import Any - -from bson import json_util -from dagster import ExecuteInProcessResult from fastapi import APIRouter, Depends, HTTPException -from nmdc_runtime.api.endpoints.util import ( - _claim_job, _request_dagster_run, permitted, - persist_content_and_get_drs_object, users_allowed) +from nmdc_runtime.api.db.mongo import get_mongo_db +from nmdc_runtime.api.endpoints.util import persist_content_and_get_drs_object from nmdc_runtime.api.models.site import Site, get_current_client_site -from nmdc_runtime.site.repository import repo, run_config_frozen__normal_env -from nmdc_runtime.util import unfreeze from pymongo import ReturnDocument from pymongo.database import Database as MongoDatabase from pymongo.errors import DuplicateKeyError from starlette import status -from toolz import merge - -from ..db.mongo import get_mongo_db -from ..models.object_type import DrsObjectWithTypes -from .models.ingest import Ingest router = APIRouter(prefix="/outputs", tags=["outputs"]) -@router.post( - "", - status_code=status.HTTP_201_CREATED, - response_model=dict[str, Any], -) -async def ingest( - ingest: Ingest, - mdb: MongoDatabase = Depends(get_mongo_db), - site: Site = Depends(get_current_client_site), -): - """Ingest activity set.""" - try: +# @router.post( +# "", +# status_code=status.HTTP_201_CREATED, +# ) +# async def ingest( +# # ingest: Ingest, +# mdb: MongoDatabase = Depends(get_mongo_db), +# # site: Site = Depends(get_current_client_site), +# ) -> bool: +# pass +# # try: +<<<<<<< HEAD if site is None: raise HTTPException(status_code=401, detail="Client site not found") input_dict = { @@ -83,3 +69,25 @@ async def ingest( except DuplicateKeyError as e: raise HTTPException(status_code=409, detail=e.details) +======= +# # if site is None: +# # raise HTTPException(status_code=401, detail="Client site not found") + +# # drs_obj_doc = persist_content_and_get_drs_object( +# # content=ingest.json(), +# # filename=None, +# # content_type="application/json", +# # description="input metadata for readqc-in wf", +# # id_ns="json-readqc-in", +# # ) + +# # doc_after = mdb.objects.find_one_and_update( +# # {"id": drs_obj_doc["id"]}, +# # {"$set": {"types": ["readqc-in"]}}, +# # return_document=ReturnDocument.AFTER, +# # ) +# # return doc_after + +# # except DuplicateKeyError as e: +# # raise HTTPException(status_code=409, detail=e.details) +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) diff --git a/nmdc_runtime/api/v1/router.py b/nmdc_runtime/api/v1/router.py index 2f4203ea..a0209e30 100644 --- a/nmdc_runtime/api/v1/router.py +++ b/nmdc_runtime/api/v1/router.py @@ -2,7 +2,8 @@ # from . import users from . 
import outputs +from .workflows import activities router_v1 = APIRouter(prefix="/v1", responses={404: {"description": "Not found"}}) -router_v1.include_router(outputs.router) +router_v1.include_router(activities.router) diff --git a/nmdc_runtime/site/ops.py b/nmdc_runtime/site/ops.py index aac33d79..bb3b1098 100644 --- a/nmdc_runtime/site/ops.py +++ b/nmdc_runtime/site/ops.py @@ -10,51 +10,36 @@ from zipfile import ZipFile import fastjsonschema -from bson import json_util, ObjectId -from dagster import ( - List, - String, - op, - Out, - AssetMaterialization, - AssetKey, - MetadataValue, - Output, - Failure, - RetryPolicy, - OpExecutionContext, -) +from bson import ObjectId, json_util +from dagster import (AssetKey, AssetMaterialization, Failure, List, + MetadataValue, OpExecutionContext, Out, Output, + RetryPolicy, String, op) from fastjsonschema import JsonSchemaValueException from gridfs import GridFS -from nmdc_schema.nmdc_data import get_nmdc_jsonschema_dict -from pydantic import BaseModel -from pymongo.database import Database as MongoDatabase -from starlette import status -from terminusdb_client.woqlquery import WOQLQuery as WQ -from toolz import get_in, dissoc, assoc - from nmdc_runtime.api.core.idgen import generate_one_id -from nmdc_runtime.api.core.metadata import df_from_sheet_in, _validate_changesheet -from nmdc_runtime.api.core.metadata import map_id_to_collection, get_collection_for_id -from nmdc_runtime.api.core.util import dotted_path_for, now, json_clean -from nmdc_runtime.api.models.job import JobOperationMetadata, Job +from nmdc_runtime.api.core.metadata import (_validate_changesheet, + df_from_sheet_in, + get_collection_for_id, + map_id_to_collection) +from nmdc_runtime.api.core.util import dotted_path_for, json_clean, now +from nmdc_runtime.api.models.job import Job, JobOperationMetadata from nmdc_runtime.api.models.metadata import ChangesheetIn -from nmdc_runtime.api.models.operation import ( - Operation, - ObjectPutMetadata, - UpdateOperationRequest, -) +from nmdc_runtime.api.models.operation import (ObjectPutMetadata, Operation, + UpdateOperationRequest) from nmdc_runtime.api.models.run import _add_run_complete_event from nmdc_runtime.api.models.util import ResultT from nmdc_runtime.site.drsobjects.ingest import mongo_add_docs_result_as_dict -from nmdc_runtime.site.drsobjects.registration import specialize_activity_set_docs +from nmdc_runtime.site.drsobjects.registration import \ + specialize_activity_set_docs from nmdc_runtime.site.resources import RuntimeApiSiteClient -from nmdc_runtime.site.util import run_and_log, collection_indexed_on_id -from nmdc_runtime.util import ( - put_object, - drs_object_in_for, - pluralize, -) +from nmdc_runtime.site.util import collection_indexed_on_id, run_and_log +from nmdc_runtime.util import drs_object_in_for, pluralize, put_object +from nmdc_schema.nmdc_data import get_nmdc_jsonschema_dict +from pydantic import BaseModel +from pymongo.database import Database as MongoDatabase +from starlette import status +from terminusdb_client.woqlquery import WOQLQuery as WQ +from toolz import assoc, dissoc, get_in @op diff --git a/requirements/dev.txt b/requirements/dev.txt index 92e016f1..840b8794 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -122,6 +122,7 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich +<<<<<<< HEAD coverage[toml]==7.0.4 \ 
--hash=sha256:0322354757b47640535daabd2d56384ff3cad2896248fc84d328c5fad4922d5c \ --hash=sha256:053cdc47cae08257051d7e934a0de4d095b60eb8a3024fa9f1b2322fa1547137 \ @@ -174,6 +175,60 @@ coverage[toml]==7.0.4 \ --hash=sha256:f3d485e6ec6e09857bf2115ece572d666b7c498377d4c70e66bb06c63ed177c2 \ --hash=sha256:f684d88eb4924ed0630cf488fd5606e334c6835594bb5fe36b50a509b10383ed \ --hash=sha256:f6c4ad409a0caf7e2e12e203348b1a9b19c514e7d078520973147bf2d3dcbc6f +======= +coverage[toml]==7.0.5 \ + --hash=sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45 \ + --hash=sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809 \ + --hash=sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4 \ + --hash=sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b \ + --hash=sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7 \ + --hash=sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0 \ + --hash=sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0 \ + --hash=sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea \ + --hash=sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2 \ + --hash=sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a \ + --hash=sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45 \ + --hash=sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b \ + --hash=sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209 \ + --hash=sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca \ + --hash=sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab \ + --hash=sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095 \ + --hash=sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7 \ + --hash=sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6 \ + --hash=sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af \ + --hash=sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499 \ + --hash=sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831 \ + --hash=sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637 \ + --hash=sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2 \ + --hash=sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb \ + --hash=sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029 \ + --hash=sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc \ + --hash=sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8 \ + --hash=sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f \ + --hash=sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2 \ + --hash=sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d \ + --hash=sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289 \ + --hash=sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c \ + --hash=sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded \ + --hash=sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96 \ + --hash=sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0 \ + --hash=sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904 \ + --hash=sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21 \ + 
--hash=sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89 \ + --hash=sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78 \ + --hash=sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad \ + --hash=sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196 \ + --hash=sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd \ + --hash=sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0 \ + --hash=sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882 \ + --hash=sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757 \ + --hash=sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16 \ + --hash=sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0 \ + --hash=sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47 \ + --hash=sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40 \ + --hash=sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1 \ + --hash=sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # -r requirements/dev.in # pytest-cov @@ -265,12 +320,45 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes +mypy==0.991 \ + --hash=sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d \ + --hash=sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6 \ + --hash=sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf \ + --hash=sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f \ + --hash=sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813 \ + --hash=sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33 \ + --hash=sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad \ + --hash=sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05 \ + --hash=sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297 \ + --hash=sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06 \ + --hash=sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd \ + --hash=sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243 \ + --hash=sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305 \ + --hash=sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476 \ + --hash=sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711 \ + --hash=sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70 \ + --hash=sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5 \ + --hash=sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461 \ + --hash=sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab \ + --hash=sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c \ + --hash=sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d \ + --hash=sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135 \ + --hash=sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93 \ + --hash=sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648 \ + 
--hash=sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a \ + --hash=sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb \ + --hash=sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3 \ + --hash=sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372 \ + --hash=sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb \ + --hash=sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef + # via -r requirements/dev.in mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 # via # -c requirements/main.txt # black + # mypy packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 @@ -391,6 +479,7 @@ tomli==2.0.1 \ # -c requirements/main.txt # black # coverage + # mypy # pytest # setuptools-scm twine==4.0.2 \ @@ -403,10 +492,17 @@ typing-extensions==4.4.0 \ # via # -c requirements/main.txt # black + # mypy # setuptools-scm +<<<<<<< HEAD urllib3==1.26.13 \ --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +======= +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # -c requirements/main.txt # requests diff --git a/requirements/main.in b/requirements/main.in index afe440c0..fc71d587 100644 --- a/requirements/main.in +++ b/requirements/main.in @@ -1,6 +1,10 @@ base32-lib beautifulsoup4 +<<<<<<< HEAD beanie>=1.11.0 +======= +beanie>=1.16.0 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) boto3 click dagit @@ -24,12 +28,16 @@ mkdocs-jupyter mkdocs-material mkdocs-mermaid2-plugin motor -nmdc-schema==3.2.0 +nmdc-schema>=7.1.0 openpyxl pandas passlib[bcrypt] pymongo +<<<<<<< HEAD pydantic[email]>=1.9.0 +======= +pydantic[email]>=1.10.0 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) python-jose[cryptography] python-multipart pyyaml diff --git a/requirements/main.txt b/requirements/main.txt index 9d9642d3..6da3a07c 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -4,13 +4,19 @@ # # pip-compile --generate-hashes --output-file=requirements/main.txt requirements/main.in # -alabaster==0.7.12 \ - --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \ - --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 +alabaster==0.7.13 \ + --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ + --hash=sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2 # via sphinx +<<<<<<< HEAD alembic==1.9.1 \ --hash=sha256:a9781ed0979a20341c2cbb56bd22bd8db4fc1913f955e705444bd3a97c59fa32 \ --hash=sha256:f9f76e41061f5ebe27d4fe92600df9dd612521a7683f904dab328ba02cffa5a2 +======= +alembic==1.9.2 \ + --hash=sha256:6880dec4f28dd7bd999d2ed13fbe7c9d4337700a44d11a524c0ce0c59aaf0dbd \ + --hash=sha256:e8a6ff9f3b1887e1fed68bfb8fb9a000d8f61c21bdcc85b67bb9f87fcbc4fce3 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via dagster aniso8601==9.0.1 \ --hash=sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f \ @@ -147,6 +153,7 @@ 
bleach==5.0.1 \ --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c # via nbconvert +<<<<<<< HEAD boto3==1.26.46 \ --hash=sha256:96055651f7be882175aa334ad46528e1ad79fb8ca33fa9c3998cc1d985b34eab \ --hash=sha256:e24d65c31780c208768ebcd152d8a0181591c9c8e7d971e23f318d7f41910ba1 @@ -154,6 +161,15 @@ boto3==1.26.46 \ botocore==1.29.46 \ --hash=sha256:78bf25933e35eb6354a9e80fe156f86dce4d346a92afe364dfce25c17ab0639f \ --hash=sha256:dbac2fde265f13beb9191ec3ff63b90b515e9ed63875edc3afbd72c5f585e48b +======= +boto3==1.26.51 \ + --hash=sha256:b4aefdc72191c40a0155511b9ce933c94dcbdd1834ffc1204e90a30e7849ef13 \ + --hash=sha256:d599ce626b03e7236b0cda051c3cedc128fd75e0ec2f799fab9b2eabdf32d945 + # via -r requirements/main.in +botocore==1.29.51 \ + --hash=sha256:bbb92420902b4d9e4b854fcfae20d1029f1c3396e0579894f115278bc51d6198 \ + --hash=sha256:f2f521fbd2343879f3c2d42392c88f1e7f15ea147a6dc5a3dab7b8686d90fcb6 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # boto3 # s3transfer @@ -238,9 +254,95 @@ chardet==5.1.0 \ # via # pyshex # pyshexc -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f +charset-normalizer==3.0.1 \ + --hash=sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b \ + --hash=sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42 \ + --hash=sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d \ + --hash=sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b \ + --hash=sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a \ + --hash=sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59 \ + --hash=sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154 \ + --hash=sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1 \ + --hash=sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c \ + --hash=sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a \ + --hash=sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d \ + --hash=sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6 \ + --hash=sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b \ + --hash=sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b \ + --hash=sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783 \ + --hash=sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5 \ + --hash=sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918 \ + --hash=sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555 \ + --hash=sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639 \ + --hash=sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786 \ + --hash=sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e \ + --hash=sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed \ + --hash=sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820 \ + --hash=sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8 \ + --hash=sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3 \ + --hash=sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541 \ + 
--hash=sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14 \ + --hash=sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be \ + --hash=sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e \ + --hash=sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76 \ + --hash=sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b \ + --hash=sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c \ + --hash=sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b \ + --hash=sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3 \ + --hash=sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc \ + --hash=sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6 \ + --hash=sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59 \ + --hash=sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4 \ + --hash=sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d \ + --hash=sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d \ + --hash=sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3 \ + --hash=sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a \ + --hash=sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea \ + --hash=sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6 \ + --hash=sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e \ + --hash=sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603 \ + --hash=sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24 \ + --hash=sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a \ + --hash=sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58 \ + --hash=sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678 \ + --hash=sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a \ + --hash=sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c \ + --hash=sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6 \ + --hash=sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18 \ + --hash=sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174 \ + --hash=sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317 \ + --hash=sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f \ + --hash=sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc \ + --hash=sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837 \ + --hash=sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41 \ + --hash=sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c \ + --hash=sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579 \ + --hash=sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753 \ + --hash=sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8 \ + --hash=sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291 \ + --hash=sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087 \ + --hash=sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866 \ + --hash=sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3 \ + 
--hash=sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d \ + --hash=sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1 \ + --hash=sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca \ + --hash=sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e \ + --hash=sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db \ + --hash=sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72 \ + --hash=sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d \ + --hash=sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc \ + --hash=sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539 \ + --hash=sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d \ + --hash=sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af \ + --hash=sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b \ + --hash=sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602 \ + --hash=sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f \ + --hash=sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478 \ + --hash=sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c \ + --hash=sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e \ + --hash=sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479 \ + --hash=sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7 \ + --hash=sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8 # via requests click==8.1.3 \ --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ @@ -300,6 +402,7 @@ cryptography==39.0.0 \ --hash=sha256:f964c7dcf7802d133e8dbd1565914fa0194f9d683d82411989889ecd701e8adf \ --hash=sha256:fec8b932f51ae245121c4671b4bbc030880f363354b2f0e0bd1366017d891458 # via python-jose +<<<<<<< HEAD curies==0.4.1 \ --hash=sha256:074d8243a26b44cb23f45b6939b4d1874004798913dcf4689261c7fecffe5031 \ --hash=sha256:1790408f24ea53cd0d9003e0b1cd6424e41fd8cc8fc4a66e87d7cd6cda175df7 @@ -311,11 +414,25 @@ dagit==1.1.9 \ dagster==1.1.9 \ --hash=sha256:73a5f1c3f6d6ba154757cfca4245fdd7b1992ad32cccbad4273f8cd9371f02f3 \ --hash=sha256:cb9d106ec541b3b09fa532c3d095399a3ad989f9a6179b585d6daf1561e106a5 +======= +curies==0.4.2 \ + --hash=sha256:91d6993a59270c3c280d3689e26001eb7c098d319f9a9f102c060943fa2bc44b \ + --hash=sha256:bdf862d9320b3f0c1d4a81532b07457da5370aa72bc6189099480ac0db80a6f7 + # via linkml-runtime +dagit==1.1.10 \ + --hash=sha256:283f6f9c25b07acb072f4962d4cb3bdeda23d9a178b98e115b8b9399ef1ea9dd \ + --hash=sha256:71b755d2ea214222266e95b7dafb5c8c07c933ca1a4d3ea16ab975aefcdbd7c1 + # via -r requirements/main.in +dagster==1.1.10 \ + --hash=sha256:17d647432548b6eb78c12c0d0d9efaf3c3f2694293488b80670c1ad1233c3b20 \ + --hash=sha256:8a20d6da0adba8b947fc07d62d84e468252b8137016dea60bebae4024760c056 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # -r requirements/main.in # dagit # dagster-graphql # dagster-postgres +<<<<<<< HEAD dagster-graphql==1.1.9 \ --hash=sha256:114adadc0862f81950a2229fea7c37d952c8fe628ea24b28b216be501643036f \ --hash=sha256:91db616593cdf834e54430112869e0029f6227cad0829ee97d75d85f59de30d4 @@ -325,6 +442,17 @@ dagster-graphql==1.1.9 \ dagster-postgres==0.17.9 \ --hash=sha256:b4abf757a7dd7c9d8caebd02fa2c37d8be66bed03ef7f034178b4de7f489d911 \ 
--hash=sha256:db63995b3ff1395c624e2b708dd6f57f3d7f01b6772bee8434af175ebd113d9e +======= +dagster-graphql==1.1.10 \ + --hash=sha256:3d58430e4ecb47fc8ccf73da8cdc665d376b8c442d4ac04a53b2e3b1ab6c807b \ + --hash=sha256:a0cdd869deb1271ae7e01ef879e960364a93b6dc48f5bccc48bb66f414ee2106 + # via + # -r requirements/main.in + # dagit +dagster-postgres==0.17.10 \ + --hash=sha256:4565baadc43c23a2de956a27b119f56b994884fbc2c9e27c856e873b0485c197 \ + --hash=sha256:c1f1221dc42feadfb7f41b36e1770b0ae35f88eb0eaba8737dd8bde927a74f9c +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via -r requirements/main.in debugpy==1.6.5 \ --hash=sha256:048368f121c08b00bbded161e8583817af5055982d2722450a69efe2051621c2 \ @@ -436,9 +564,15 @@ distlib==0.3.6 \ --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e # via virtualenv +<<<<<<< HEAD dnspython==2.2.1 \ --hash=sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e \ --hash=sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f +======= +dnspython==2.3.0 \ + --hash=sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9 \ + --hash=sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # email-validator # pymongo @@ -766,9 +900,15 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest +<<<<<<< HEAD ipykernel==6.20.1 \ --hash=sha256:a314e6782a4f9e277783382976b3a93608a3787cd70a235b558b47f875134be1 \ --hash=sha256:f6016ecbf581d0ea6e29ba16cee6cc1a9bbde3835900c46c6571a791692f4139 +======= +ipykernel==6.20.2 \ + --hash=sha256:1893c5b847033cd7a58f6843b04a9349ffb1031bc6588401cadc9adb58da428e \ + --hash=sha256:5d0675d5f48bf6a95fd517d7b70bcb3b2c5631b2069949b5c2d6e1d7477fb5a0 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # ipywidgets # jupyter @@ -947,9 +1087,15 @@ jupyter==1.0.0 \ --hash=sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78 \ --hash=sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f # via -r requirements/main.in +<<<<<<< HEAD jupyter-client==7.4.8 \ --hash=sha256:109a3c33b62a9cf65aa8325850a0999a795fac155d9de4f7555aef5f310ee35a \ --hash=sha256:d4a67ae86ee014bcb96bd8190714f6af921f2b0f52f4208b086aa5acfd9f8d65 +======= +jupyter-client==7.4.9 \ + --hash=sha256:214668aaea208195f4c13d28eb272ba79f945fc0cf3f11c7092c20b2ca1980e7 \ + --hash=sha256:52be28e04171f07aed8f20e1616a5a552ab9fee9cbbe6c1896ae170c3880d392 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # ipykernel # jupyter-console @@ -975,6 +1121,7 @@ jupyter-core==5.1.3 \ # nbformat # notebook # qtconsole +<<<<<<< HEAD jupyter-events==0.6.0 \ --hash=sha256:587f3055fe965f023b23b9929b22d2070e8b5c79ef0a42e37bdb12199862f63c \ --hash=sha256:bee793d06e124c5a80da3346f96f17aec5e0f28b632a514682b2a18ff548c69a @@ -982,6 +1129,15 @@ jupyter-events==0.6.0 \ jupyter-server==2.0.6 \ --hash=sha256:6a4c9a3f9fa8679015954586944a568b911a98d7480ae1d56ff55a6a4f055254 \ --hash=sha256:8dd75992e90b7ca556794a1ed5cca51263c697abc6d0df561af574aa1c0a033f +======= +jupyter-events==0.6.3 \ + --hash=sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17 \ + --hash=sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3 + # via jupyter-server 
+jupyter-server==2.1.0 \ + --hash=sha256:90cd6f2bd0581ddd9b2dbe82026a0f4c228a1d95c86e22460efbfdfc931fcf56 \ + --hash=sha256:efaae5e4f0d5f22c7f2f2dc848635036ee74a2df02abed52d30d9d95121ad382 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # jupyterlab # jupyterlab-server @@ -999,9 +1155,15 @@ jupyterlab-pygments==0.2.2 \ --hash=sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f \ --hash=sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d # via nbconvert +<<<<<<< HEAD jupyterlab-server==2.18.0 \ --hash=sha256:2ce377afe6c5f762e933de1d942cad1ec07a1fbace4b586cd7a905fd57892695 \ --hash=sha256:7830f085debc9417a72ebf482dc5cb477d6bf76884826c73182fa457c7829df4 +======= +jupyterlab-server==2.19.0 \ + --hash=sha256:51f6922e34f9f3db875051f4f7b57539a04ddd030f42d9ce6062dedf67bf7f2f \ + --hash=sha256:9aec21a2183bbedd9f91a86628355449575f1862d88b28ad5f905019d31e6c21 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via jupyterlab jupyterlab-widgets==3.0.5 \ --hash=sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5 \ @@ -1047,19 +1209,32 @@ libcst==0.4.9 \ --hash=sha256:f6ce794483d4c605ef0f5b199a49fb6996f9586ca938b7bfef213bd13858d7ab \ --hash=sha256:f9679177391ccb9b0cdde3185c22bf366cb672457c4b7f4031fcb3b5e739fbd6 # via shed +<<<<<<< HEAD linkml==1.4.1 \ --hash=sha256:071488b462f4b71d8b0296d3517fae2edf7e0d3041fb1a2ab5d5debf206e01f8 \ --hash=sha256:b936126fbf3942ff138ddb20d25a45ddaf024d00072e8ee14057f041e1f3ccd4 # via # -r requirements/main.in # nmdc-schema +======= +linkml==1.4.2 \ + --hash=sha256:4fd809a56b34b3489ae9180bc83f74a66dcac26fd00eebf294b10a8ad2527086 \ + --hash=sha256:f4eefa657c13ddb279402db9f7660acafdf2d021c17ae2fa3c36b274832dfe21 + # via -r requirements/main.in +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) linkml-dataops==0.1.0 \ --hash=sha256:193cf7f659e5f07946d2c2761896910d5f7151d91282543b1363801f68307f4c \ --hash=sha256:4550eab65e78b70dc3b9c651724a94ac2b1d1edb2fbe576465f1d6951a54ed04 # via linkml +<<<<<<< HEAD linkml-runtime==1.4.1 \ --hash=sha256:1c5eedcfcf0de4d3f0527ad256f230b872ab117b4391a9057246a045d180e4a6 \ --hash=sha256:ad106bc1aaf41debdb247d0d89b41d8bb2ed71feeb2ab7d326772177cbfc2974 +======= +linkml-runtime==1.4.2 \ + --hash=sha256:40d0a6deaafcb90599f426aa7e3099c5379fe49f125c3868e19536d10549163c \ + --hash=sha256:d21fa31b0e6b0610e877d15c266a776f8ca31b6420810a44a05f73228ba3e6d2 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # -r requirements/main.in # linkml @@ -1162,47 +1337,57 @@ markdown-it-py==2.1.0 \ # jupytext # mdit-py-plugins # myst-parser -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 
\ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markupsafe==2.1.2 \ + --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ + --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ + --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ + --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ + --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ + --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ + --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ + --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ + --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ + --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ + --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ + --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ + --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ + 
--hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ + --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ + --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ + --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ + --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ + --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ + --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ + --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ + --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ + --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ + --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ + --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ + --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ + --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ + --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ + --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ + --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ + --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ + --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ + --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ + --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ + --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ + --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ + --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ + --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ + --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ + --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ + --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ + --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ + --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ + --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ + --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ + --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ + --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ + --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ + --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ + --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 # via # jinja2 # mako @@ -1367,9 +1552,15 @@ nbconvert==6.5.4 \ # mkdocs-jupyter # nbclassic # notebook +<<<<<<< HEAD nbformat==5.7.1 \ --hash=sha256:3810a0130453ed031970521d20989b8a592f3c2e73283a8280ae34ae1f75b3f8 \ --hash=sha256:e52ab802ce7f7a2863861e914642f021b9d7c23ad9726d14c36df92a79acd754 +======= +nbformat==5.7.3 \ + --hash=sha256:22a98a6516ca216002b0a34591af5bcb8072ca6c63910baffc901cfa07fefbf0 \ + --hash=sha256:4b021fca24d3a747bf4e626694033d792d594705829e5e35b14ee3369f9f6477 
+>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # jupyter-server # jupytext @@ -1385,9 +1576,9 @@ nest-asyncio==1.5.6 \ # jupyter-client # nbclassic # notebook -nmdc-schema==3.2.0 \ - --hash=sha256:f037fa2c334617296166db8e7a4bbc88434008ff1d5e7a13613f39a0c3664615 \ - --hash=sha256:f8cca315031de4949cba1af722497a3669495123a051ca305acde7fdf894c3b9 +nmdc-schema==7.1.6 \ + --hash=sha256:34c1164c69a761adaea52623e022132a1142e02e40da5d3ef3bc5eb0cd5c5642 \ + --hash=sha256:922c4ca1f229b459f8b1bc6c6f143d960558119eb3e43902a343f89f6a651295 # via -r requirements/main.in notebook==6.5.2 \ --hash=sha256:c1897e5317e225fc78b45549a6ab4b668e4c996fd03a04e938fe5e7af2bfffd0 \ @@ -1433,16 +1624,15 @@ numpydoc==1.5.0 \ --hash=sha256:b0db7b75a32367a0e25c23b397842c65e344a1206524d16c8069f0a1c91b5f4c \ --hash=sha256:c997759fb6fc32662801cece76491eedbc0ec619b514932ffd2b270ae89c07f9 # via terminusdb-client -openpyxl==3.0.7 \ - --hash=sha256:46af4eaf201a89b610fcca177eed957635f88770a5462fb6aae4a2a52b0ff516 \ - --hash=sha256:6456a3b472e1ef0facb1129f3c6ef00713cebf62e736cd7a75bcc3247432f251 +openpyxl==3.0.10 \ + --hash=sha256:0ab6d25d01799f97a9464630abacbb34aafecdcaa0ef3cba6d6b3499867d0355 \ + --hash=sha256:e47805627aebcf860edb4edf7987b1309c1b3632f3750538ed962bbcc3bd7449 # via # -r requirements/main.in # linkml - # nmdc-schema -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.0 \ + --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \ + --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97 # via # dagster # ipykernel @@ -1484,9 +1674,12 @@ pandas==1.5.2 \ --hash=sha256:e9dbacd22555c2d47f262ef96bb4e30880e5956169741400af8b306bbb24a273 \ --hash=sha256:f6257b314fc14958f8122779e5a1557517b0f8e500cfb2bd53fa1f75a8ad0af2 # via -r requirements/main.in +<<<<<<< HEAD pandoc==2.3 \ --hash=sha256:e772c2c6d871146894579828dbaf1efd538eb64fc7e71d4a6b3a11a18baef90d # via nmdc-schema +======= +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) pandocfilters==1.5.0 \ --hash=sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38 \ --hash=sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f @@ -1550,6 +1743,7 @@ pluggy==1.0.0 \ # via # pytest # tox +<<<<<<< HEAD plumbum==1.8.1 \ --hash=sha256:07cf5f50bf739e91fb83ce304fc66b41dbd12db4d4546ff5266087dd9d148314 \ --hash=sha256:88a40fc69247d0cd585e21ca169b3820f46c484535102e16455d2202727bb37b @@ -1560,6 +1754,12 @@ ply==3.11 \ # via # jsonpath-ng # pandoc +======= +ply==3.11 \ + --hash=sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3 \ + --hash=sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce + # via jsonpath-ng +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) prefixcommons==0.1.12 \ --hash=sha256:16dbc0a1f775e003c724f19a694fcfa3174608f5c8b0e893d494cf8098ac7f8b \ --hash=sha256:22c4e2d37b63487b3ab48f0495b70f14564cb346a15220f23919eb0c1851f69f @@ -1790,9 +1990,15 @@ pyjsg==0.11.10 \ # linkml # pyshexc # shexjsg +<<<<<<< HEAD pymdown-extensions==9.9 \ --hash=sha256:0f8fb7b74a37a61cc34e90b2c91865458b713ec774894ffad64353a5fce85cfc \ --hash=sha256:ac698c15265680db5eb13cd4342abfcde2079ac01e5486028f47a1b41547b859 +======= +pymdown-extensions==9.9.1 \ + --hash=sha256:8a8973933ab45b6fe8f5f8da1de25766356b1f91dee107bf4a34efd158dc340b \ + 
--hash=sha256:abed29926960bbb3b40f5ed5fa6375e29724d4e3cb86ced7c2bbd37ead1afeea +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # mkdocs-material # mkdocs-mermaid2-plugin @@ -1877,9 +2083,13 @@ pymongo==4.3.3 \ pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc +<<<<<<< HEAD # via # packaging # rdflib +======= + # via rdflib +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) pyrsistent==0.19.3 \ --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \ --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \ @@ -1919,9 +2129,15 @@ pyshexc==0.9.1 \ # via # linkml # pyshex +<<<<<<< HEAD pytest==7.2.0 \ --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 +======= +pytest==7.2.1 \ + --hash=sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5 \ + --hash=sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42 +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via pytest-logging pytest-logging==2015.11.4 \ --hash=sha256:cec5c85ecf18aab7b2ead5498a31b9f758680ef5a902b9054ab3f2bdbb77c896 @@ -1959,9 +2175,15 @@ python-multipart==0.0.5 \ pytrie==0.4.0 \ --hash=sha256:8f4488f402d3465993fb6b6efa09866849ed8cda7903b50647b7d0342b805379 # via curies +<<<<<<< HEAD pytz==2022.7 \ --hash=sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a \ --hash=sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd +======= +pytz==2022.7.1 \ + --hash=sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0 \ + --hash=sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a +>>>>>>> 0474cb4 (feat(workflow_automation): add activities) # via # babel # dagster @@ -2017,7 +2239,6 @@ pyyaml==6.0 \ --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 # via # -r requirements/main.in - # dagit # dagster # json-flattener # jupyter-events @@ -2036,81 +2257,84 @@ pyyaml-env-tag==0.1 \ --hash=sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb \ --hash=sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069 # via mkdocs -pyzmq==24.0.1 \ - --hash=sha256:0108358dab8c6b27ff6b985c2af4b12665c1bc659648284153ee501000f5c107 \ - --hash=sha256:07bec1a1b22dacf718f2c0e71b49600bb6a31a88f06527dfd0b5aababe3fa3f7 \ - --hash=sha256:0e8f482c44ccb5884bf3f638f29bea0f8dc68c97e38b2061769c4cb697f6140d \ - --hash=sha256:0ec91f1bad66f3ee8c6deb65fa1fe418e8ad803efedd69c35f3b5502f43bd1dc \ - --hash=sha256:0f14cffd32e9c4c73da66db97853a6aeceaac34acdc0fae9e5bbc9370281864c \ - --hash=sha256:15975747462ec49fdc863af906bab87c43b2491403ab37a6d88410635786b0f4 \ - --hash=sha256:1724117bae69e091309ffb8255412c4651d3f6355560d9af312d547f6c5bc8b8 \ - --hash=sha256:1a7c280185c4da99e0cc06c63bdf91f5b0b71deb70d8717f0ab870a43e376db8 \ - --hash=sha256:1b7928bb7580736ffac5baf814097be342ba08d3cfdfb48e52773ec959572287 \ - --hash=sha256:2032d9cb994ce3b4cba2b8dfae08c7e25bc14ba484c770d4d3be33c27de8c45b \ - --hash=sha256:20e7eeb1166087db636c06cae04a1ef59298627f56fb17da10528ab52a14c87f \ - --hash=sha256:216f5d7dbb67166759e59b0479bca82b8acf9bed6015b526b8eb10143fb08e77 \ - --hash=sha256:28b119ba97129d3001673a697b7cce47fe6de1f7255d104c2f01108a5179a066 \ - 
--hash=sha256:3104f4b084ad5d9c0cb87445cc8cfd96bba710bef4a66c2674910127044df209 \ - --hash=sha256:3e6192dbcefaaa52ed81be88525a54a445f4b4fe2fffcae7fe40ebb58bd06bfd \ - --hash=sha256:42d4f97b9795a7aafa152a36fe2ad44549b83a743fd3e77011136def512e6c2a \ - --hash=sha256:44e706bac34e9f50779cb8c39f10b53a4d15aebb97235643d3112ac20bd577b4 \ - --hash=sha256:47b11a729d61a47df56346283a4a800fa379ae6a85870d5a2e1e4956c828eedc \ - --hash=sha256:4854f9edc5208f63f0841c0c667260ae8d6846cfa233c479e29fdc85d42ebd58 \ - --hash=sha256:48f721f070726cd2a6e44f3c33f8ee4b24188e4b816e6dd8ba542c8c3bb5b246 \ - --hash=sha256:52afb0ac962963fff30cf1be775bc51ae083ef4c1e354266ab20e5382057dd62 \ - --hash=sha256:54d8b9c5e288362ec8595c1d98666d36f2070fd0c2f76e2b3c60fbad9bd76227 \ - --hash=sha256:5bd3d7dfd9cd058eb68d9a905dec854f86649f64d4ddf21f3ec289341386c44b \ - --hash=sha256:613010b5d17906c4367609e6f52e9a2595e35d5cc27d36ff3f1b6fa6e954d944 \ - --hash=sha256:624321120f7e60336be8ec74a172ae7fba5c3ed5bf787cc85f7e9986c9e0ebc2 \ - --hash=sha256:65c94410b5a8355cfcf12fd600a313efee46ce96a09e911ea92cf2acf6708804 \ - --hash=sha256:6640f83df0ae4ae1104d4c62b77e9ef39be85ebe53f636388707d532bee2b7b8 \ - --hash=sha256:687700f8371643916a1d2c61f3fdaa630407dd205c38afff936545d7b7466066 \ - --hash=sha256:77c2713faf25a953c69cf0f723d1b7dd83827b0834e6c41e3fb3bbc6765914a1 \ - --hash=sha256:78068e8678ca023594e4a0ab558905c1033b2d3e806a0ad9e3094e231e115a33 \ - --hash=sha256:7a23ccc1083c260fa9685c93e3b170baba45aeed4b524deb3f426b0c40c11639 \ - --hash=sha256:7abddb2bd5489d30ffeb4b93a428130886c171b4d355ccd226e83254fcb6b9ef \ - --hash=sha256:80093b595921eed1a2cead546a683b9e2ae7f4a4592bb2ab22f70d30174f003a \ - --hash=sha256:8242543c522d84d033fe79be04cb559b80d7eb98ad81b137ff7e0a9020f00ace \ - --hash=sha256:838812c65ed5f7c2bd11f7b098d2e5d01685a3f6d1f82849423b570bae698c00 \ - --hash=sha256:83ea1a398f192957cb986d9206ce229efe0ee75e3c6635baff53ddf39bd718d5 \ - --hash=sha256:8421aa8c9b45ea608c205db9e1c0c855c7e54d0e9c2c2f337ce024f6843cab3b \ - --hash=sha256:858375573c9225cc8e5b49bfac846a77b696b8d5e815711b8d4ba3141e6e8879 \ - --hash=sha256:86de64468cad9c6d269f32a6390e210ca5ada568c7a55de8e681ca3b897bb340 \ - --hash=sha256:87f7ac99b15270db8d53f28c3c7b968612993a90a5cf359da354efe96f5372b4 \ - --hash=sha256:8bad8210ad4df68c44ff3685cca3cda448ee46e20d13edcff8909eba6ec01ca4 \ - --hash=sha256:8bb4af15f305056e95ca1bd086239b9ebc6ad55e9f49076d27d80027f72752f6 \ - --hash=sha256:8c78bfe20d4c890cb5580a3b9290f700c570e167d4cdcc55feec07030297a5e3 \ - --hash=sha256:8f3f3154fde2b1ff3aa7b4f9326347ebc89c8ef425ca1db8f665175e6d3bd42f \ - --hash=sha256:94010bd61bc168c103a5b3b0f56ed3b616688192db7cd5b1d626e49f28ff51b3 \ - --hash=sha256:941fab0073f0a54dc33d1a0460cb04e0d85893cb0c5e1476c785000f8b359409 \ - --hash=sha256:9dca7c3956b03b7663fac4d150f5e6d4f6f38b2462c1e9afd83bcf7019f17913 \ - --hash=sha256:a180dbd5ea5d47c2d3b716d5c19cc3fb162d1c8db93b21a1295d69585bfddac1 \ - --hash=sha256:a2712aee7b3834ace51738c15d9ee152cc5a98dc7d57dd93300461b792ab7b43 \ - --hash=sha256:a435ef8a3bd95c8a2d316d6e0ff70d0db524f6037411652803e118871d703333 \ - --hash=sha256:abb756147314430bee5d10919b8493c0ccb109ddb7f5dfd2fcd7441266a25b75 \ - --hash=sha256:abe6eb10122f0d746a0d510c2039ae8edb27bc9af29f6d1b05a66cc2401353ff \ - --hash=sha256:acbd0a6d61cc954b9f535daaa9ec26b0a60a0d4353c5f7c1438ebc88a359a47e \ - --hash=sha256:ae08ac90aa8fa14caafc7a6251bd218bf6dac518b7bff09caaa5e781119ba3f2 \ - --hash=sha256:ae61446166983c663cee42c852ed63899e43e484abf080089f771df4b9d272ef \ - 
--hash=sha256:afe1f3bc486d0ce40abb0a0c9adb39aed3bbac36ebdc596487b0cceba55c21c1 \ - --hash=sha256:b946da90dc2799bcafa682692c1d2139b2a96ec3c24fa9fc6f5b0da782675330 \ - --hash=sha256:b947e264f0e77d30dcbccbb00f49f900b204b922eb0c3a9f0afd61aaa1cedc3d \ - --hash=sha256:bb5635c851eef3a7a54becde6da99485eecf7d068bd885ac8e6d173c4ecd68b0 \ - --hash=sha256:bcbebd369493d68162cddb74a9c1fcebd139dfbb7ddb23d8f8e43e6c87bac3a6 \ - --hash=sha256:c31805d2c8ade9b11feca4674eee2b9cce1fec3e8ddb7bbdd961a09dc76a80ea \ - --hash=sha256:c8840f064b1fb377cffd3efeaad2b190c14d4c8da02316dae07571252d20b31f \ - --hash=sha256:ccb94342d13e3bf3ffa6e62f95b5e3f0bc6bfa94558cb37f4b3d09d6feb536ff \ - --hash=sha256:d66689e840e75221b0b290b0befa86f059fb35e1ee6443bce51516d4d61b6b99 \ - --hash=sha256:dabf1a05318d95b1537fd61d9330ef4313ea1216eea128a17615038859da3b3b \ - --hash=sha256:db03704b3506455d86ec72c3358a779e9b1d07b61220dfb43702b7b668edcd0d \ - --hash=sha256:de4217b9eb8b541cf2b7fde4401ce9d9a411cc0af85d410f9d6f4333f43640be \ - --hash=sha256:df0841f94928f8af9c7a1f0aaaffba1fb74607af023a152f59379c01c53aee58 \ - --hash=sha256:dfb992dbcd88d8254471760879d48fb20836d91baa90f181c957122f9592b3dc \ - --hash=sha256:e7e66b4e403c2836ac74f26c4b65d8ac0ca1eef41dfcac2d013b7482befaad83 \ - --hash=sha256:e8012bce6836d3f20a6c9599f81dfa945f433dab4dbd0c4917a6fb1f998ab33d \ - --hash=sha256:f01de4ec083daebf210531e2cca3bdb1608dbbbe00a9723e261d92087a1f6ebc \ - --hash=sha256:f0d945a85b70da97ae86113faf9f1b9294efe66bd4a5d6f82f2676d567338b66 \ - --hash=sha256:fa0ae3275ef706c0309556061185dd0e4c4cd3b7d6f67ae617e4e677c7a41e2e +pyzmq==25.0.0 \ + --hash=sha256:00c94fd4c9dd3c95aace0c629a7fa713627a5c80c1819326b642adf6c4b8e2a2 \ + --hash=sha256:01d53958c787cfea34091fcb8ef36003dbb7913b8e9f8f62a0715234ebc98b70 \ + --hash=sha256:0282bba9aee6e0346aa27d6c69b5f7df72b5a964c91958fc9e0c62dcae5fdcdc \ + --hash=sha256:02f5cb60a7da1edd5591a15efa654ffe2303297a41e1b40c3c8942f8f11fc17c \ + --hash=sha256:0645b5a2d2a06fd8eb738018490c514907f7488bf9359c6ee9d92f62e844b76f \ + --hash=sha256:0a154ef810d44f9d28868be04641f837374a64e7449df98d9208e76c260c7ef1 \ + --hash=sha256:0a90b2480a26aef7c13cff18703ba8d68e181facb40f78873df79e6d42c1facc \ + --hash=sha256:0e8d00228db627ddd1b418c7afd81820b38575f237128c9650365f2dd6ac3443 \ + --hash=sha256:17e1cb97d573ea84d7cd97188b42ca6f611ab3ee600f6a75041294ede58e3d20 \ + --hash=sha256:183e18742be3621acf8908903f689ec520aee3f08449bfd29f583010ca33022b \ + --hash=sha256:1f6116991568aac48b94d6d8aaed6157d407942ea385335a6ed313692777fb9d \ + --hash=sha256:20638121b0bdc80777ce0ec8c1f14f1ffec0697a1f88f0b564fa4a23078791c4 \ + --hash=sha256:2754fa68da08a854f4816e05160137fa938a2347276471103d31e04bcee5365c \ + --hash=sha256:28bcb2e66224a7ac2843eb632e4109d6b161479e7a2baf24e37210461485b4f1 \ + --hash=sha256:293a7c2128690f496057f1f1eb6074f8746058d13588389981089ec45d8fdc77 \ + --hash=sha256:2a73af6504e0d2805e926abf136ebf536735a13c22f709be7113c2ec65b4bec3 \ + --hash=sha256:2d05d904f03ddf1e0d83d97341354dfe52244a619b5a1440a5f47a5b3451e84e \ + --hash=sha256:2e7b87638ee30ab13230e37ce5331b3e730b1e0dda30120b9eeec3540ed292c8 \ + --hash=sha256:3100dddcada66ec5940ed6391ebf9d003cc3ede3d320748b2737553019f58230 \ + --hash=sha256:31e523d067ce44a04e876bed3ff9ea1ff8d1b6636d16e5fcace9d22f8c564369 \ + --hash=sha256:3594c0ff604e685d7e907860b61d0e10e46c74a9ffca168f6e9e50ea934ee440 \ + --hash=sha256:3670e8c5644768f214a3b598fe46378a4a6f096d5fb82a67dfd3440028460565 \ + --hash=sha256:4046d03100aca266e70d54a35694cb35d6654cfbef633e848b3c4a8d64b9d187 \ + 
--hash=sha256:4725412e27612f0d7d7c2f794d89807ad0227c2fc01dd6146b39ada49c748ef9 \ + --hash=sha256:484c2c4ee02c1edc07039f42130bd16e804b1fe81c4f428e0042e03967f40c20 \ + --hash=sha256:487305c2a011fdcf3db1f24e8814bb76d23bc4d2f46e145bc80316a59a9aa07d \ + --hash=sha256:4a1bc30f0c18444d51e9b0d0dd39e3a4e7c53ee74190bebef238cd58de577ea9 \ + --hash=sha256:4c25c95416133942280faaf068d0fddfd642b927fb28aaf4ab201a738e597c1e \ + --hash=sha256:4cbb885f347eba7ab7681c450dee5b14aed9f153eec224ec0c3f299273d9241f \ + --hash=sha256:4d3d604fe0a67afd1aff906e54da557a5203368a99dcc50a70eef374f1d2abef \ + --hash=sha256:4e295f7928a31ae0f657e848c5045ba6d693fe8921205f408ca3804b1b236968 \ + --hash=sha256:5049e75cc99db65754a3da5f079230fb8889230cf09462ec972d884d1704a3ed \ + --hash=sha256:5050f5c50b58a6e38ccaf9263a356f74ef1040f5ca4030225d1cb1a858c5b7b6 \ + --hash=sha256:526f884a27e8bba62fe1f4e07c62be2cfe492b6d432a8fdc4210397f8cf15331 \ + --hash=sha256:531866c491aee5a1e967c286cfa470dffac1e2a203b1afda52d62b58782651e9 \ + --hash=sha256:5605621f2181f20b71f13f698944deb26a0a71af4aaf435b34dd90146092d530 \ + --hash=sha256:58fc3ad5e1cfd2e6d24741fbb1e216b388115d31b0ca6670f894187f280b6ba6 \ + --hash=sha256:60ecbfe7669d3808ffa8a7dd1487d6eb8a4015b07235e3b723d4b2a2d4de7203 \ + --hash=sha256:610d2d112acd4e5501fac31010064a6c6efd716ceb968e443cae0059eb7b86de \ + --hash=sha256:6136bfb0e5a9cf8c60c6ac763eb21f82940a77e6758ea53516c8c7074f4ff948 \ + --hash=sha256:62b9e80890c0d2408eb42d5d7e1fc62a5ce71be3288684788f74cf3e59ffd6e2 \ + --hash=sha256:656281d496aaf9ca4fd4cea84e6d893e3361057c4707bd38618f7e811759103c \ + --hash=sha256:66509c48f7446b640eeae24b60c9c1461799a27b1b0754e438582e36b5af3315 \ + --hash=sha256:6bf3842af37af43fa953e96074ebbb5315f6a297198f805d019d788a1021dbc8 \ + --hash=sha256:731b208bc9412deeb553c9519dca47136b5a01ca66667cafd8733211941b17e4 \ + --hash=sha256:75243e422e85a62f0ab7953dc315452a56b2c6a7e7d1a3c3109ac3cc57ed6b47 \ + --hash=sha256:7877264aa851c19404b1bb9dbe6eed21ea0c13698be1eda3784aab3036d1c861 \ + --hash=sha256:81f99fb1224d36eb91557afec8cdc2264e856f3464500b55749020ce4c848ef2 \ + --hash=sha256:8539216173135e9e89f6b1cc392e74e6b935b91e8c76106cf50e7a02ab02efe5 \ + --hash=sha256:85456f0d8f3268eecd63dede3b99d5bd8d3b306310c37d4c15141111d22baeaf \ + --hash=sha256:866eabf7c1315ef2e93e34230db7cbf672e0d7c626b37c11f7e870c8612c3dcc \ + --hash=sha256:926236ca003aec70574754f39703528947211a406f5c6c8b3e50eca04a9e87fc \ + --hash=sha256:930e6ad4f2eaac31a3d0c2130619d25db754b267487ebc186c6ad18af2a74018 \ + --hash=sha256:94f0a7289d0f5c80807c37ebb404205e7deb737e8763eb176f4770839ee2a287 \ + --hash=sha256:9a2d5e419bd39a1edb6cdd326d831f0120ddb9b1ff397e7d73541bf393294973 \ + --hash=sha256:9ca6db34b26c4d3e9b0728841ec9aa39484eee272caa97972ec8c8e231b20c7e \ + --hash=sha256:9f72ea279b2941a5203e935a4588b9ba8a48aeb9a926d9dfa1986278bd362cb8 \ + --hash=sha256:a0e7ef9ac807db50b4eb6f534c5dcc22f998f5dae920cc28873d2c1d080a4fc9 \ + --hash=sha256:a1cd4a95f176cdc0ee0a82d49d5830f13ae6015d89decbf834c273bc33eeb3d3 \ + --hash=sha256:a9c464cc508177c09a5a6122b67f978f20e2954a21362bf095a0da4647e3e908 \ + --hash=sha256:ac97e7d647d5519bcef48dd8d3d331f72975afa5c4496c95f6e854686f45e2d9 \ + --hash=sha256:af1fbfb7ad6ac0009ccee33c90a1d303431c7fb594335eb97760988727a37577 \ + --hash=sha256:b055a1cddf8035966ad13aa51edae5dc8f1bba0b5d5e06f7a843d8b83dc9b66b \ + --hash=sha256:b6f75b4b8574f3a8a0d6b4b52606fc75b82cb4391471be48ab0b8677c82f9ed4 \ + --hash=sha256:b90bb8dfbbd138558f1f284fecfe328f7653616ff9a972433a00711d9475d1a9 \ + 
--hash=sha256:be05504af0619d1cffa500af1e0ede69fb683f301003851f5993b5247cc2c576 \ + --hash=sha256:c21a5f4e54a807df5afdef52b6d24ec1580153a6bcf0607f70a6e1d9fa74c5c3 \ + --hash=sha256:c48f257da280b3be6c94e05bd575eddb1373419dbb1a72c3ce64e88f29d1cd6d \ + --hash=sha256:cac602e02341eaaf4edfd3e29bd3fdef672e61d4e6dfe5c1d065172aee00acee \ + --hash=sha256:ccb3e1a863222afdbda42b7ca8ac8569959593d7abd44f5a709177d6fa27d266 \ + --hash=sha256:e1081d7030a1229c8ff90120346fb7599b54f552e98fcea5170544e7c6725aab \ + --hash=sha256:e14df47c1265356715d3d66e90282a645ebc077b70b3806cf47efcb7d1d630cb \ + --hash=sha256:e4bba04ea779a3d7ef25a821bb63fd0939142c88e7813e5bd9c6265a20c523a2 \ + --hash=sha256:e99629a976809fe102ef73e856cf4b2660acd82a412a51e80ba2215e523dfd0a \ + --hash=sha256:f330a1a2c7f89fd4b0aa4dcb7bf50243bf1c8da9a2f1efc31daf57a2046b31f2 \ + --hash=sha256:f3f96d452e9580cb961ece2e5a788e64abaecb1232a80e61deffb28e105ff84a \ + --hash=sha256:fc7c1421c5b1c916acf3128bf3cc7ea7f5018b58c69a6866d70c14190e600ce9 # via # ipykernel # jupyter-client @@ -2148,13 +2372,16 @@ rdflib-shim==1.0.3 \ # pyshex # pyshexc # sparqlslurper -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.28.2 \ + --hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \ + --hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf # via # -r requirements/main.in # curies - # dagit # dagster # dagster-graphql # gql @@ -2175,11 +2402,23 @@ requests-toolbelt==0.10.1 \ rfc3339-validator==0.1.4 \ --hash=sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b \ --hash=sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa - # via jsonschema -rfc3986-validator==0.1.1 \ - --hash=sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9 \ - --hash=sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055 - # via jsonschema + # via + # jsonschema + # jupyter-events +rfc3986-validator==0.1.1 \ + --hash=sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9 \ + --hash=sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055 + # via + # jsonschema + # jupyter-events rfc3987==1.3.8 \ --hash=sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53 \ --hash=sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733 @@ -2441,9 +2680,15 @@ toolz==0.12.0 \ --hash=sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f \ --hash=sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194 # via -r requirements/main.in -toposort==1.8 \ - --hash=sha256:b1e89996c43daaf0e03805d33df22333c99c9d36715b188dea0e551ce2f1cd81 \ - --hash=sha256:c87fd1a8d70b2ca8c928eaf90a538307171fed89e1dcfcdbf7cf6599dfc3208a +toposort==1.9 \ + --hash=sha256:9f434c815e1bd2f9ad05152b6b0071b1f56e288c107869708f2463ec932e2637 \ + --hash=sha256:f41a34490d44934b533a7bdaff979ee8a47203fd2d8a746db83f2d5ab12458b9 # via dagster tornado==6.2 \ --hash=sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca \ @@ -2522,9 +2767,15 @@ uri-template==1.2.0 \ --hash=sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06 \ 
--hash=sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db # via jsonschema -urllib3==1.26.13 \ - --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ - --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 # via # botocore # gql @@ -2602,7 +2853,6 @@ watchdog==2.2.1 \ --hash=sha256:e618a4863726bc7a3c64f95c218437f3349fb9d909eb9ea3a1ed3b567417c661 \ --hash=sha256:f8ac23ff2c2df4471a61af6490f847633024e5aa120567e08d07af5718c9d092 # via - # dagit # dagster # linkml # mkdocs @@ -2626,9 +2876,9 @@ watchfiles==0.18.1 \ --hash=sha256:dde79930d1b28f15994ad6613aa2865fc7a403d2bb14585a8714a53233b15717 \ --hash=sha256:e2b2bdd26bf8d6ed90763e6020b475f7634f919dbd1730ea1b6f8cb88e21de5d # via uvicorn -wcwidth==0.2.5 \ - --hash=sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784 \ - --hash=sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83 +wcwidth==0.2.6 \ + --hash=sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e \ + --hash=sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0 # via prompt-toolkit webcolors==1.12 \ --hash=sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9 \ @@ -2789,9 +3039,15 @@ xlrd==2.0.1 \ --hash=sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd \ --hash=sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88 # via -r requirements/main.in -xlsxwriter==3.0.6 \ - --hash=sha256:2f9e5ea13343fe85486e349d4e5fdf746bb69dc7bc1dedfa9b5fae2bb48c0795 \ - --hash=sha256:56eae8ae587536734009aa819845c3e3c865462399823085b0baabbb081a929c +xlsxwriter==3.0.7 \ + --hash=sha256:1239958b12da12be6abf05b32f64ff162eb75fff0fdf00d901ca0f46dc0557cf \ + --hash=sha256:d79dadac9dcbff3c24281764b052b46bcdb391f3184f337c4c4e6ed8d2738d7d # via -r requirements/main.in yarl==1.8.2 \ --hash=sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87 \ diff --git a/setup.py b/setup.py index 7aba580c..3e40dd70 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ -import setuptools +"""Package setup for nmdc_runtime.""" +from setuptools import find_namespace_packages, setup # type: ignore with open("README.md") as f: long_description = f.read() @@ -8,13 +9,15 @@ with open("requirements/dev.in") as f: dev_requires = f.read().splitlines()[1:] # Elide `-c main.txt` constraint - -setuptools.setup( +setup( name="nmdc_runtime", url="https://github.com/microbiomedata/nmdc-runtime", - packages=setuptools.find_namespace_packages( - include=["nmdc_runtime.*", "components.*"], exclude=["tests"] + packages=find_namespace_packages( + where="components", + include=["nmdc_runtime*"], + exclude=["tests", "tests2"], ), + package_dir={"": "components"}, use_scm_version=True, setup_requires=["setuptools_scm"], author="Donny Winston", diff --git a/workspace.toml b/workspace.toml new file mode 100644 index 00000000..1301e18b --- /dev/null +++ b/workspace.toml @@ -0,0 +1,2 @@ +[tool.polylith] +namespace = "nmdc_runtime"
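
Reviewer note (not part of the patch): the setup.py change above switches package discovery to the components/ directory via package_dir={"": "components"}, and the new workspace.toml declares the Polylith namespace that layout follows. Below is a minimal sketch, assuming it is run from the repository root with setuptools installed, of how to confirm locally which packages the revised configuration would pick up; the printed names are illustrative and depend on the checked-out tree.

from setuptools import find_namespace_packages

# Mirror the arguments now used in setup.py: treat components/ as the
# package root and keep only packages under the nmdc_runtime namespace.
packages = find_namespace_packages(
    where="components",
    include=["nmdc_runtime*"],
    exclude=["tests", "tests2"],
)
print(packages)  # e.g. ['nmdc_runtime.workflow', ...] for this layout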