Skip to content

Commit

Permalink
Revert "Add Pydantic Extra.forbid to shared base model config" (#880)
Browse files Browse the repository at this point in the history
Reverts #820

---------

Signed-off-by: Flaviu Vadan <[email protected]>
  • Loading branch information
flaviuvadan authored Nov 28, 2023
1 parent 206b51a commit 9aad50b
Show file tree
Hide file tree
Showing 22 changed files with 72 additions and 222 deletions.
1 change: 0 additions & 1 deletion docs/examples/workflows-examples.md
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,6 @@ Explore the examples through the side bar!
| [template-defaults](https://github.com/argoproj/argo-workflows/blob/master/examples/template-defaults.yaml) |
| [testvolume](https://github.com/argoproj/argo-workflows/blob/master/examples/testvolume.yaml) |
| [timeouts-step](https://github.com/argoproj/argo-workflows/blob/master/examples/timeouts-step.yaml) |
| [webhdfs-input-output-artifacts](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml) |
| [work-avoidance](https://github.com/argoproj/argo-workflows/blob/master/examples/work-avoidance.yaml) |
| [workflow-count-resourcequota](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-count-resourcequota.yaml) |
| [workflow-event-binding/event-consumer-workfloweventbinding](https://github.com/argoproj/argo-workflows/blob/master/examples/workflow-event-binding/event-consumer-workfloweventbinding.yaml) |
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
# Webhdfs Input Output Artifacts

## Note

This example is a replication of an Argo Workflows example in Hera.
The upstream example can be [found here](https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml).

This example showcases how to use WebHDFS.

This example used to reside in the `examples` directory, but was moved here because the upstream example specifies an
extra `overwrite` parameter in the `outputs` field, which is not an allowed parameter. Example:
- https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml


=== "Hera"
Expand Down
11 changes: 10 additions & 1 deletion docs/examples/workflows/volume_mounts_nfs.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,16 @@

with Workflow(
generate_name="volumes-",
volumes=[NFSVolume(name="nfs-volume", server="your.nfs.server", mount_path="/mnt/nfs", path="/share/nfs")],
volumes=[
NFSVolume(
name="nfs-volume",
server="your.nfs.server",
mount_path="/mnt/nfs",
path="/share/nfs",
size="1Gi",
storage_class_name="nfs-client",
)
],
entrypoint="d",
) as w:
with DAG(name="d"):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,3 @@
"""This example showcases how to use WebHDFS.
This example used to reside in the `examples` directory, but was moved here to avoid the extra `overwrite` parameter
specified in the `outputs` field upstream, which is not an allowed parameter. Example:
- https://github.com/argoproj/argo-workflows/blob/master/examples/webhdfs-input-output-artifacts.yaml
"""
from hera.workflows import (
Container,
HTTPArtifact,
Expand Down
11 changes: 10 additions & 1 deletion examples/workflows/volume_mounts_nfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,16 @@ def foo():

with Workflow(
generate_name="volumes-",
volumes=[NFSVolume(name="nfs-volume", server="your.nfs.server", mount_path="/mnt/nfs", path="/share/nfs")],
volumes=[
NFSVolume(
name="nfs-volume",
server="your.nfs.server",
mount_path="/mnt/nfs",
path="/share/nfs",
size="1Gi",
storage_class_name="nfs-client",
)
],
entrypoint="d",
) as w:
with DAG(name="d"):
Expand Down
18 changes: 6 additions & 12 deletions src/hera/shared/_base_model.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
"""Module that holds the underlying base Pydantic models for Hera objects."""
from pydantic import (
BaseModel as PyBaseModel,
Extra,
)
from pydantic import BaseModel as PyBaseModel


class BaseModel(PyBaseModel):
Expand All @@ -13,19 +10,16 @@ class Config:
"""

allow_population_by_field_name = True
"""support populating Hera object fields via keyed dictionaries."""
"""support populating Hera object fields via keyed dictionaries"""

allow_mutation = True
"""supports mutating Hera objects post instantiation."""
"""supports mutating Hera objects post instantiation"""

use_enum_values = True
"""supports using enums, which are then unpacked to obtain the actual `.value`, on Hera objects."""
"""supports using enums, which are then unpacked to obtain the actual `.value`, on Hera objects"""

arbitrary_types_allowed = True
"""supports specifying arbitrary types for any field to support Hera object fields processing."""
"""supports specifying arbitrary types for any field to support Hera object fields processing"""

smart_union = True
"""uses smart union for matching a field's specified value to the underlying type that's part of a union."""

extra = Extra.forbid
"""forbids extra fields from being added to Hera objects. This protects users from using the wrong kwargs."""
"""uses smart union for matching a field's specified value to the underlying type that's part of a union"""
6 changes: 2 additions & 4 deletions src/hera/workflows/_mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -690,12 +690,10 @@ def __call__(self, *args, **kwargs) -> Union[None, Step, Task]:
f"Callable Template '{self.name}' is not callable under a Workflow" # type: ignore
)
if isinstance(_context.pieces[-1], (Steps, Parallel)):
allowed_step_kwargs = Step.__fields__.keys()
return Step(template=self, **{k: v for k, v in kwargs.items() if k in allowed_step_kwargs})
return Step(*args, template=self, **kwargs)

if isinstance(_context.pieces[-1], DAG):
allowed_task_kwargs = Task.__fields__.keys()
return Task(template=self, **{k: v for k, v in kwargs.items() if k in allowed_task_kwargs})
return Task(*args, template=self, **kwargs)

raise InvalidTemplateCall(
f"Callable Template '{self.name}' is not under a Workflow, Steps, Parallel, or DAG context" # type: ignore
Expand Down
2 changes: 1 addition & 1 deletion src/hera/workflows/artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ class Artifact(BaseModel):
path: Optional[str] = None
"""path where the artifact should be placed/loaded from"""

recurse_mode: Optional[bool] = None
recurse_mode: Optional[str] = None
"""recursion mode when applying the permissions of the artifact if it is an artifact folder"""

sub_path: Optional[str] = None
Expand Down
78 changes: 6 additions & 72 deletions src/hera/workflows/user_container.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""The user container module provides user container functionality and objects."""
from typing import Any, Dict, List, Optional, Union
from typing import List, Optional, Union

from hera.workflows.env import _BaseEnv
from hera.workflows.env_from import _BaseEnvFrom
Expand All @@ -13,82 +13,16 @@
from hera.workflows.resources import Resources
from hera.workflows.volume import _BaseVolume

# TODO: Mixins imports user container, so this avoids a circular import. Fix in a future release
EnvT = Optional[
Union[
_BaseEnv,
EnvVar,
List[Union[_BaseEnv, EnvVar, Dict[str, Any]]],
Dict[str, Any],
]
]
"""`EnvT` is the core Hera type for environment variables.
The env type enables setting single valued environment variables, lists of environment variables, or dictionary
mappings of env variables names to values, which are automatically parsed by Hera.
"""

# TODO: Mixins imports user container, so this avoids a circular import. Fix in a future release
EnvFromT = Optional[Union[_BaseEnvFrom, EnvFromSource, List[Union[_BaseEnvFrom, EnvFromSource]]]]
"""`EnvFromT` is the core Hera type for environment variables derived from Argo/Kubernetes sources.
This env type enables specifying environment variables in base form, as `hera.workflows.env` form, or lists of the
aforementioned objects.
"""


class UserContainer(_ModelUserContainer):
"""`UserContainer` is a container type that is specifically used as a side container."""

env: EnvT = None # type: ignore[assignment]
env_from: EnvFromT = None # type: ignore[assignment]
env: Optional[List[Union[_BaseEnv, EnvVar]]] = None # type: ignore[assignment]
env_from: Optional[List[Union[_BaseEnvFrom, EnvFromSource]]] = None # type: ignore[assignment]
image_pull_policy: Optional[Union[str, ImagePullPolicy]] = None # type: ignore[assignment]
resources: Optional[Union[Resources, ResourceRequirements]] = None # type: ignore[assignment]
volumes: Optional[List[_BaseVolume]] = None

def _build_resources(self) -> Optional[ResourceRequirements]:
"""Parses the resources and returns a generated `ResourceRequirements` object."""
if self.resources is None or isinstance(self.resources, ResourceRequirements):
return self.resources
return self.resources.build()

def _build_env(self) -> Optional[List[EnvVar]]:
"""Processes the `env` field and returns a list of generated `EnvVar` or `None`."""
if self.env is None:
return None

result: List[EnvVar] = []
env = self.env if isinstance(self.env, list) else [self.env]
for e in env:
if isinstance(e, EnvVar):
result.append(e)
elif issubclass(e.__class__, _BaseEnv):
result.append(e.build())
elif isinstance(e, dict):
for k, v in e.items():
result.append(EnvVar(name=k, value=v))

# returning `None` for `envs` means the submission to the server will not even have the `envs` field
# set, which saves some space
return result if result else None

def _build_env_from(self) -> Optional[List[EnvFromSource]]:
"""Processes the `env_from` field and returns a list of generated `EnvFrom` or `None`."""
if self.env_from is None:
return None

result: List[EnvFromSource] = []
env_from = self.env_from if isinstance(self.env_from, list) else [self.env_from]
for e in env_from:
if isinstance(e, EnvFromSource):
result.append(e)
elif issubclass(e.__class__, _BaseEnvFrom):
result.append(e.build())

# returning `None` for `envs` means the submission to the server will not even have the `env_from` field
# set, which saves some space
return result if result else None

def _build_image_pull_policy(self) -> Optional[str]:
"""Processes the image pull policy field and returns a generated `ImagePullPolicy` enum."""
if self.image_pull_policy is None:
Expand Down Expand Up @@ -119,8 +53,8 @@ def build(self) -> _ModelUserContainer:
return _ModelUserContainer(
args=self.args,
command=self.command,
env=self._build_env(),
env_from=self._build_env_from(),
env=self.env,
env_from=self.env_from,
image=self.image,
image_pull_policy=self._build_image_pull_policy(),
lifecycle=self.lifecycle,
Expand All @@ -129,7 +63,7 @@ def build(self) -> _ModelUserContainer:
name=self.name,
ports=self.ports,
readiness_probe=self.readiness_probe,
resources=self._build_resources(),
resources=self.resources,
security_context=self.security_context,
startup_probe=self.startup_probe,
stdin=self.stdin,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

@script()
def read_artifact(
my_artifact: Annotated[str, Artifact(name="my_artifact", path="/tmp/file", mode=123, recurse_mode=True)]
my_artifact: Annotated[str, Artifact(name="my_artifact", path="/tmp/file", mode=123, recurseMode=True)]
) -> str:
return my_artifact

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from hera.workflows.steps import Steps


@script(inputs=[Artifact(name="my_artifact", path="/tmp/file", mode=123, recurse_mode=True)])
@script(inputs=[Artifact(name="my_artifact", path="/tmp/file", mode=123, recurseMode=True)])
def read_artifact(my_artifact) -> str:
return my_artifact

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,9 @@
from typing_extensions import Annotated # type: ignore

from hera.shared import global_config
from hera.workflows import (
Artifact,
OSSArtifact,
Steps,
Workflow,
models as m,
script,
)
from hera.workflows import Workflow, script
from hera.workflows.artifact import Artifact, OSSArtifact
from hera.workflows.steps import Steps

global_config.experimental_features["script_annotations"] = True

Expand All @@ -25,13 +20,13 @@ def read_artifact(
OSSArtifact(
name="my_artifact",
path="/tmp/file",
access_key_secret=m.SecretKeySelector(name="my-oss-credentials", key="secretKey"),
secret_key_secret=m.SecretKeySelector(name="my-oss-credentials", key="secretKey"),
access_key_secret={"name": "my-oss-credentials", "key": "secretKey"},
bucket="test-bucket-name",
create_bucket_if_not_present=True,
endpoint="http://oss-cn-hangzhou-zmf.aliyuncs.com",
key="test/mydirectory/",
lifecycle_rule=m.OSSLifecycleRule(mark_deletion_after_days=42),
lifecycle_rule={"name": "my-oss-rule", "key": "ruleKey"},
secret_key_secret={"name": "my-oss-credentials", "key": "secretKey"},
security_token="oss-token",
),
],
Expand Down
Original file line number Diff line number Diff line change
@@ -1,27 +1,22 @@
"""Regression test: compare the new Annotated style inputs declaration with the old version."""

from hera.workflows import (
Artifact,
OSSArtifact,
Steps,
Workflow,
models as m,
script,
)
from hera.workflows import Workflow, script
from hera.workflows.artifact import Artifact, OSSArtifact
from hera.workflows.steps import Steps


@script(
inputs=[
OSSArtifact(
name="my_artifact",
path="/tmp/file",
access_key_secret=m.SecretKeySelector(name="my-oss-credentials", key="secretKey"),
secret_key_secret=m.SecretKeySelector(name="my-oss-credentials", key="secretKey"),
access_key_secret={"name": "my-oss-credentials", "key": "secretKey"},
bucket="test-bucket-name",
create_bucket_if_not_present=True,
endpoint="http://oss-cn-hangzhou-zmf.aliyuncs.com",
key="test/mydirectory/",
lifecycle_rule=m.OSSLifecycleRule(mark_deletion_after_days=42),
lifecycle_rule={"name": "my-oss-rule", "key": "ruleKey"},
secret_key_secret={"name": "my-oss-credentials", "key": "secretKey"},
security_token="oss-token",
),
]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,9 @@
from typing_extensions import Annotated # type: ignore

from hera.shared import global_config
from hera.workflows import (
Artifact,
S3Artifact,
Steps,
Workflow,
models as m,
script,
)
from hera.workflows import Workflow, script
from hera.workflows.artifact import Artifact, S3Artifact
from hera.workflows.steps import Steps

global_config.experimental_features["script_annotations"] = True

Expand All @@ -25,16 +20,16 @@ def read_artifact(
S3Artifact(
name="my_artifact",
path="/tmp/file",
access_key_secret=m.SecretKeySelector(name="my-oss-credentials", key="secretKey"),
secret_key_secret=m.SecretKeySelector(name="my-oss-credentials", key="secretKey"),
access_key_secret={"name": "my-oss-credentials", "key": "secretKey"},
bucket="my-bucket-name",
create_bucket_if_not_present=m.CreateS3BucketOptions(object_locking=True),
encryption_options=m.S3EncryptionOptions(enable_encryption=True),
create_bucket_if_not_present={"value": "true"},
encryption_options={"value": "s3-encryption"},
endpoint="s3.amazonaws.com",
insecure=True,
key="path/in/bucket",
region="us-west-2",
role_arn="s3-role-arn",
secret_key_secret={"name": "my-oss-credentials", "key": "secretKey"},
use_sdk_creds=True,
),
],
Expand Down
Loading

0 comments on commit 9aad50b

Please sign in to comment.