Commit

fix: ensure we do not recursively flatten, as the adapter col method is already recursive
z3z1ma committed Jan 30, 2025
1 parent 09f435d commit 04f92c0
Showing 4 changed files with 30 additions and 18 deletions.
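
As a rough illustration of the commit message's reasoning (the adapter's column flatten method already descends into nested fields, so recursing again on top of it revisits them), here is a toy sketch. FakeColumn and its flatten() are hypothetical stand-ins, not dbt's BigQueryColumn, whose exact semantics may differ.

# Toy illustration only: FakeColumn is a hypothetical stand-in for an adapter
# column type; the real adapter flatten() may differ in detail.
from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class FakeColumn:
    name: str
    fields: list[FakeColumn] = field(default_factory=list)

    def flatten(self) -> list[FakeColumn]:
        # Already recursive: returns every nested field at any depth.
        out: list[FakeColumn] = []
        for child in self.fields:
            out.append(child)
            out.extend(child.flatten())
        return out


def old_collect(col: FakeColumn, seen: list[str]) -> None:
    # Pre-1.1.12 shape of the logic: record the column, then recurse into
    # its flattened fields, even though flatten() already descended for us.
    seen.append(col.name)
    for nested in col.flatten():
        old_collect(nested, seen)


payload = FakeColumn(
    "payload",
    fields=[
        FakeColumn("id"),
        FakeColumn("meta", fields=[FakeColumn("source"), FakeColumn("ts")]),
    ],
)

seen: list[str] = []
old_collect(payload, seen)
print(seen)
# ['payload', 'id', 'meta', 'source', 'ts', 'source', 'ts']
# 'source' and 'ts' are collected twice: once via payload.flatten() and again
# when the recursion reaches 'meta', whose flatten() returns them a second time.
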
3 changes: 3 additions & 0 deletions .changes/1.1.12.md
@@ -0,0 +1,3 @@
+## 1.1.12 - 2025-01-30
+### Fixed
+* Ensure we don't recurse when flattening struct columns, as the adapter is already recursive under the hood
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,10 @@ adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html),
 and is generated by [Changie](https://github.com/miniscruff/changie).
 
 
+## 1.1.12 - 2025-01-30
+### Fixed
+* Ensure we don't recurse when flattening struct columns, as the adapter is already recursive under the hood
+
 ## 1.1.11 - 2025-01-30
 ### Fixed
 * Ensure records and structs are flattened
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "dbt-osmosis"
-version = "1.1.11"
+version = "1.1.12"
 description = "A dbt utility for managing YAML to make developing with dbt more delightful."
 readme = "README.md"
 license = { text = "Apache-2.0" }
39 changes: 22 additions & 17 deletions src/dbt_osmosis/core/osmosis.py
@@ -853,26 +853,31 @@ def get_columns(context: YamlRefactorContext, ref: TableRef) -> dict[str, Column
     normalized_cols = OrderedDict()
     offset = 0
 
-    def process_column(col: BaseColumn | ColumnMetadata):
+    def process_column(col: BaseColumn | ColumnMetadata) -> None:
         nonlocal offset
-        if any(re.match(b, col.name) for b in context.ignore_patterns):
-            logger.debug(
-                ":no_entry_sign: Skipping column => %s due to skip pattern match.", col.name
-            )
-            return
-        normalized = normalize_column_name(col.name, context.project.runtime_cfg.credentials.type)
-        if isinstance(col, ColumnMetadata):
-            col_meta = col
+
+        if hasattr(col, "flatten"):
+            # flatten bq structs
+            cols = getattr(col, "flatten")()
         else:
-            dtype = _maybe_use_precise_dtype(col, context.settings)
-            col_meta = ColumnMetadata(
-                name=normalized, type=dtype, index=offset, comment=getattr(col, "comment", None)
+            cols = [col]
+
+        for col in cols:
+            if any(re.match(b, col.name) for b in context.ignore_patterns):
+                logger.debug(
+                    ":no_entry_sign: Skipping column => %s due to skip pattern match.", col.name
+                )
+                return
+            normalized = normalize_column_name(
+                col.name, context.project.runtime_cfg.credentials.type
             )
-        normalized_cols[normalized] = col_meta
-        offset += 1
-        if hasattr(col, "flatten"):
-            for struct_field in t.cast(Iterable[BaseColumn], getattr(col, "flatten")()):
-                process_column(struct_field)
+            if not isinstance(col, ColumnMetadata):
+                dtype = _maybe_use_precise_dtype(col, context.settings)
+                col = ColumnMetadata(
+                    name=normalized, type=dtype, index=offset, comment=getattr(col, "comment", None)
+                )
+            normalized_cols[normalized] = col
+            offset += 1
 
     if catalog := context.read_catalog():
         logger.debug(":blue_book: Catalog found => Checking for ref => %s", ref)
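
For reference, the fixed shape of the loop can be distilled as below. This is a sketch only: normalize() stands in for dbt-osmosis's normalize_column_name and ColumnMetadata bookkeeping, and the column objects are assumed to expose the same optional flatten() as in the toy example above. The design point is simply to flatten once at the top and then treat every yielded column uniformly, with no further recursion.

# Sketch of the fixed, single-pass pattern. normalize() is a hypothetical
# placeholder for the real normalize_column_name / ColumnMetadata handling.
from __future__ import annotations

from collections import OrderedDict
from typing import Any


def normalize(name: str) -> str:
    # Placeholder: the real code normalizes per adapter credentials type.
    return name.lower()


def collect_columns(columns: list[Any]) -> OrderedDict[str, Any]:
    normalized_cols: OrderedDict[str, Any] = OrderedDict()
    for col in columns:
        # Flatten exactly once; the adapter's flatten() is already recursive,
        # so no extra recursion is needed here.
        leaves = col.flatten() if hasattr(col, "flatten") else [col]
        for leaf in leaves:
            normalized_cols[normalize(leaf.name)] = leaf
    return normalized_cols

Run against the FakeColumn example above, collect_columns([payload]) records each nested field exactly once instead of duplicating the deeper struct members.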
