Adopt new name for ExposureRegionFactory
timj committed Jan 13, 2025
1 parent 5bbc85a commit 8c26931
Showing 1 changed file with 8 additions and 8 deletions.
16 changes: 8 additions & 8 deletions python/lsst/dax/obscore/obscore_exporter.py
@@ -38,7 +38,7 @@
 from lsst.daf.butler import Butler, DataCoordinate, ddl
 from lsst.daf.butler.formatters.parquet import arrow_to_numpy
 from lsst.daf.butler.registry.obscore import (
-    ExposureRegionFactory,
+    DerivedRegionFactory,
     ObsCoreSchema,
     RecordFactory,
     SpatialObsCorePlugin,
@@ -197,8 +197,8 @@ def close(self) -> None:
         super().close()
 
 
-class _ExposureRegionFactory(ExposureRegionFactory):
-    """Exposure region factory that returns an existing region, region is
+class _DerivedRegionFactory(DerivedRegionFactory):
+    """Region factory that returns an existing region, region is
     specified via `set` method, which should be called before calling
     record factory.
     """
@@ -214,7 +214,7 @@ def set(self, data_id: DataCoordinate, region: Region) -> None:
         ----------
         data_id : `~lsst.daf.butler.DataCoordinate`
             Data ID that will be matched against parameter of
-            `exposure_region`.
+            `derived_region`.
         region : `Region`
             Corresponding region.
         """
@@ -226,7 +226,7 @@ def reset(self) -> None:
         self._data_id = None
         self._region = None
 
-    def exposure_region(self, dataId: DataCoordinate) -> Region | None:
+    def derived_region(self, dataId: DataCoordinate) -> Region | None:
        # Docstring inherited.
         if dataId == self._data_id:
             return self._region
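
For illustration, here is a minimal sketch of the set()/derived_region() contract described in the class docstring above. It assumes the lsst.dax.obscore package with this change is importable; the data ID and region values are hypothetical placeholders rather than real DataCoordinate or sphgeom Region objects (the factory only compares the data ID with == and hands the stored region back), and the None result for a non-matching data ID is inferred from the Region | None return annotation, not shown in the hunk.

from lsst.dax.obscore.obscore_exporter import _DerivedRegionFactory

factory = _DerivedRegionFactory()

# Hypothetical stand-ins for a DataCoordinate and a Region.
data_id = ("exposure", 42)
region = "placeholder-region"

# The exporter calls set() just before invoking the record factory ...
factory.set(data_id, region)

# ... and the record factory asks for the region of the data ID it is handling.
assert factory.derived_region(data_id) == region

# A different data ID does not match the cached one (None inferred from the
# Region | None annotation).
assert factory.derived_region(("exposure", 43)) is None

# reset() clears the cached pair, so nothing matches afterwards.
factory.reset()
assert factory.derived_region(data_id) is None
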
@@ -254,10 +254,10 @@ def __init__(self, butler: Butler, config: ExporterConfig):
 
         self.schema = self._make_schema(schema.table_spec)
 
-        self._exposure_region_factory = _ExposureRegionFactory()
+        self._derived_region_factory = _DerivedRegionFactory()
         universe = self.butler.dimensions
         self.record_factory = RecordFactory.get_record_type_from_universe(universe)(
-            config, schema, universe, spatial_plugins, self._exposure_region_factory
+            config, schema, universe, spatial_plugins, self._derived_region_factory
         )
 
     def to_parquet(self, output: str) -> None:
@@ -500,7 +500,7 @@ def _make_record_batches(
                 _LOG.debug("New record, dataId=%s region=%s", dataId.mapping, region)
                 # _LOG.debug("New record, records=%s", dataId.records)
 
-                self._exposure_region_factory.set(dataId, region)
+                self._derived_region_factory.set(dataId, region)
                 record = self.record_factory(ref)
                 if record is None:
                     continue
