Rename fields to match new apdb schema
parejkoj committed May 31, 2023
1 parent bebdf9f commit 31e1fb8
Showing 10 changed files with 53 additions and 51 deletions.
2 changes: 1 addition & 1 deletion config/apdb-cassandra.py
@@ -52,7 +52,7 @@
config.part_pix_level=10

# Names ra/dec columns in DiaObject table
-config.ra_dec_columns=['ra', 'decl']
+config.ra_dec_columns=['ra', 'dec']

# If True then print/log timing information
config.timer=False
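The `decl` to `dec` rename ripples into any cached catalogs or notebooks built against the old schema. A minimal migration sketch (not part of this commit; pandas assumed, and the example frame is invented):

```python
import pandas

# Hypothetical catalog still using the pre-rename column names.
old_catalog = pandas.DataFrame(
    {"diaObjectId": [1, 2], "ra": [10.5, 11.0], "decl": [-42.0, -41.5]}
)

# Map the old names onto the new apdb schema; absent columns are ignored.
new_catalog = old_catalog.rename(
    columns={"decl": "dec", "declSigma": "decSigma", "ra_decl_Cov": "ra_dec_Cov"}
)
assert list(new_catalog.columns) == ["diaObjectId", "ra", "dec"]
```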
4 changes: 2 additions & 2 deletions config/apdb-mysql.py
@@ -37,8 +37,8 @@

# List of columns to read from DiaObject, by default read all columns
config.dia_object_columns = [
"diaObjectId", "lastNonForcedSource", "ra", "decl",
"raSigma", "declSigma", "ra_decl_Cov", "pixelId"
"diaObjectId", "lastNonForcedSource", "ra", "dec",
"raSigma", "decSigma", "ra_dec_Cov", "pixelId"
]

# If True (default) then use "upsert" for DiaObjectsLast table
4 changes: 2 additions & 2 deletions config/apdb-pg.py
@@ -37,8 +37,8 @@

# List of columns to read from DiaObject, by default read all columns
config.dia_object_columns = [
"diaObjectId", "lastNonForcedSource", "ra", "decl",
"raSigma", "declSigma", "ra_decl_Cov", "pixelId"
"diaObjectId", "lastNonForcedSource", "ra", "dec",
"raSigma", "decSigma", "ra_dec_Cov", "pixelId"
]

# If True (default) then use "upsert" for DiaObjectsLast table
4 changes: 2 additions & 2 deletions config/apdb-sqlite.py
@@ -37,8 +37,8 @@

# List of columns to read from DiaObject, by default read all columns
config.dia_object_columns = [
"diaObjectId", "lastNonForcedSource", "ra", "decl",
"raSigma", "declSigma", "ra_decl_Cov", "pixelId"
"diaObjectId", "lastNonForcedSource", "ra", "dec",
"raSigma", "decSigma", "ra_dec_Cov", "pixelId"
]

# If True (default) then use "upsert" for DiaObjectsLast table
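The same two-line substitution recurs across the MySQL, PostgreSQL, and SQLite overrides above. A hypothetical guard (not in this commit) that would catch stale names in such column lists before they reach the database layer:

```python
# Pre-rename to post-rename column names introduced by this commit.
_OLD_TO_NEW = {
    "decl": "dec",
    "declSigma": "decSigma",
    "ra_decl_Cov": "ra_dec_Cov",
    "midPointTai": "midpointMjdTai",
}


def check_column_names(columns: list[str]) -> None:
    """Raise if a column list still uses pre-rename apdb names."""
    stale = {c: _OLD_TO_NEW[c] for c in columns if c in _OLD_TO_NEW}
    if stale:
        raise ValueError(f"Stale apdb column names; rename as follows: {stale}")


check_column_names(["diaObjectId", "ra", "dec"])  # passes silently
```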
6 changes: 3 additions & 3 deletions python/lsst/dax/apdb/apdbCassandra.py
@@ -101,7 +101,7 @@ class ApdbCassandraConfig(ApdbConfig):
)
part_pix_level = Field[int](doc="Pixelization level used for partitioning index.", default=10)
part_pix_max_ranges = Field[int](doc="Max number of ranges in pixelization envelope", default=64)
-ra_dec_columns = ListField[str](default=["ra", "decl"], doc="Names of ra/dec columns in DiaObject table")
+ra_dec_columns = ListField[str](default=["ra", "dec"], doc="Names of ra/dec columns in DiaObject table")
timer = Field[bool](doc="If True then print/log timing information", default=False)
time_partition_tables = Field[bool](
doc="Use per-partition tables for sources instead of partitioning by time", default=True
@@ -624,8 +624,8 @@ def _getSources(
if len(object_id_set) > 0:
catalog = cast(pandas.DataFrame, catalog[catalog["diaObjectId"].isin(object_id_set)])

-# precise filtering on midPointTai
-catalog = cast(pandas.DataFrame, catalog[catalog["midPointTai"] > mjd_start])
+# precise filtering on midpointMjdTai
+catalog = cast(pandas.DataFrame, catalog[catalog["midpointMjdTai"] > mjd_start])

_LOG.debug("found %d %ss", catalog.shape[0], table_name.name)
return catalog
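The precise filter is plain pandas boolean indexing on the renamed column; a toy frame (values invented) makes the cut concrete:

```python
import pandas

catalog = pandas.DataFrame(
    {"diaObjectId": [1, 2, 3], "midpointMjdTai": [59999.5, 60000.5, 60001.5]}
)
mjd_start = 60000.0
# Keep only sources whose mid-exposure time falls after the cutoff.
catalog = catalog[catalog["midpointMjdTai"] > mjd_start]
print(len(catalog))  # 2
```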
40 changes: 21 additions & 19 deletions python/lsst/dax/apdb/apdbSql.py
@@ -103,7 +103,7 @@ def _coerce_uint64(df: pandas.DataFrame) -> pandas.DataFrame:
return df.astype({name: np.int64 for name in names})


-def _make_midPointTai_start(visit_time: dafBase.DateTime, months: int) -> float:
+def _make_midpointMjdTai_start(visit_time: dafBase.DateTime, months: int) -> float:
"""Calculate starting point for time-based source search.
Parameters
@@ -116,7 +116,7 @@ def _make_midPointTai_start(visit_time: dafBase.DateTime, months: int) -> float:
Returns
-------
time : `float`
-A ``midPointTai`` starting point, MJD time.
+A ``midpointMjdTai`` starting point, MJD time.
"""
# TODO: `system` must be consistent with the code in ap_association
# (see DM-31996)
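The body of the helper is collapsed in this view. A sketch of what it plausibly computes, assuming months are approximated as 30-day blocks (that constant is an assumption, not shown in the diff):

```python
from lsst.daf.base import DateTime


def _make_midpointMjdTai_start_sketch(visit_time: DateTime, months: int) -> float:
    # Step back `months` (taken as 30-day blocks here) from the visit time,
    # expressed as MJD; the time system must match ap_association (DM-31996).
    return visit_time.get(system=DateTime.MJD) - months * 30
```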
@@ -169,7 +169,7 @@ class ApdbSqlConfig(ApdbConfig):
htm_index_column = Field[str](
default="pixelId", doc="Name of a HTM index column for DiaObject and DiaSource tables"
)
-ra_dec_columns = ListField[str](default=["ra", "decl"], doc="Names of ra/dec columns in DiaObject table")
+ra_dec_columns = ListField[str](default=["ra", "dec"], doc="Names of ra/dec columns in DiaObject table")
dia_object_columns = ListField[str](
doc="List of columns to read from DiaObject, by default read all columns", default=[]
)
@@ -393,12 +393,14 @@ def getDiaForcedSources(
raise NotImplementedError("Region-based selection is not supported")

# TODO: DateTime.MJD must be consistent with code in ap_association,
-# alternatively we can fill midPointTai ourselves in store()
-midPointTai_start = _make_midPointTai_start(visit_time, self.config.read_forced_sources_months)
-_LOG.debug("midPointTai_start = %.6f", midPointTai_start)
+# alternatively we can fill midpointMjdTai ourselves in store()
+midpointMjdTai_start = _make_midpointMjdTai_start(visit_time, self.config.read_forced_sources_months)
+_LOG.debug("midpointMjdTai_start = %.6f", midpointMjdTai_start)

with Timer("DiaForcedSource select", self.config.timer):
-sources = self._getSourcesByIDs(ApdbTables.DiaForcedSource, list(object_ids), midPointTai_start)
+sources = self._getSourcesByIDs(ApdbTables.DiaForcedSource,
+                                list(object_ids),
+                                midpointMjdTai_start)

_LOG.debug("found %s DiaForcedSources", len(sources))
return sources
@@ -598,16 +600,16 @@ def _getDiaSourcesInRegion(self, region: Region, visit_time: dafBase.DateTime) -
Catalog containing DiaSource records.
"""
# TODO: DateTime.MJD must be consistent with code in ap_association,
-# alternatively we can fill midPointTai ourselves in store()
-midPointTai_start = _make_midPointTai_start(visit_time, self.config.read_sources_months)
-_LOG.debug("midPointTai_start = %.6f", midPointTai_start)
+# alternatively we can fill midpointMjdTai ourselves in store()
+midpointMjdTai_start = _make_midpointMjdTai_start(visit_time, self.config.read_sources_months)
+_LOG.debug("midpointMjdTai_start = %.6f", midpointMjdTai_start)

table = self._schema.get_table(ApdbTables.DiaSource)
columns = self._schema.get_apdb_columns(ApdbTables.DiaSource)
query = sql.select(*columns)

# build selection
-time_filter = table.columns["midPointTai"] > midPointTai_start
+time_filter = table.columns["midpointMjdTai"] > midpointMjdTai_start
where = sql.expression.and_(self._filterRegion(table, region), time_filter)
query = query.where(where)

@@ -634,18 +636,18 @@ def _getDiaSourcesByIDs(self, object_ids: List[int], visit_time: dafBase.DateTim
Catalog containing DiaSource records.
"""
# TODO: DateTime.MJD must be consistent with code in ap_association,
-# alternatively we can fill midPointTai ourselves in store()
-midPointTai_start = _make_midPointTai_start(visit_time, self.config.read_sources_months)
-_LOG.debug("midPointTai_start = %.6f", midPointTai_start)
+# alternatively we can fill midpointMjdTai ourselves in store()
+midpointMjdTai_start = _make_midpointMjdTai_start(visit_time, self.config.read_sources_months)
+_LOG.debug("midpointMjdTai_start = %.6f", midpointMjdTai_start)

with Timer("DiaSource select", self.config.timer):
-sources = self._getSourcesByIDs(ApdbTables.DiaSource, object_ids, midPointTai_start)
+sources = self._getSourcesByIDs(ApdbTables.DiaSource, object_ids, midpointMjdTai_start)

_LOG.debug("found %s DiaSources", len(sources))
return sources

def _getSourcesByIDs(
-self, table_enum: ApdbTables, object_ids: List[int], midPointTai_start: float
+self, table_enum: ApdbTables, object_ids: List[int], midpointMjdTai_start: float
) -> pandas.DataFrame:
"""Returns catalog of DiaSource or DiaForcedSource instances given set
of DiaObject IDs.
@@ -656,8 +658,8 @@ def _getSourcesByIDs(
Database table.
object_ids :
Collection of DiaObject IDs
-midPointTai_start : `float`
-Earliest midPointTai to retrieve.
+midpointMjdTai_start : `float`
+Earliest midpointMjdTai to retrieve.
Returns
-------
@@ -688,7 +690,7 @@ def _getSourcesByIDs(
query = query.where(
sql.expression.and_(
table.columns["diaObjectId"].in_(int_ids),
table.columns["midPointTai"] > midPointTai_start,
table.columns["midpointMjdTai"] > midpointMjdTai_start,
)
)

4 changes: 2 additions & 2 deletions python/lsst/dax/apdb/tests/_apdb.py
@@ -410,7 +410,7 @@ def test_reassignObjects(self) -> None:
)
self.assert_catalog(res, len(sources) - 3, ApdbTables.DiaSource)

-def test_midPointTai_src(self) -> None:
+def test_midpointMjdTai_src(self) -> None:
"""Test for time filtering of DiaSources."""
config = self.make_config()
apdb = make_apdb(config)
@@ -448,7 +448,7 @@ def test_midPointTai_src(self) -> None:
res = apdb.getDiaSources(region, oids, visit_time2)
self.assert_catalog(res, 0, ApdbTables.DiaSource)

-def test_midPointTai_fsrc(self) -> None:
+def test_midpointMjdTai_fsrc(self) -> None:
"""Test for time filtering of DiaForcedSources."""
config = self.make_config()
apdb = make_apdb(config)
16 changes: 8 additions & 8 deletions python/lsst/dax/apdb/tests/data_factory.py
@@ -87,21 +87,21 @@ def makeObjectCatalog(
Notes
-----
Returned catalog only contains three columns - ``diaObjectId``, ``ra``, and
-``decl`` (in degrees).
+``dec`` (in degrees).
"""
points = list(_genPointsInRegion(region, count))
# diaObjectId=0 may be used in some code for DiaSource foreign key to mean
# the same as ``None``.
ids = numpy.arange(start_id, len(points) + start_id, dtype=numpy.int64)
ras = numpy.array([sp.getRa().asDegrees() for sp in points], dtype=numpy.float64)
-decls = numpy.array([sp.getDec().asDegrees() for sp in points], dtype=numpy.float64)
+decs = numpy.array([sp.getDec().asDegrees() for sp in points], dtype=numpy.float64)
nDiaSources = numpy.ones(len(points), dtype=numpy.int32)
dt = visit_time.toPython()
data = dict(
kwargs,
diaObjectId=ids,
ra=ras,
-decl=decls,
+dec=decs,
nDiaSources=nDiaSources,
lastNonForcedSource=dt,
)
@@ -136,16 +136,16 @@ def makeSourceCatalog(
Returned catalog only contains small number of columns needed for tests.
"""
nrows = len(objects)
-midPointTai = visit_time.get(system=DateTime.MJD)
+midpointMjdTai = visit_time.get(system=DateTime.MJD)
df = pandas.DataFrame(
{
"diaSourceId": numpy.arange(start_id, start_id + nrows, dtype=numpy.int64),
"diaObjectId": objects["diaObjectId"],
"ccdVisitId": numpy.full(nrows, ccdVisitId, dtype=numpy.int64),
"parentDiaSourceId": 0,
"ra": objects["ra"],
"decl": objects["decl"],
"midPointTai": numpy.full(nrows, midPointTai, dtype=numpy.float64),
"dec": objects["dec"],
"midpointMjdTai": numpy.full(nrows, midpointMjdTai, dtype=numpy.float64),
"flags": numpy.full(nrows, 0, dtype=numpy.int64),
}
)
@@ -177,12 +177,12 @@ def makeForcedSourceCatalog(
Returned catalog only contains small number of columns needed for tests.
"""
nrows = len(objects)
-midPointTai = visit_time.get(system=DateTime.MJD)
+midpointMjdTai = visit_time.get(system=DateTime.MJD)
df = pandas.DataFrame(
{
"diaObjectId": objects["diaObjectId"],
"ccdVisitId": numpy.full(nrows, ccdVisitId, dtype=numpy.int64),
"midPointTai": numpy.full(nrows, midPointTai, dtype=numpy.float64),
"midpointMjdTai": numpy.full(nrows, midpointMjdTai, dtype=numpy.float64),
"flags": numpy.full(nrows, 0, dtype=numpy.int64),
}
)
22 changes: 11 additions & 11 deletions tests/config/schema.yaml
@@ -36,11 +36,11 @@ tables:
mysql:datatype: DOUBLE
fits:tunit: deg
ivoa:ucd: pos.eq.ra
-- name: decl
-"@id": "#DiaObject.decl"
+- name: dec
+"@id": "#DiaObject.dec"
datatype: double
nullable: false
-description: Decl-coordinate of the position of the object at time radecTai.
+description: Dec-coordinate of the position of the object at time radecTai.
mysql:datatype: DOUBLE
fits:tunit: deg
ivoa:ucd: pos.eq.dec
@@ -150,8 +150,8 @@ tables:
mysql:datatype: DOUBLE
fits:tunit: deg
ivoa:ucd: pos.eq.ra
-- name: decl
-"@id": "#DiaSource.decl"
+- name: dec
+"@id": "#DiaSource.dec"
datatype: double
nullable: false
description: " Decl-coordinate of the center of this diaSource."
@@ -165,8 +165,8 @@ tables:
description: Time when this diaSource was reassociated from diaObject to ssObject
(if such reassociation happens, otherwise NULL).
mysql:datatype: DATETIME
-- name: midPointTai
-"@id": "#DiaSource.midPointTai"
+- name: midpointMjdTai
+"@id": "#DiaSource.midpointMjdTai"
datatype: double
nullable: false
description: Effective mid-exposure time for this diaSource.
@@ -215,8 +215,8 @@ tables:
span multiple ccds.
mysql:datatype: BIGINT
ivoa:ucd: meta.id;obs.image
-- name: midPointTai
-"@id": "#DiaForcedSource.midPointTai"
+- name: midpointMjdTai
+"@id": "#DiaForcedSource.midpointMjdTai"
datatype: double
nullable: false
description: Effective mid-exposure time for this diaForcedSource
@@ -287,8 +287,8 @@ tables:
description: RA-coordinate of the position of the object at time radecTai.
ivoa:ucd: pos.eq.ra
fits:tunit: deg
-- name: decl
-"@id": "#DiaObjectLast.decl"
+- name: dec
+"@id": "#DiaObjectLast.dec"
datatype: double
nullable: false
description: Decl-coordinate of the position of the object at time radecTai.
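A quick cross-check that no stale names survive in the test schema; a sketch assuming the felis-style layout visible above, with each table carrying a ``columns`` list (the exact key names are an assumption):

```python
import yaml

with open("tests/config/schema.yaml") as f:
    schema = yaml.safe_load(f)

stale = {"decl", "midPointTai"}
for table in schema.get("tables", []):
    names = {column["name"] for column in table.get("columns", [])}
    leftover = names & stale
    assert not leftover, f"stale columns {leftover} in {table.get('name')}"
```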
2 changes: 1 addition & 1 deletion tests/test_ap_verify_queries.py
@@ -56,7 +56,7 @@ def createTestObjects(
data = {
id_column_name: numpy.arange(n_objects, dtype=numpy.int64),
"ra": numpy.full(n_objects, 1 * geom.degrees, dtype=numpy.float64),
"decl": numpy.full(n_objects, 1 * geom.degrees, dtype=numpy.float64),
"dec": numpy.full(n_objects, 1 * geom.degrees, dtype=numpy.float64),
}
for field, type in extra_fields.items():
data[field] = numpy.ones(n_objects, dtype=type)
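For illustration, a standalone version of the dictionary above with plain floats in place of the `geom.degrees` quantities, showing the frame that downstream queries now expect:

```python
import numpy
import pandas

n_objects = 5
data = {
    "diaObjectId": numpy.arange(n_objects, dtype=numpy.int64),
    "ra": numpy.full(n_objects, 1.0, dtype=numpy.float64),
    "dec": numpy.full(n_objects, 1.0, dtype=numpy.float64),
}
objects = pandas.DataFrame(data)
assert "dec" in objects.columns and "decl" not in objects.columns
```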
