Use primary catalog schema for margin cache. #232

Merged · 1 commit · Feb 21, 2024
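In short, this change threads the primary (input) catalog's parquet schema into margin-cache reduction: the reducer reads the schema from the catalog's `_common_metadata` file, appends `margin_Norder`, `margin_Dir`, and `margin_Npix` fields, and writes every margin pixel file against that combined schema. The map step also carries `_hipscat_index` through the merge so it can serve as each shard's sorted index.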
1 change: 1 addition & 0 deletions src/hipscat_import/margin_cache/margin_cache.py
@@ -88,6 +88,7 @@ def _reduce_margin_shards(client, args, partition_pixels):
output_path=args.catalog_path,
partition_order=pix.order,
partition_pixel=pix.pixel,
+ original_catalog_metadata=paths.get_common_metadata_pointer(args.input_catalog_path),
)
)

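For context, `paths.get_common_metadata_pointer` presumably resolves to the catalog's `_common_metadata` file, the parquet sidecar that records the canonical schema. A minimal sketch of the call, with an illustrative path:

```python
from hipscat.io import paths

# Assumption from the name and its use above: returns a pointer to
# <catalog>/_common_metadata, which stores the catalog's parquet schema.
metadata_pointer = paths.get_common_metadata_pointer("/data/small_sky_source_catalog")
```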
19 changes: 16 additions & 3 deletions src/hipscat_import/margin_cache/margin_cache_map_reduce.py
@@ -1,10 +1,12 @@
import healpy as hp
import numpy as np
+ import pyarrow as pa
import pyarrow.dataset as ds
from hipscat import pixel_math
from hipscat.catalog.partition_info import PartitionInfo
from hipscat.io import file_io, paths
from hipscat.pixel_math.healpix_pixel import HealpixPixel
+ from hipscat.pixel_math.hipscat_id import HIPSCAT_ID_COLUMN

from hipscat_import.pipeline_resume_plan import get_pixel_cache_directory

@@ -29,7 +31,7 @@ def map_pixel_shards(
nest=True,
)

- constrained_data = data.merge(margin_pairs, on="margin_pixel")
+ constrained_data = data.reset_index().merge(margin_pairs, on="margin_pixel")

if len(constrained_data):
constrained_data.groupby(["partition_order", "partition_pixel"]).apply(
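The `reset_index()` matters because pandas' `merge()` discards the left frame's index, and the mapped data here is indexed by `_hipscat_index`. A minimal pandas sketch of the behavior (toy frames, not the pipeline's real columns):

```python
import pandas as pd

data = pd.DataFrame(
    {"margin_pixel": [7, 8]},
    index=pd.Index([101, 202], name="_hipscat_index"),
)
margin_pairs = pd.DataFrame({"margin_pixel": [7, 8], "partition_pixel": [0, 1]})

# A plain merge silently drops the named index:
lost = data.merge(margin_pairs, on="margin_pixel")
assert "_hipscat_index" not in lost.columns

# reset_index() first moves _hipscat_index into a column, so it survives:
kept = data.reset_index().merge(margin_pairs, on="margin_pixel")
assert "_hipscat_index" in kept.columns
```

Downstream, `_to_pixel_shard` restores it with `set_index(HIPSCAT_ID_COLUMN).sort_index()`, as shown in the next hunk.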
@@ -90,13 +92,16 @@ def _to_pixel_shard(data, margin_threshold, output_path, ra_column, dec_column):
PartitionInfo.METADATA_PIXEL_COLUMN_NAME: np.uint64,
}
)
+ final_df = final_df.set_index(HIPSCAT_ID_COLUMN).sort_index()

final_df.to_parquet(shard_path)

del data, margin_data, final_df


- def reduce_margin_shards(intermediate_directory, output_path, partition_order, partition_pixel):
+ def reduce_margin_shards(
+     intermediate_directory, output_path, partition_order, partition_pixel, original_catalog_metadata
+ ):
"""Reduce all partition pixel directories into a single file"""
shard_dir = get_pixel_cache_directory(
intermediate_directory, HealpixPixel(partition_order, partition_pixel)
@@ -108,7 +113,15 @@ def reduce_margin_shards(intermediate_directory, output_path, partition_order, p
file_io.make_directory(margin_cache_dir, exist_ok=True)

if len(full_df):
+ schema = file_io.read_parquet_metadata(original_catalog_metadata).schema.to_arrow_schema()

+ schema = (
+     schema.append(pa.field("margin_Norder", pa.uint8()))
+     .append(pa.field("margin_Dir", pa.uint64()))
+     .append(pa.field("margin_Npix", pa.uint64()))
+ )

margin_cache_file_path = paths.pixel_catalog_file(output_path, partition_order, partition_pixel)

- full_df.to_parquet(margin_cache_file_path)
+ full_df.to_parquet(margin_cache_file_path, schema=schema)
file_io.remove_directory(shard_dir)
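A minimal sketch of the schema handling above, using a toy stand-in for the schema that `read_parquet_metadata(...).schema.to_arrow_schema()` returns:

```python
import pandas as pd
import pyarrow as pa

# Toy stand-in for the primary catalog's arrow schema.
schema = pa.schema(
    [pa.field("source_id", pa.int64()), pa.field("source_ra", pa.float64())]
)

# Append the margin partition columns, mirroring the diff above.
schema = (
    schema.append(pa.field("margin_Norder", pa.uint8()))
    .append(pa.field("margin_Dir", pa.uint64()))
    .append(pa.field("margin_Npix", pa.uint64()))
)

shard = pd.DataFrame(
    {
        "source_id": [700],
        "source_ra": [315.0],
        "margin_Norder": [0],
        "margin_Dir": [0],
        "margin_Npix": [4],
    }
)
# schema= makes pyarrow cast to the catalog's canonical types instead of
# whatever dtypes pandas inferred for this particular shard.
shard.to_parquet("margin_shard.parquet", schema=schema)
```

The point of passing `schema=` is that every margin file ends up type-compatible with the primary catalog, even when an individual shard's inferred dtypes would differ.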
9 changes: 7 additions & 2 deletions tests/hipscat_import/conftest.py
@@ -8,6 +8,7 @@
import numpy.testing as npt
import pandas as pd
import pytest
+ from hipscat import pixel_math

# pylint: disable=missing-function-docstring, redefined-outer-name

@@ -179,10 +180,12 @@ def basic_data_shard_df():
porder = np.full(360, 1)
norder = np.full(360, 1)
npix = np.full(360, 0)
+ hipscat_indexes = pixel_math.compute_hipscat_id(ras, dec)

test_df = pd.DataFrame(
- data=zip(ras, dec, ppix, porder, norder, npix),
+ data=zip(hipscat_indexes, ras, dec, ppix, porder, norder, npix),
columns=[
"_hipscat_index",
"weird_ra",
"weird_dec",
"partition_pixel",
@@ -211,10 +214,12 @@ def polar_data_shard_df():
porder = np.full(360, 2)
norder = np.full(360, 2)
npix = np.full(360, 0)
+ hipscat_indexes = pixel_math.compute_hipscat_id(ras, dec)

test_df = pd.DataFrame(
- data=zip(ras, dec, ppix, porder, norder, npix),
+ data=zip(hipscat_indexes, ras, dec, ppix, porder, norder, npix),
columns=[
"_hipscat_index",
"weird_ra",
"weird_dec",
"partition_pixel",
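The fixture change above computes explicit `_hipscat_index` values for the synthetic shards. A small sketch of the call (the uint64 dtype is an assumption based on how the index is used as a sortable spatial key):

```python
import numpy as np
from hipscat import pixel_math

ras = np.arange(0.0, 360.0)
decs = np.full(360, 0.0)

# Packs each (ra, dec) position into a single integer spatial index.
hipscat_indexes = pixel_math.compute_hipscat_id(ras, decs)
assert hipscat_indexes.dtype == np.uint64  # assumption noted above
```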
Binary file modified tests/hipscat_import/data/small_sky_source_catalog/_metadata
(16 additional binary test-data files changed; file names and contents not shown)
@@ -5,4 +5,4 @@
"epoch": "J2000",
"ra_column": "source_ra",
"dec_column": "source_dec"
}
}
@@ -1,15 +1,15 @@
- Norder,Dir,Npix,num_rows
- 0,0,4,50
- 1,0,47,2395
- 2,0,176,385
- 2,0,177,1510
- 2,0,178,1634
- 2,0,179,1773
- 2,0,180,655
- 2,0,181,903
- 2,0,182,1246
- 2,0,183,1143
- 2,0,184,1390
- 2,0,185,2942
- 2,0,186,452
- 2,0,187,683
+ Norder,Npix,Dir
+ 0,4,0
+ 1,47,0
+ 2,176,0
+ 2,177,0
+ 2,178,0
+ 2,179,0
+ 2,180,0
+ 2,181,0
+ 2,182,0
+ 2,183,0
+ 2,184,0
+ 2,185,0
+ 2,186,0
+ 2,187,0
@@ -1,39 +1,42 @@
{
"catalog_name": "small_sky_source_catalog",
"catalog_type": "source",
- "version": "0.0.10.dev7+g0a79f90.d20230418",
- "generation_date": "2023.04.20",
- "epoch": "J2000",
- "ra_kw": "source_ra",
- "dec_kw": "source_dec",
"total_rows": 17161,
+ "epoch": "J2000",
+ "ra_column": "source_ra",
+ "dec_column": "source_dec",
+ "version": "0.2.6.dev9+ga051d36",
+ "generation_date": "2024.02.21",
"tool_args": {
"tool_name": "hipscat_import",
- "version": "0.0.4.dev28+g2e31821.d20230420",
+ "version": "0.2.4.dev9+g5808d3f",
"runtime_args": {
"catalog_name": "small_sky_source_catalog",
- "output_path": "/home/data",
+ "output_path": "/home/delucchi/git/hipscat-import/tests/hipscat_import/data",
"output_artifact_name": "small_sky_source_catalog",
"tmp_dir": "",
"overwrite": true,
- "dask_tmp": "/tmp/pytest-of-delucchi/pytest-1261/test_dask_runner_source_table0",
+ "dask_tmp": "/tmp/user/11115/pytest-of-delucchi/pytest-184/test_import_source_table0",
"dask_n_workers": 1,
"dask_threads_per_worker": 1,
- "catalog_path": "/home/data/small_sky_source_catalog",
- "tmp_path": "/tmp/pytest-of-delucchi/pytest-1261/test_dask_runner_source_table0/small_sky_source_catalog/intermediate",
+ "catalog_path": "/home/delucchi/git/hipscat-import/tests/hipscat_import/data/small_sky_source_catalog",
+ "tmp_path": "/tmp/user/11115/pytest-of-delucchi/pytest-184/test_import_source_table0/small_sky_source_catalog/intermediate",
"epoch": "J2000",
"catalog_type": "source",
- "input_path": "/home/data/small_sky_source",
+ "input_path": "/home/delucchi/git/hipscat-import/tests/hipscat_import/data/small_sky_source",
"input_paths": [
- "/home/data/small_sky_source/small_sky_source.csv"
+ "file:///home/delucchi/git/hipscat-import/tests/hipscat_import/data/small_sky_source/small_sky_source.csv"
],
"input_format": "csv",
"input_file_list": [],
"ra_column": "source_ra",
"dec_column": "source_dec",
"use_hipscat_index": false,
"sort_columns": "source_id",
"constant_healpix_order": -1,
"highest_healpix_order": 2,
"pixel_threshold": 3000,
"mapping_healpix_order": 2,
"debug_stats_only": false,
"file_reader_info": {
"input_reader_type": "CsvReader",
@@ -46,4 +49,4 @@
}
}
}
}
}
22 changes: 22 additions & 0 deletions tests/hipscat_import/margin_cache/test_margin_cache.py
@@ -47,6 +47,28 @@ def test_margin_cache_gen(small_sky_source_catalog, tmp_path, dask_client):
assert data.dtypes[PartitionInfo.METADATA_DIR_COLUMN_NAME] == np.uint64
assert data.dtypes[PartitionInfo.METADATA_PIXEL_COLUMN_NAME] == np.uint64

+ npt.assert_array_equal(
+     data.columns,
+     [
+         "source_id",
+         "source_ra",
+         "source_dec",
+         "mjd",
+         "mag",
+         "band",
+         "object_id",
+         "object_ra",
+         "object_dec",
+         "Norder",
+         "Dir",
+         "Npix",
+         "margin_Norder",
+         "margin_Dir",
+         "margin_Npix",
+     ],
+ )
+ assert data.index.name == "_hipscat_index"

catalog = HealpixDataset.read_from_hipscat(args.catalog_path)
assert catalog.on_disk
assert catalog.catalog_path == args.catalog_path
43 changes: 39 additions & 4 deletions tests/hipscat_import/margin_cache/test_margin_cache_map_reduce.py
@@ -1,7 +1,10 @@
import os

+ import healpy as hp
import numpy as np
+ import pandas as pd
import pytest
+ from hipscat import pixel_math
from hipscat.io import paths
from hipscat.pixel_math.healpix_pixel import HealpixPixel

@@ -78,12 +81,44 @@ def test_reduce_margin_shards(tmp_path, basic_data_shard_df):
first_shard_path = paths.pixel_catalog_file(partition_dir, 1, 0)
second_shard_path = paths.pixel_catalog_file(partition_dir, 1, 1)

- shard_df = basic_data_shard_df.drop(columns=["partition_order", "partition_pixel", "margin_pixel"])
+ ras = np.arange(0.0, 360.0)
+ dec = np.full(360, 0.0)
+ norder = np.full(360, 1)
+ ndir = np.full(360, 0)
+ npix = np.full(360, 0)
+ hipscat_indexes = pixel_math.compute_hipscat_id(ras, dec)
+ margin_order = np.full(360, 0)
+ margin_dir = np.full(360, 0)
+ margin_pixels = hp.ang2pix(2**3, ras, dec, lonlat=True, nest=True)

+ test_df = pd.DataFrame(
+     data=zip(hipscat_indexes, ras, dec, norder, ndir, npix, margin_order, margin_dir, margin_pixels),
+     columns=[
+         "_hipscat_index",
+         "weird_ra",
+         "weird_dec",
+         "Norder",
+         "Dir",
+         "Npix",
+         "margin_Norder",
+         "margin_Dir",
+         "margin_Npix",
+     ],
+ )

+ # Create a schema parquet file.
+ schema_path = os.path.join(tmp_path, "metadata.parquet")
+ schema_df = test_df.drop(columns=["margin_Norder", "margin_Dir", "margin_Npix"])
+ schema_df.to_parquet(schema_path)

+ basic_data_shard_df = test_df

- shard_df.to_parquet(first_shard_path)
- shard_df.to_parquet(second_shard_path)
+ basic_data_shard_df.to_parquet(first_shard_path)
+ basic_data_shard_df.to_parquet(second_shard_path)

- margin_cache_map_reduce.reduce_margin_shards(intermediate_dir, tmp_path, 1, 21)
+ margin_cache_map_reduce.reduce_margin_shards(
+     intermediate_dir, tmp_path, 1, 21, original_catalog_metadata=schema_path
+ )

result_path = paths.pixel_catalog_file(tmp_path, 1, 21)
