change dir name to docs and edit README for that #588

Merged: 5 commits, Apr 27, 2022
10 changes: 5 additions & 5 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: git://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
hooks:
- id: trailing-whitespace
- id: check-added-large-files
@@ -13,8 +13,8 @@ repos:
- id: end-of-file-fixer
- id: mixed-line-ending
args: ['--fix=no']
- repo: https://gitlab.com/pycqa/flake8
rev: '3.9.2'
- repo: https://github.com/pycqa/flake8
rev: '4.0.1'
hooks:
- id: flake8
additional_dependencies:
@@ -25,6 +25,6 @@ repos:
- flake8-rst-docstrings

- repo: https://github.com/psf/black
rev: 21.11b1
rev: 22.3.0
hooks:
- id: black
2 changes: 1 addition & 1 deletion README.md
@@ -8,7 +8,7 @@ This is the main repository for HERA's monitor and control subsystems.
# Documentation
A detailed description of the monitor and control system and all the
related database tables can be found in our
[description document](Docs/mc_definition.pdf).
[description document](docs/mc_definition.pdf).

# Installation
Installation instructions may be found in [INSTALL.md](./INSTALL.md).
2 changes: 1 addition & 1 deletion alembic/versions/a68c0e31204e_onsite_baseline_july_2017.py
@@ -138,7 +138,7 @@ def upgrade():
sa.Column("created_date", sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(
["station_type_name"],
[u"station_type.station_type_name"],
["station_type.station_type_name"],
),
sa.PrimaryKeyConstraint("station_name"),
)
80 changes: 40 additions & 40 deletions alembic/versions/b1063869f198_bring_up_to_master.py
@@ -138,13 +138,13 @@ def upgrade():
)
op.drop_constraint("connections_pkey", "connections", type_="primary")
op.add_column(
u"connections", sa.Column("start_gpstime", sa.BigInteger(), nullable=False)
"connections", sa.Column("start_gpstime", sa.BigInteger(), nullable=False)
)
op.add_column(
u"connections", sa.Column("stop_gpstime", sa.BigInteger(), nullable=True)
"connections", sa.Column("stop_gpstime", sa.BigInteger(), nullable=True)
)
op.drop_column(u"connections", "stop_date")
op.drop_column(u"connections", "start_date")
op.drop_column("connections", "stop_date")
op.drop_column("connections", "start_date")
op.create_primary_key(
"connections_pkey",
"connections",
@@ -160,22 +160,22 @@ def upgrade():
)

op.add_column(
u"geo_location", sa.Column("created_gpstime", sa.BigInteger(), nullable=False)
"geo_location", sa.Column("created_gpstime", sa.BigInteger(), nullable=False)
)
op.drop_column(u"geo_location", "created_date")
op.drop_column("geo_location", "created_date")

op.add_column(u"hera_obs", sa.Column("jd_start", sa.Float(), nullable=False))
op.add_column(u"hera_obs", sa.Column("starttime", sa.Float(), nullable=False))
op.add_column(u"hera_obs", sa.Column("stoptime", sa.Float(), nullable=False))
op.drop_column(u"hera_obs", "stop_time_jd")
op.drop_column(u"hera_obs", "start_time_jd")
op.add_column("hera_obs", sa.Column("jd_start", sa.Float(), nullable=False))
op.add_column("hera_obs", sa.Column("starttime", sa.Float(), nullable=False))
op.add_column("hera_obs", sa.Column("stoptime", sa.Float(), nullable=False))
op.drop_column("hera_obs", "stop_time_jd")
op.drop_column("hera_obs", "start_time_jd")

op.drop_constraint("paper_temperatures_pkey", "paper_temperatures", type_="primary")
op.add_column(
u"paper_temperatures", sa.Column("time", sa.BigInteger(), nullable=False)
"paper_temperatures", sa.Column("time", sa.BigInteger(), nullable=False)
)
op.drop_column(u"paper_temperatures", "gps_time")
op.drop_column(u"paper_temperatures", "jd_time")
op.drop_column("paper_temperatures", "gps_time")
op.drop_column("paper_temperatures", "jd_time")
op.create_primary_key(
"paper_temperatures_pkey",
"paper_temperatures",
@@ -186,28 +186,28 @@ def upgrade():

op.drop_constraint("part_info_pkey", "part_info", type_="primary")
op.add_column(
u"part_info", sa.Column("posting_gpstime", sa.BigInteger(), nullable=False)
"part_info", sa.Column("posting_gpstime", sa.BigInteger(), nullable=False)
)
op.drop_column(u"part_info", "posting_date")
op.drop_column("part_info", "posting_date")
op.create_primary_key(
"part_info_pkey", "part_info", ["hpn", "hpn_rev", "posting_gpstime"]
)

op.add_column(
u"parts_paper", sa.Column("start_gpstime", sa.BigInteger(), nullable=False)
"parts_paper", sa.Column("start_gpstime", sa.BigInteger(), nullable=False)
)
op.add_column(
u"parts_paper", sa.Column("stop_gpstime", sa.BigInteger(), nullable=True)
"parts_paper", sa.Column("stop_gpstime", sa.BigInteger(), nullable=True)
)
op.drop_column(u"parts_paper", "stop_date")
op.drop_column(u"parts_paper", "start_date")
op.drop_column("parts_paper", "stop_date")
op.drop_column("parts_paper", "start_date")
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
u"parts_paper",
"parts_paper",
sa.Column(
"start_date",
postgresql.TIMESTAMP(timezone=True),
@@ -216,28 +216,28 @@ def downgrade():
),
)
op.add_column(
u"parts_paper",
"parts_paper",
sa.Column(
"stop_date",
postgresql.TIMESTAMP(timezone=True),
autoincrement=False,
nullable=True,
),
)
op.drop_column(u"parts_paper", "stop_gpstime")
op.drop_column(u"parts_paper", "start_gpstime")
op.drop_column("parts_paper", "stop_gpstime")
op.drop_column("parts_paper", "start_gpstime")
op.add_column(
u"part_info",
"part_info",
sa.Column(
"posting_date",
postgresql.TIMESTAMP(timezone=True),
autoincrement=False,
nullable=False,
),
)
op.drop_column(u"part_info", "posting_gpstime")
op.drop_column("part_info", "posting_gpstime")
op.add_column(
u"paper_temperatures",
"paper_temperatures",
sa.Column(
"jd_time",
postgresql.DOUBLE_PRECISION(precision=53),
@@ -246,17 +246,17 @@ def downgrade():
),
)
op.add_column(
u"paper_temperatures",
"paper_temperatures",
sa.Column(
"gps_time",
postgresql.DOUBLE_PRECISION(precision=53),
autoincrement=False,
nullable=False,
),
)
op.drop_column(u"paper_temperatures", "time")
op.drop_column("paper_temperatures", "time")
op.add_column(
u"hera_obs",
"hera_obs",
sa.Column(
"start_time_jd",
postgresql.DOUBLE_PRECISION(precision=53),
@@ -265,29 +265,29 @@ def downgrade():
),
)
op.add_column(
u"hera_obs",
"hera_obs",
sa.Column(
"stop_time_jd",
postgresql.DOUBLE_PRECISION(precision=53),
autoincrement=False,
nullable=False,
),
)
op.drop_column(u"hera_obs", "stoptime")
op.drop_column(u"hera_obs", "starttime")
op.drop_column(u"hera_obs", "jd_start")
op.drop_column("hera_obs", "stoptime")
op.drop_column("hera_obs", "starttime")
op.drop_column("hera_obs", "jd_start")
op.add_column(
u"geo_location",
"geo_location",
sa.Column(
"created_date",
postgresql.TIMESTAMP(timezone=True),
autoincrement=False,
nullable=False,
),
)
op.drop_column(u"geo_location", "created_gpstime")
op.drop_column("geo_location", "created_gpstime")
op.add_column(
u"connections",
"connections",
sa.Column(
"start_date",
postgresql.TIMESTAMP(timezone=True),
@@ -296,16 +296,16 @@ def downgrade():
),
)
op.add_column(
u"connections",
"connections",
sa.Column(
"stop_date",
postgresql.TIMESTAMP(timezone=True),
autoincrement=False,
nullable=True,
),
)
op.drop_column(u"connections", "stop_gpstime")
op.drop_column(u"connections", "start_gpstime")
op.drop_column("connections", "stop_gpstime")
op.drop_column("connections", "start_gpstime")
op.drop_table("rtp_process_record")
op.drop_table("rtp_process_event")
op.drop_table("lib_files")
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion hera_mc/cm_utils.py
@@ -622,7 +622,7 @@ def get_astropytime(adate, atime=None, float_format=None):
atime
)
)
add_time += (float(d)) * 3600.0 / (60.0 ** i)
add_time += (float(d)) * 3600.0 / (60.0**i)
return return_date + TimeDelta(add_time, format="sec")
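
The only change in this hunk is the tighter spacing around the power operator, consistent with the black 22 update in this PR; the arithmetic is unchanged. As a hypothetical illustration of what that line computes (not the hera_mc implementation, whose surrounding parsing logic is not shown here), each colon-separated field of a clock time contributes 3600 / 60**i seconds:

# Hypothetical illustration only: hours (i=0) weigh 3600 s, minutes (i=1) 60 s,
# seconds (i=2) 1 s, matching the 3600.0 / (60.0**i) factor above.
def clock_to_seconds(atime):
    """Convert 'HH', 'HH:MM' or 'HH:MM:SS' to seconds past midnight."""
    add_time = 0.0
    for i, d in enumerate(atime.split(":")):
        add_time += float(d) * 3600.0 / (60.0**i)
    return add_time


assert clock_to_seconds("12:30") == 45000.0  # 12*3600 + 30*60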


7 changes: 7 additions & 0 deletions hera_mc/tests/test_correlator.py
@@ -2809,3 +2809,10 @@ def test_redis_add_antenna_status_from_corrcm(mcsession):
result = test_session.get_antenna_status(most_recent=True)

assert len(result) >= 1


def test_pam_fem_id_to_string_list():
    idno = ["fem0", "fem1"]
    assert corr._pam_fem_id_to_string(idno) == "fem0:fem1"

    return
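
The new test pins down the string form produced for a list of PAM/FEM ids. Below is a minimal sketch of an implementation consistent with that assertion, purely hypothetical (the actual corr._pam_fem_id_to_string in hera_mc.correlator is not shown in this diff):

# Hypothetical sketch only; not the hera_mc.correlator implementation.
def _pam_fem_id_to_string(idno):
    """Render a PAM/FEM id as a string, joining list entries with ':'."""
    if isinstance(idno, (list, tuple)):
        return ":".join(str(part) for part in idno)
    return str(idno)


assert _pam_fem_id_to_string(["fem0", "fem1"]) == "fem0:fem1"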
8 changes: 4 additions & 4 deletions scripts/mc_server_status_daemon.py
@@ -127,7 +127,7 @@ def get_ip_address():
cpu_load_pct = os.getloadavg()[1] / num_cores * 100.0
uptime_days = (time.time() - psutil.boot_time()) / 86400.0

memory_size_gb = vmem.total / 1024 ** 3 # bytes => GiB
memory_size_gb = vmem.total / 1024**3 # bytes => GiB

# We only track disk usage on the root filesystem partition. We could
# potentially use `psutil.disk_partitions(all=False)` to try to track
@@ -136,7 +136,7 @@ def get_ip_address():
# specialized channels.

disk = psutil.disk_usage("/")
disk_size_gb = disk.total / 1024 ** 3 # bytes => GiB
disk_size_gb = disk.total / 1024**3 # bytes => GiB
disk_space_pct = (
disk.percent
) # note, this is misnamed a bit - it's the % used
@@ -145,12 +145,12 @@ def get_ip_address():
# `prev_index` so that the differences below give the total number
# of bytes transferred since the last report.

memory_used_pct = (mem_buf.mean() / 1024 ** 3) * 100.0 / memory_size_gb
memory_used_pct = (mem_buf.mean() / 1024**3) * 100.0 / memory_size_gb

tx_bytes = tx_buf[prev_index] - tx_buf[index]
rx_bytes = rx_buf[prev_index] - rx_buf[index]
network_bandwidth_mbs = (
(tx_bytes + rx_bytes) / 1024 ** 2 / (now - last_report)
(tx_bytes + rx_bytes) / 1024**2 / (now - last_report)
)

# Submit
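
The comment in this script notes that only the root filesystem is tracked and that psutil.disk_partitions(all=False) could enumerate more partitions. A minimal sketch of that alternative, assuming psutil is available (hypothetical; not part of this PR):

import psutil

# Hypothetical extension of the disk tracking above, not part of this PR:
# enumerate physical partitions instead of hard-coding "/".
for part in psutil.disk_partitions(all=False):
    usage = psutil.disk_usage(part.mountpoint)
    size_gb = usage.total / 1024**3  # bytes => GiB, same convention as the script
    print(f"{part.mountpoint}: {size_gb:.1f} GiB total, {usage.percent:.0f}% used")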