Merge pull request #159 from apel/release-1.6.2
Release 1.6.2 to master
tofu-rocketry committed Apr 23, 2018
2 parents ae6a947 + 0b9a8ad commit 9203c74
Showing 16 changed files with 389 additions and 260 deletions.
9 changes: 8 additions & 1 deletion CHANGELOG
@@ -1,7 +1,14 @@
Changelog for apel
==================
* Mon Apr 16 2018 Adrian Coveney <adrian.coveney@stfc.ac.uk> - 1.6.2-1
- [parsers] Added remaining job statuses for SLURM that indicate the job has
stopped and that resources have been used.
- [server] Fix CpuCount being NULL in cloud accounting records and leading to
warnings when summarising.
- [docs] Remove references to specific LSF versions as all now allowed.

* Thu Dec 14 2017 Adrian Coveney <adrian.coveney@stfc.ac.uk> - 1.6.1-1
- [Parsers] Removed version restriction from LSF parser so that it can
- [parsers] Removed version restriction from LSF parser so that it can
additionally work with version 10 onwards.
- Added more columns to cloud summaries primary key to prevent mis-grouping.
- Added Python setup script to enable installation on non-RHEL-based systems.
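
The SLURM parser change noted in the 1.6.2 entry above is not among the file diffs shown on this page. As a rough sketch of the idea only (the state names and helper below are illustrative assumptions, not apel's actual parser code), a parser might treat a job as accountable once it reaches a terminal SLURM state:

    # Illustrative only: terminal SLURM job states, meaning the job has
    # stopped and resources have been used. The exact set handled by apel's
    # SLURM parser may differ.
    STOPPED_STATES = {
        'COMPLETED', 'FAILED', 'CANCELLED', 'TIMEOUT',
        'NODE_FAIL', 'PREEMPTED', 'OUT_OF_MEMORY',
    }

    def job_has_stopped(state):
        """Return True if a sacct-style state string is terminal."""
        # sacct can report e.g. "CANCELLED by 1000"; compare the first word only.
        return state.split(' ')[0] in STOPPED_STATES
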
2 changes: 1 addition & 1 deletion README.md
@@ -12,7 +12,7 @@ Python and uses MySQL. It has the following components:
### apel-parsers

These extract data from the following batch systems:
* LSF 5.x to 9.x
* LSF
* PBS
* SGE/OGE
* SLURM
25 changes: 16 additions & 9 deletions apel.spec
@@ -4,7 +4,7 @@
%endif

Name: apel
Version: 1.6.1
Version: 1.6.2
%define releasenumber 1
Release: %{releasenumber}%{?dist}
Summary: APEL packages
@@ -34,7 +34,7 @@ apel-lib provides required libraries for the rest of APEL system.
%package parsers
Summary: Parsers for APEL system
Group: Development/Languages
Requires: apel-lib >= 1.6.1
Requires: apel-lib >= 1.6.2
Requires(pre): shadow-utils

%description parsers
@@ -44,7 +44,7 @@ supported by the APEL system: Torque, SGE and LSF.
%package client
Summary: APEL client package
Group: Development/Languages
Requires: apel-lib >= 1.6.1, apel-ssm
Requires: apel-lib >= 1.6.2, apel-ssm
Requires(pre): shadow-utils

%description client
@@ -55,7 +55,7 @@ SSM.
%package server
Summary: APEL server package
Group: Development/Languages
Requires: apel-lib >= 1.6.1, apel-ssm
Requires: apel-lib >= 1.6.2, apel-ssm
Requires(pre): shadow-utils

%description server
@@ -109,8 +109,8 @@ cp schemas/server-extra.sql %{buildroot}%_datadir/apel/
cp schemas/cloud.sql %{buildroot}%_datadir/apel/
cp schemas/storage.sql %{buildroot}%_datadir/apel/

cp scripts/update-1.5.1-1.6.0.sql %{buildroot}%_datadir/apel/
cp scripts/update-1.6.0-1.6.1.sql %{buildroot}%_datadir/apel/
# All update scripts matched by wildcard
cp scripts/update-*.sql %{buildroot}%_datadir/apel/

# accounting scripts
cp scripts/slurm_acc.sh %{buildroot}%_datadir/apel/
@@ -174,8 +174,8 @@ exit 0
%_datadir/apel/server-extra.sql
%_datadir/apel/cloud.sql
%_datadir/apel/storage.sql
%_datadir/apel/update-1.5.1-1.6.0.sql
%_datadir/apel/update-1.6.0-1.6.1.sql
# Include all update scripts by wildcard matching
%_datadir/apel/update-*.sql

%attr(755,root,root) %_datadir/apel/msg_status.py
%exclude %_datadir/apel/msg_status.pyc
@@ -199,8 +199,8 @@ exit 0
# ==============================================================================

%changelog
* Mon Apr 16 2018 Adrian Coveney <adrian.coveney@stfc.ac.uk> - 1.6.2-1
- [parsers] Added remaining job statuses for SLURM that indicate the job has
stopped and that resources have been used.
- [server] Fix CpuCount being NULL in cloud accounting records and leading to
warnings when summarising.
- [docs] Remove references to specific LSF versions as all now allowed.

* Thu Dec 14 2017 Adrian Coveney <adrian.coveney@stfc.ac.uk> - 1.6.1-1
- [Parsers] Removed version restriction from LSF parser so that it can
- [parsers] Removed version restriction from LSF parser so that it can
additionally work with version 10 onwards.
- Added more columns to cloud summaries primary key to prevent mis-grouping.
- Added Python setup script to enable installation on non-RHEL-based systems.
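
The wildcard changes above (in the %install and %files sections) mean the spec no longer has to list each schema update script by name when a release adds a new one. Purely as an illustration, the same update-*.sql match expressed in Python:

    # Illustrative only: list whatever schema update scripts are present,
    # as the spec's update-*.sql wildcard would match them.
    import glob

    for path in sorted(glob.glob('scripts/update-*.sql')):
        print(path)  # e.g. scripts/update-1.5.1-1.6.0.sql, scripts/update-1.6.0-1.6.1.sql
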
2 changes: 1 addition & 1 deletion apel/__init__.py
@@ -15,4 +15,4 @@
@author Konrad Jopek, Will Rogers
'''
__version__ = (1, 6, 1)
__version__ = (1, 6, 2)
255 changes: 132 additions & 123 deletions apel/db/records/cloud.py
@@ -1,123 +1,132 @@
'''
Copyright (C) 2011 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author Will Rogers
'''

from apel.db.records import Record, InvalidRecordException
from apel.common import parse_fqan
from datetime import datetime, timedelta


class CloudRecord(Record):
'''
Class to represent one cloud record.
It knows about the structure of the MySQL table and the message format.
It stores its information in a dictionary self._record_content. The keys
are in the same format as in the messages, and are case-sensitive.
'''
def __init__(self):
'''Provide the necessary lists containing message information.'''

Record.__init__(self)

# Fields which are required by the message format.
self._mandatory_fields = ["VMUUID", "SiteName"]

# This list allows us to specify the order of lines when we construct records.
self._msg_fields = ["VMUUID", "SiteName", "CloudComputeService", "MachineName",
"LocalUserId", "LocalGroupId", "GlobalUserName", "FQAN",
"Status", "StartTime", "EndTime", "SuspendDuration",
"WallDuration", "CpuDuration", "CpuCount",
"NetworkType", "NetworkInbound", "NetworkOutbound", "PublicIPCount",
"Memory", "Disk", "BenchmarkType", "Benchmark",
"StorageRecordId", "ImageId", "CloudType"]

# This list specifies the information that goes in the database.
self._db_fields = self._msg_fields[:8] + ['VO', 'VOGroup', 'VORole'] + self._msg_fields[8:]
self._all_fields = self._db_fields

self._ignored_fields = ["UpdateTime"]

# Fields which will have an integer stored in them
self._int_fields = [ "SuspendDuration", "WallDuration", "CpuDuration", "CpuCount",
"NetworkInbound", "NetworkOutbound", "PublicIPCount", "Memory", "Disk"]

self._float_fields = ['Benchmark']
self._datetime_fields = ["StartTime", "EndTime"]

def _check_fields(self):
'''
Add extra checks to those made in every record.
'''
# First, call the parent's version.
Record._check_fields(self)

# Extract the relevant information from the user fqan.
# Keep the fqan itself as other methods in the class use it.
role, group, vo = parse_fqan(self._record_content['FQAN'])
# We can't / don't put NULL in the database, so we use 'None'
if role is None:
role = 'None'
if group is None:
group = 'None'
if vo is None:
vo = 'None'

if self._record_content['Benchmark'] is None:
# If Benchmark is not present in the original record the
# parent Record class level type checking will set it to
# None. We can't pass None as a Benchmark as the field is
# NOT NULL in the database, so we set it to something
# meaningful. In this case the float 0.0.
self._record_content['Benchmark'] = 0.0


self._record_content['VORole'] = role
self._record_content['VOGroup'] = group
self._record_content['VO'] = vo

# Check the values of StartTime and EndTime
# self._check_start_end_times()


def _check_start_end_times(self):
'''Checks the values of StartTime and EndTime in _record_content.
StartTime should be less than or equal to EndTime.
Neither StartTime or EndTime should be zero.
EndTime should not be in the future.
This is merely factored out for simplicity.
'''
try:
start = int(self._record_content['StartTime'])
end = int(self._record_content['EndTime'])
if end < start:
raise InvalidRecordException("EndTime is before StartTime.")

if start == 0 or end == 0:
raise InvalidRecordException("Epoch times StartTime and EndTime mustn't be 0.")

now = datetime.now()
# add two days to prevent timezone problems
tomorrow = now + timedelta(2)
if datetime.fromtimestamp(end) > tomorrow:
raise InvalidRecordException("Epoch time " + str(end) + " is in the future.")

except ValueError:
raise InvalidRecordException("Cannot parse an integer from StartTime or EndTime.")


'''
Copyright (C) 2011 STFC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author Will Rogers
'''

from apel.db.records import Record, InvalidRecordException
from apel.common import parse_fqan
from datetime import datetime, timedelta


class CloudRecord(Record):
'''
Class to represent one cloud record.
It knows about the structure of the MySQL table and the message format.
It stores its information in a dictionary self._record_content. The keys
are in the same format as in the messages, and are case-sensitive.
'''
def __init__(self):
'''Provide the necessary lists containing message information.'''

Record.__init__(self)

# Fields which are required by the message format.
self._mandatory_fields = ["VMUUID", "SiteName"]

# This list allows us to specify the order of lines when we construct records.
self._msg_fields = ["VMUUID", "SiteName", "CloudComputeService", "MachineName",
"LocalUserId", "LocalGroupId", "GlobalUserName", "FQAN",
"Status", "StartTime", "EndTime", "SuspendDuration",
"WallDuration", "CpuDuration", "CpuCount",
"NetworkType", "NetworkInbound", "NetworkOutbound", "PublicIPCount",
"Memory", "Disk", "BenchmarkType", "Benchmark",
"StorageRecordId", "ImageId", "CloudType"]

# This list specifies the information that goes in the database.
self._db_fields = self._msg_fields[:8] + ['VO', 'VOGroup', 'VORole'] + self._msg_fields[8:]
self._all_fields = self._db_fields

self._ignored_fields = ["UpdateTime"]

# Fields which will have an integer stored in them
self._int_fields = [ "SuspendDuration", "WallDuration", "CpuDuration", "CpuCount",
"NetworkInbound", "NetworkOutbound", "PublicIPCount", "Memory", "Disk"]

self._float_fields = ['Benchmark']
self._datetime_fields = ["StartTime", "EndTime"]

def _check_fields(self):
'''
Add extra checks to those made in every record.
'''
# First, call the parent's version.
Record._check_fields(self)

# Extract the relevant information from the user fqan.
# Keep the fqan itself as other methods in the class use it.
role, group, vo = parse_fqan(self._record_content['FQAN'])
# We can't / don't put NULL in the database, so we use 'None'
if role is None:
role = 'None'
if group is None:
group = 'None'
if vo is None:
vo = 'None'

if self._record_content['Benchmark'] is None:
# If Benchmark is not present in the original record the
# parent Record class level type checking will set it to
# None. We can't pass None as a Benchmark as the field is
# NOT NULL in the database, so we set it to something
# meaningful. In this case the float 0.0.
self._record_content['Benchmark'] = 0.0


self._record_content['VORole'] = role
self._record_content['VOGroup'] = group
self._record_content['VO'] = vo

# If the message was missing a CpuCount, assume it used
# zero Cpus, to prevent a NULL being written into the column
# in the CloudRecords tables.
# Doing so would be a problem despite the CloudRecords
# table allowing it because the CloudSummaries table
# doesn't allow it, creating a problem at summariser time.
if self._record_content['CpuCount'] is None:
self._record_content['CpuCount'] = 0

# Check the values of StartTime and EndTime
# self._check_start_end_times()


def _check_start_end_times(self):
'''Checks the values of StartTime and EndTime in _record_content.
StartTime should be less than or equal to EndTime.
Neither StartTime or EndTime should be zero.
EndTime should not be in the future.
This is merely factored out for simplicity.
'''
try:
start = int(self._record_content['StartTime'])
end = int(self._record_content['EndTime'])
if end < start:
raise InvalidRecordException("EndTime is before StartTime.")

if start == 0 or end == 0:
raise InvalidRecordException("Epoch times StartTime and EndTime mustn't be 0.")

now = datetime.now()
# add two days to prevent timezone problems
tomorrow = now + timedelta(2)
if datetime.fromtimestamp(end) > tomorrow:
raise InvalidRecordException("Epoch time " + str(end) + " is in the future.")

except ValueError:
raise InvalidRecordException("Cannot parse an integer from StartTime or EndTime.")

