Skip to content

Commit

Permalink
Fixed a bug in dsl_parser and improved code quality
Browse files Browse the repository at this point in the history
  • Loading branch information
gerardparis committed Nov 10, 2017
1 parent cc6cf23 commit aae1db1
Show file tree
Hide file tree
Showing 10 changed files with 100 additions and 102 deletions.
6 changes: 2 additions & 4 deletions api/api/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,10 @@
import threading
import errno
import hashlib
import calendar
import logging
import redis
import os
import sys
import time

logger = logging.getLogger(__name__)
host = None
Expand Down Expand Up @@ -168,10 +166,10 @@ def rsync_dir_with_nodes(directory):
data = {'directory': directory, 'dest_directory': dest_directory, 'node_ip': node['ip'],
'ssh_username': node['ssh_username'], 'ssh_password': node['ssh_password']}

threading.Thread(target=rsync, args=(node, data)).start()
threading.Thread(target=rsync, args=data).start()


def rsync(node, data):
def rsync(data):
    """Mirror a local directory onto a remote node via rsync over ssh.

    ``data`` must provide the keys: 'directory', 'dest_directory',
    'node_ip', 'ssh_username' and 'ssh_password' (see the caller
    rsync_dir_with_nodes, which builds this dict per node).

    NOTE(review): sshpass exposes the password on the local process
    list; key-based ssh auth would be preferable — confirm with ops.
    """
    import subprocess  # local import so the module's import block is untouched

    # Build an argument vector instead of interpolating values into one
    # shell string: avoids shell injection via directory names/credentials.
    command = [
        'sshpass', '-p', data['ssh_password'],
        'rsync', '--progress', '--delete', '-avrz', '-e', 'ssh',
        data['directory'],
        '{ssh_username}@{node_ip}:{dest_directory}'.format(**data),
    ]
    subprocess.call(command)

Expand Down
2 changes: 1 addition & 1 deletion api/api/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@
RABBITMQ_HOST = 'localhost'
RABBITMQ_PORT = 5672
RABBITMQ_USERNAME = 'guest'
RABBITMQ_PASSWORD = 'guest'
RABBITMQ_PASSWORD = 'guest' # noqa
RABBITMQ_EXCHANGE = 'amq.topic'

# Logstash
Expand Down
8 changes: 4 additions & 4 deletions api/filters/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ def test_filter_deploy_to_project_ok(self, mock_put_object):
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.content, '1')
mock_put_object.assert_called_with(settings.SWIFT_URL + "/AUTH_0123456789abcdef",
'fake_token', "storlet", "test-1.0.jar", mock.ANY, mock.ANY, mock.ANY,
'fake_token', ".storlet", "test-1.0.jar", mock.ANY, mock.ANY, mock.ANY,
mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY)
self.assertTrue(self.r.hexists("pipeline:0123456789abcdef", "1"))
dumped_data = self.r.hget("pipeline:0123456789abcdef", "1")
Expand All @@ -239,7 +239,7 @@ def test_filter_deploy_to_project_and_container_ok(self, mock_put_object):
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.content, '1')
mock_put_object.assert_called_with(settings.SWIFT_URL + "/AUTH_0123456789abcdef",
'fake_token', "storlet", "test-1.0.jar", mock.ANY, mock.ANY, mock.ANY,
'fake_token', ".storlet", "test-1.0.jar", mock.ANY, mock.ANY, mock.ANY,
mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY)
self.assertTrue(self.r.hexists("pipeline:0123456789abcdef:container1", "1"))
dumped_data = self.r.hget("pipeline:0123456789abcdef:container1", "1")
Expand Down Expand Up @@ -270,7 +270,7 @@ def test_filter_deploy_to_project_and_container_ok(self, mock_put_object):
# response = filter_deploy(request, "1", "0123456789abcdef")
# self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# mock_put_object.assert_called_with(settings.SWIFT_URL + "/AUTH_0123456789abcdef",
# 'fake_token', "storlet", "FakeFilter", mock.ANY, mock.ANY, mock.ANY,
# 'fake_token', ".storlet", "FakeFilter", mock.ANY, mock.ANY, mock.ANY,
# mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY)
# self.assertTrue(self.r.hexists("pipeline:0123456789abcdef", "1"))
# dumped_data = self.r.hget("pipeline:0123456789abcdef", "1")
Expand Down Expand Up @@ -353,7 +353,7 @@ def test_unset_filter_ok(self, mock_delete_object):
self.r.hmset('pipeline:0123456789abcdef', {'20': json.dumps(data20), '21': json.dumps(data21)})
unset_filter(self.r, '0123456789abcdef', {'filter_type': 'storlet', 'filter_name': 'test-1.0.jar'}, 'fake_token')
mock_delete_object.assert_called_with(settings.SWIFT_URL + "/AUTH_0123456789abcdef",
'fake_token', "storlet", "test-1.0.jar", mock.ANY, mock.ANY, mock.ANY,
'fake_token', ".storlet", "test-1.0.jar", mock.ANY, mock.ANY, mock.ANY,
mock.ANY, mock.ANY)
self.assertFalse(self.r.hexists("pipeline:0123456789abcdef", "21")) # 21 was deleted
self.assertTrue(self.r.hexists("pipeline:0123456789abcdef", "20")) # 20 was not deleted
Expand Down
12 changes: 2 additions & 10 deletions api/metrics/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
from rest_framework import status
from rest_framework.test import APIRequestFactory

from metrics.views import metric_module_list, metric_module_detail, MetricModuleData, load_metrics
from metrics.views import metric_module_list, metric_module_detail, MetricModuleData


# Tests use database=10 instead of 0.
@override_settings(REDIS_CON_POOL=redis.ConnectionPool(host='localhost', port=6379, db=10),
Expand Down Expand Up @@ -114,15 +115,6 @@ def test_create_metric_module_ok(self, mock_rsync_dir):
metric_data = json.loads(response.content)
self.assertEqual(metric_data['metric_name'], 'test.py')

#
# load_metrics()
#

@mock.patch('metrics.views.start_metric')
def test_load_metrics(self, mock_start_metric):
load_metrics()
mock_start_metric.assert_called_with(1, 'm1')

#
# Aux methods
#
Expand Down
19 changes: 0 additions & 19 deletions api/metrics/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,25 +23,6 @@
logger = logging.getLogger(__name__)


# NOTE(review): rendered from a diff page — original indentation was lost;
# tokens below are kept exactly as shown.
def load_metrics():
"""Restart every workload metric whose stored status is 'Running'."""
try:
r = get_redis_connection()
except RedisError:
# Same error contract as the view functions in this module.
return JSONResponse('Error connecting with DB', status=500)

# One hash per metric, keyed workload_metric:<id>.
workload_metrics = r.keys("workload_metric:*")

if workload_metrics:
logger.info("Starting workload metrics")

for wm in workload_metrics:
wm_data = r.hgetall(wm)
if wm_data['status'] == 'Running':
# metric_name looks like '<actor_id>.py' (e.g. 'm1.py'); strip the
# extension to recover the actor id passed to start_metric.
actor_id = wm_data['metric_name'].split('.')[0]
metric_id = int(wm_data['id'])
start_metric(metric_id, actor_id)


@csrf_exempt
def list_activated_metrics(request):
"""
Expand Down
12 changes: 11 additions & 1 deletion api/policies/dsl_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,17 @@ def parse(input_string):
object_type = Group(Literal("OBJECT_TYPE")("type") + Literal("=") + word(alphanums)("object_value"))("object_type")
object_size = Group(Literal("OBJECT_SIZE")("size") + operand_object("operand") + number("object_value"))("object_size")
object_tag = Group(Literal("OBJECT_TAG")("tag") + Literal("=") + word(alphanums)("object_value"))("object_tag")
object_list = Group(object_type ^ object_size ^ object_type + "," + object_size ^ object_size + "," + object_type)
object_list = Group(object_type ^ object_size ^ object_tag ^
object_type + "," + object_size ^ object_size + "," + object_type ^
object_type + "," + object_tag ^ object_tag + "," + object_type ^
object_size + "," + object_tag ^ object_tag + "," + object_size ^
object_type + "," + object_size + "," + object_tag ^
object_type + "," + object_tag + "," + object_size ^
object_size + "," + object_type + "," + object_tag ^
object_size + "," + object_tag + "," + object_type ^
object_tag + "," + object_type + "," + object_size ^
object_tag + "," + object_size + "," + object_type)

to = Suppress("TO")

# Functions post-parsed
Expand Down
42 changes: 32 additions & 10 deletions api/policies/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from pyparsing import ParseException
from rest_framework import status
from rest_framework.test import APIRequestFactory
from policies.dsl_parser import parse
from policies.dsl_parser import parse, parse_condition, parse_group_tenants
from filters.views import filter_list, filter_deploy, FilterData
from policies.views import object_type_list, object_type_detail, static_policy_detail, dynamic_policy_detail, policy_list
from projects.views import add_projects_group
Expand Down Expand Up @@ -199,7 +199,6 @@ def test_registry_dynamic_policy_detail_with_method_not_allowed(self):
response = dynamic_policy_detail(request, '123')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)


#
# Parse tests
#
Expand Down Expand Up @@ -276,6 +275,28 @@ def test_parse_target_tenant_to_object_type_ok(self):
self.assertIsNotNone(object_type.object_value)
self.assertEqual(object_type.object_value, 'DOCS')

# NOTE(review): rendered from a diff page — original indentation was lost.
def test_parse_target_tenant_to_object_type_tag_size_ok(self):
"""A rule combining OBJECT_TAG, OBJECT_SIZE and OBJECT_TYPE parses and
exposes all three members on rule_parsed.object_list (exercises the
expanded object_list grammar added by this commit)."""
self.setup_dsl_parser_data()
rule_str = 'FOR TENANT:0123456789abcdef DO SET compression TO OBJECT_TAG=tagtag, OBJECT_SIZE>10, OBJECT_TYPE=DOCS'
has_condition_list, rule_parsed = parse(rule_str)
# No WHEN clause in the rule, so no condition list is produced.
self.assertFalse(has_condition_list)
self.assertIsNotNone(rule_parsed)
object_list = rule_parsed.object_list
self.assertIsNotNone(object_list)
object_type = object_list.object_type
self.assertIsNotNone(object_type)
self.assertIsNotNone(object_type.object_value)
self.assertEqual(object_type.object_value, 'DOCS')
object_tag = object_list.object_tag
self.assertIsNotNone(object_tag)
self.assertIsNotNone(object_tag.object_value)
self.assertEqual(object_tag.object_value, 'tagtag')
object_size = object_list.object_size
self.assertIsNotNone(object_size)
self.assertIsNotNone(object_size.object_value)
# Size comparisons keep both the operand ('>') and the numeric value.
self.assertEqual(object_size.object_value, '10')
self.assertEqual(object_size.operand, '>')

def test_parse_target_tenant_with_parameters_ok(self):
self.setup_dsl_parser_data()
has_condition_list, rule_parsed = parse('FOR TENANT:0123456789abcdef DO SET compression WITH cparam1=11, cparam2=12, cparam3=13')
Expand Down Expand Up @@ -342,6 +363,12 @@ def test_parse_not_callable(self):
action_info = action_list[0]
self.assertEqual(action_info.callable, '')

# NOTE(review): rendered from a diff page — original indentation was lost.
def test_parse_condition_ok(self):
"""parse_condition turns a metric expression into nested token lists,
with boolean connectors ('OR') kept as top-level elements."""
self.setup_dsl_parser_data()
condition_list = parse_condition("metric1 > 5.0 OR metric1 < 2.0")
self.assertIsNotNone(condition_list)
self.assertEqual(condition_list, [['metric1', '>', '5.0'], 'OR', ['metric1', '<', '2.0']])

#
# object_type tests
#
Expand Down Expand Up @@ -507,7 +534,6 @@ def test_update_object_type_with_empty_list(self):
response = object_type_detail(request, name)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)


#
# Aux methods
#
Expand Down Expand Up @@ -563,11 +589,9 @@ def create_object_type_docs(self):
def setup_dsl_parser_data(self):
# Simplified filter data:
self.r.hmset('filter:compression', {'valid_parameters': '{"cparam1": "integer", "cparam2": "integer", "cparam3": "integer"}'})
self.r.hmset('filter:encryption', { 'valid_parameters': '{"eparam1": "integer", "eparam2": "bool", "eparam3": "string"}'})
self.r.hmset('filter:encryption', {'valid_parameters': '{"eparam1": "integer", "eparam2": "bool", "eparam3": "string"}'})
self.r.hmset('metric:metric1', {'network_location': '?', 'type': 'integer'})
self.r.hmset('metric:metric2', {'network_location': '?', 'type': 'integer'})
# self.r.rpush('G:1', '0123456789abcdef')
# self.r.rpush('G:2', 'abcdef0123456789')

def create_tenant_group_1(self):
tenant_group_data = {'name': 'group1', 'attached_projects': json.dumps(['0123456789abcdef', 'abcdef0123456789'])}
Expand All @@ -592,14 +616,12 @@ def create_storage_nodes(self):

def create_metric_modules(self):
self.r.incr("workload_metrics:id") # setting autoincrement to 1
#self.r.hmset('workload_metric:1', {'metric_name': 'm1.py', 'class_name': 'Metric1', 'execution_server': 'proxy', 'out_flow': 'False',
# 'in_flow': 'False', 'status': 'Running', 'id': '1'})
self.r.hmset('workload_metric:1', {'metric_name': 'm1.py', 'class_name': 'Metric1', 'status': 'Running', 'get': 'False', 'put': 'False',
'execution_server': 'object', 'replicate' : 'True', 'ssync': 'True', 'id': '1' })
'execution_server': 'object', 'replicate': 'True', 'ssync': 'True', 'id': '1'})

def create_global_controllers(self):
self.r.incr("controllers:id") # setting autoincrement to 1
self.r.hmset('controller:1', {'class_name': 'MinTenantSLOGlobalSpareBWShare',
'controller_name': 'min_slo_tenant_global_share_spare_bw_v2.py',
'valid_parameters': 'method={put|get}', 'id': '1', 'instances': 0,
'enabled': 'False', 'description': 'Fake description'})
'enabled': 'False', 'description': 'Fake description'})
42 changes: 21 additions & 21 deletions api/policies/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@ def policy_list(request):
for it in keys:
for key, value in r.hgetall(it).items():
policy = json.loads(value)
filter = r.hgetall('filter:' + str(policy['dsl_name']))
to_json_bools(filter, 'get', 'put', 'post', 'head', 'delete')
filter_data = r.hgetall('filter:' + str(policy['dsl_name']))
to_json_bools(filter_data, 'get', 'put', 'post', 'head', 'delete')
target_id = it.replace('pipeline:', '')
policy = {'id': key, 'target_id': target_id,
'target_name': project_list[target_id.split(':')[0]],
Expand All @@ -49,14 +49,14 @@ def policy_list(request):
'reverse': policy['reverse'],
'execution_order': policy['execution_order'],
'params': policy['params'],
'put': filter['put'],
'get': filter['get']}
if 'post' in filter:
policy['post'] = filter['post']
if 'head' in filter:
policy['head'] = filter['head']
if 'delete' in filter:
policy['delete'] = filter['delete']
'put': filter_data['put'],
'get': filter_data['get']}
if 'post' in filter_data:
policy['post'] = filter_data['post']
if 'head' in filter_data:
policy['head'] = filter_data['head']
if 'delete' in filter_data:
policy['delete'] = filter_data['delete']
policies.append(policy)
sorted_policies = sorted(policies, key=lambda x: int(itemgetter('execution_order')(x)))

Expand Down Expand Up @@ -190,17 +190,17 @@ def static_policy_detail(request, policy_id):
project_list['global'] = 'Global'
policy_redis = r.hget("pipeline:" + str(target), policy)
data = json.loads(policy_redis)
filter = r.hgetall('filter:' + str(data['dsl_name']))

to_json_bools(filter, 'get', 'put', 'post', 'head', 'delete')
data['get'] = filter['get']
data['put'] = filter['put']
if 'post' in filter:
data['post'] = filter['post']
if 'head' in filter:
data['head'] = filter['head']
if 'delete' in filter:
data['delete'] = filter['delete']
filter_data = r.hgetall('filter:' + str(data['dsl_name']))

to_json_bools(filter_data, 'get', 'put', 'post', 'head', 'delete')
data['get'] = filter_data['get']
data['put'] = filter_data['put']
if 'post' in filter_data:
data['post'] = filter_data['post']
if 'head' in filter_data:
data['head'] = filter_data['head']
if 'delete' in filter_data:
data['delete'] = filter_data['delete']
data["id"] = policy
data["target_id"] = target
data["target_name"] = project_list[target.split(':')[0]]
Expand Down
6 changes: 1 addition & 5 deletions api/projects/views.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,8 @@
from django.conf import settings
from django.http import HttpResponse
from django.http import StreamingHttpResponse
from django.views.decorators.csrf import csrf_exempt
from redis.exceptions import RedisError
from rest_framework import status
from rest_framework.exceptions import ParseError
from rest_framework.parsers import JSONParser, MultiPartParser, FormParser
from rest_framework.views import APIView
from rest_framework.parsers import JSONParser
from paramiko.ssh_exception import SSHException, AuthenticationException
from swiftclient import client as swift_client
import logging
Expand Down

0 comments on commit aae1db1

Please sign in to comment.