
Commit

added support for Application Load Balancer
concurrencylabs committed Oct 17, 2018
1 parent a4b46d5 commit 6149ff1
Showing 5 changed files with 71 additions and 8 deletions.
3 changes: 3 additions & 0 deletions awspricecalculator/common/consts.py
@@ -154,6 +154,8 @@
PRODUCT_FAMILY_STORAGE = 'Storage'
PRODUCT_FAMILY_SYSTEM_OPERATION = 'System Operation'
PRODUCT_FAMILY_LOAD_BALANCER = 'Load Balancer'
PRODUCT_FAMILY_APPLICATION_LOAD_BALANCER = 'Load Balancer-Application'
PRODUCT_FAMILY_NETWORK_LOAD_BALANCER = 'Load Balancer-Network'
PRODUCT_FAMILY_SNAPSHOT = "Storage Snapshot"
PRODUCT_FAMILY_SERVERLESS = "Serverless"
PRODUCT_FAMILY_DB_STORAGE = "Database Storage"
@@ -165,6 +167,7 @@
SUPPORTED_PRODUCT_FAMILIES = (PRODUCT_FAMILY_COMPUTE_INSTANCE, PRODUCT_FAMILY_DATABASE_INSTANCE,
PRODUCT_FAMILY_DATA_TRANSFER,PRODUCT_FAMILY_FEE, PRODUCT_FAMILY_API_REQUEST,
PRODUCT_FAMILY_STORAGE, PRODUCT_FAMILY_SYSTEM_OPERATION, PRODUCT_FAMILY_LOAD_BALANCER,
PRODUCT_FAMILY_APPLICATION_LOAD_BALANCER, PRODUCT_FAMILY_NETWORK_LOAD_BALANCER,
PRODUCT_FAMILY_SNAPSHOT,PRODUCT_FAMILY_SERVERLESS,PRODUCT_FAMILY_DB_STORAGE,
PRODUCT_FAMILY_DB_PIOPS,PRODUCT_FAMILY_KINESIS_STREAMS)

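The two new product-family constants are what the pricing code uses, together with the region and term type, to locate the right price-list index file via phelper.create_file_key. The implementation of create_file_key is not part of this commit, so the sketch below only illustrates the idea of deriving a lookup key from that (region, term type, product family) tuple; the joining scheme is an assumption.

# Illustrative sketch only; the real create_file_key in awspricecalculator is not shown in this diff.
def create_file_key(dimensions):
    # dimensions is a tuple such as (region, term type, product family)
    return "_".join(str(d).lower().replace(' ', '-') for d in dimensions)

# Mirrors the calls added in awspricecalculator/ec2/pricing.py:
key = create_file_key(('us-east-1', 'OnDemand', 'Load Balancer-Application'))
# -> 'us-east-1_ondemand_load-balancer-application'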
11 changes: 8 additions & 3 deletions awspricecalculator/common/models.py
@@ -79,7 +79,7 @@ def __init__(self, **kargs):
#TODO: Add support for different license models
self.licenseModel = consts.SCRIPT_EC2_LICENSE_MODEL_NONE_REQUIRED
if self.operatingSystem == consts.SCRIPT_OPERATING_SYSTEM_WINDOWS:
self.licenseModel = consts.SCRIPT_EC2_LICENSE_MODEL_INCLUDED
self.licenseModel = consts.SCRIPT_EC2_LICENSE_MODEL_NONE_REQUIRED
if self.operatingSystem == consts.SCRIPT_OPERATING_SYSTEM_WINDOWS_BYOL:
self.licenseModel = consts.SCRIPT_EC2_LICENSE_MODEL_BYOL

@@ -107,11 +107,16 @@ def __init__(self, **kargs):
self.ebsStorageGbMonth = int(kargs.get('ebsStorageGbMonth',0))
self.ebsSnapshotGbMonth = int(kargs.get('ebsSnapshotGbMonth',0))

#ELB
#TODO: add support for ALB and NLB
#Elastic Load Balancer (classic)
self.elbHours = int(kargs.get('elbHours',0))
self.elbDataProcessedGb = int(kargs.get('elbDataProcessedGb',0))

#Application Load Balancer
self.albHours = int(kargs.get('albHours',0))
self.albLcus = int(kargs.get('albLcus',0))

#TODO: add support for Network Load Balancer

#TODO: Add support for shared and dedicated tenancies
self.tenancy = consts.SCRIPT_EC2_TENANCY_SHARED

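With albHours and albLcus on Ec2PriceDimension, callers describe ALB usage the same way they already describe Classic ELB hours and data processed. A minimal sketch of building the dimension for one ALB follows; the import alias and the usage figures are illustrative, not taken from this commit.

# Hypothetical usage; the handler refers to the models module as 'data', so the
# import alias below is an assumption, and the figures are placeholders.
import awspricecalculator.common.models as data

pdim = data.Ec2PriceDimension(
    region='us-east-1',   # must be a key in consts.REGION_MAP
    albHours=730,         # one ALB running for a full month
    albLcus=1500          # forecast LCU consumption over the same period
)
# pdim.albHours and pdim.albLcus feed the new ALB branch in ec2/pricing.py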
17 changes: 15 additions & 2 deletions awspricecalculator/ec2/pricing.py
@@ -91,11 +91,10 @@ def calculate(pdim):
query = ((priceQuery['usageType'] == consts.REGION_PREFIX_MAP[pdim.region]+'EBS:SnapshotUsage'))#EBS:SnapshotUsage comes with a prefix in the PriceList API file (i.e. EU-EBS:SnapshotUsage)
pricing_records, cost = phelper.calculate_price(consts.SERVICE_EBS, snapshotDb, query, pdim.ebsSnapshotGbMonth, pricing_records, cost)

#Load Balancer
#Classic Load Balancer
if pdim.elbHours:
#elbDb = dbs[phelper.create_file_key(consts.REGION_MAP[pdim.region], consts.TERM_TYPE_MAP[pdim.termType], consts.PRODUCT_FAMILY_LOAD_BALANCER)]
elbDb = dbs[phelper.create_file_key((consts.REGION_MAP[pdim.region], consts.TERM_TYPE_MAP[pdim.termType], consts.PRODUCT_FAMILY_LOAD_BALANCER))]
#TODO:Add support for LoadBalancing:Application
query = ((priceQuery['usageType'] == consts.REGION_PREFIX_MAP[pdim.region]+'LoadBalancerUsage') & (priceQuery['operation'] == 'LoadBalancing'))
pricing_records, cost = phelper.calculate_price(consts.SERVICE_ELB, elbDb, query, pdim.elbHours, pricing_records, cost)

@@ -105,6 +104,20 @@
query = ((priceQuery['usageType'] == consts.REGION_PREFIX_MAP[pdim.region]+'DataProcessing-Bytes') & (priceQuery['operation'] == 'LoadBalancing'))
pricing_records, cost = phelper.calculate_price(consts.SERVICE_ELB, elbDb, query, pdim.elbDataProcessedGb, pricing_records, cost)

#Application Load Balancer
#TODO: add support for Network Load Balancer
if pdim.albHours:
albDb = dbs[phelper.create_file_key((consts.REGION_MAP[pdim.region], consts.TERM_TYPE_MAP[pdim.termType], consts.PRODUCT_FAMILY_APPLICATION_LOAD_BALANCER))]
query = ((priceQuery['usageType'] == consts.REGION_PREFIX_MAP[pdim.region]+'LoadBalancerUsage') & (priceQuery['operation'] == 'LoadBalancing:Application'))
pricing_records, cost = phelper.calculate_price(consts.SERVICE_ELB, albDb, query, pdim.albHours, pricing_records, cost)

if pdim.albLcus:
albDb = dbs[phelper.create_file_key((consts.REGION_MAP[pdim.region], consts.TERM_TYPE_MAP[pdim.termType], consts.PRODUCT_FAMILY_APPLICATION_LOAD_BALANCER))]
query = ((priceQuery['usageType'] == consts.REGION_PREFIX_MAP[pdim.region]+'LCUUsage') & (priceQuery['operation'] == 'LoadBalancing:Application'))
pricing_records, cost = phelper.calculate_price(consts.SERVICE_ELB, albDb, query, pdim.albLcus, pricing_records, cost)



#TODO: EIP
#TODO: Dedicated Host
#TODO: NAT Gateway
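The two new branches reflect how AWS bills an Application Load Balancer: a flat hourly charge plus a charge per LCU (Load Balancer Capacity Unit), both priced under the LoadBalancing:Application operation with the LoadBalancerUsage and LCUUsage usage types. The lookup pulls real rates out of the price-list index, but the overall arithmetic reduces to the sketch below, where the rates are placeholders rather than actual AWS prices.

# Back-of-the-envelope view of the ALB charge; the real rates come from the
# price-list index files, the numbers here are placeholders.
ALB_HOURLY_RATE = 0.0225   # assumed USD per ALB-hour, varies by region
ALB_LCU_RATE = 0.008       # assumed USD per LCU-hour, varies by region

alb_hours = 730            # example: one ALB for a month
alb_lcus = 1500            # example: forecast LCU consumption

alb_cost = alb_hours * ALB_HOURLY_RATE + alb_lcus * ALB_LCU_RATE
print(round(alb_cost, 2))  # hourly charge plus LCU charge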
2 changes: 1 addition & 1 deletion cloudformation/function-plus-schedule.json
@@ -94,7 +94,7 @@
"Role": { "Fn::GetAtt" : ["LambdaRealtimeCalculatePricingRole", "Arn"] },
"Code": {
"S3Bucket": { "Fn::Join" : [ "", ["concurrencylabs-deployment-artifacts-public-", { "Ref" : "AWS::Region" }] ] },
"S3Key": "lambda-near-realtime-pricing/calculate-near-realtime-pricing-v3.7.zip"
"S3Key": "lambda-near-realtime-pricing/calculate-near-realtime-pricing-v3.8.zip"
},
"Runtime": "python2.7",
"Timeout": "300",
46 changes: 44 additions & 2 deletions functions/calculate-near-realtime.py
@@ -136,6 +136,8 @@ def handler(event, context):
elb_hours = 0
elb_data_processed_gb = 0
elb_instances = {}
alb_hours = 0
alb_lcus = 0

#Get tagged ELB(s) and their registered instances
#taggedelbs = find_elbs(tagkey, tagvalue)
@@ -153,7 +155,8 @@
log.info("Found tagged Network Load Balancers:{}".format(taggednlbs))

#TODO: once pricing for ALB and NLB is added to awspricecalculator, separate hours by ELB type
elb_hours += (len(taggedelbs)+len(taggedalbs)+len(taggednlbs))*HOURS_DICT[DEFAULT_FORECAST_PERIOD]
elb_hours += (len(taggedelbs)+len(taggednlbs))*HOURS_DICT[DEFAULT_FORECAST_PERIOD]
alb_hours += len(taggedalbs)*HOURS_DICT[DEFAULT_FORECAST_PERIOD]


if elb_instances:
@@ -174,13 +177,23 @@
else:
log.info("Didn't find any tagged, running EC2 instances")

#Calculate ELB cost
#Calculate Classic ELB cost
if elb_hours:
elb_cost = ec2pricing.calculate(data.Ec2PriceDimension(region=region, elbHours=elb_hours,elbDataProcessedGb=elb_data_processed_gb))
if 'pricingRecords' in elb_cost:
pricing_records.extend(elb_cost['pricingRecords'])
ec2Cost = ec2Cost + elb_cost['totalCost']

#Calculate Application Load Balancer cost
if alb_hours:
alb_lcus = calculate_alb_lcus(start, end, taggedalbs)*calculate_forecast_factor()
alb_cost = ec2pricing.calculate(data.Ec2PriceDimension(region=region, albHours=alb_hours, albLcus=alb_lcus))
if 'pricingRecords' in alb_cost:
pricing_records.extend(alb_cost['pricingRecords'])
ec2Cost = ec2Cost + alb_cost['totalCost']



#Calculate EC2 compute time for ALL instance types found (subscribed to ELB or not) - group by instance types
all_instance_dict = {}
all_instance_dict.update(ec2_instances)
@@ -268,6 +281,7 @@ def handler(event, context):

#Lambda functions
#TODO: add support for lambda function qualifiers
#TODO: calculate data ingested into CloudWatch Logs
lambdafunctions = resource_manager.get_resources(SERVICE_LAMBDA, RESOURCE_LAMBDA_FUNCTION)
for func in lambdafunctions:
executions = calculate_lambda_executions(start, end, func)
@@ -599,6 +613,34 @@ def calculate_elb_data_processed(start, end, elb_instances):

return result

"""
For each ALB, get the value for the ConsumedLCUs metric
"""


def calculate_alb_lcus(start, end, albs):
result = 0

for a in albs:
log.info("Getting ConsumedLCUs for ALB: [{}]".format(a))
metricsLcus = cwclient.get_metric_statistics(
Namespace='AWS/ApplicationELB',
MetricName='ConsumedLCUs',
Dimensions=[{'Name': 'LoadBalancer','Value': "app/{}".format(a)}],
StartTime=start,
EndTime=end,
Period=60*METRIC_WINDOW,
Statistics = ['Sum']
)
for datapoint in metricsLcus['Datapoints']:
result += datapoint.get('Sum',0)

log.info("Total ConsumedLCUs for ALBs in time window of ["+str(METRIC_WINDOW)+"] minutes: ["+str(result)+"]")

return result





def calculate_lambda_executions(start, end, func):
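calculate_alb_lcus only sums the ConsumedLCUs datapoints observed in the metric window; the handler then multiplies that sum by calculate_forecast_factor() to extrapolate it to the forecast period before pricing it. calculate_forecast_factor is not shown in this commit, so the definition below is an assumed reconstruction based on how METRIC_WINDOW and HOURS_DICT are used in the handler, with placeholder values.

# Assumed shape of the extrapolation; the real calculate_forecast_factor and the
# real constant values are not part of this diff.
METRIC_WINDOW = 5                      # sampling window, in minutes
HOURS_DICT = {'monthly': 730}          # assumed forecast periods, in hours
DEFAULT_FORECAST_PERIOD = 'monthly'

def calculate_forecast_factor():
    # number of metric windows that fit in the forecast period
    return (HOURS_DICT[DEFAULT_FORECAST_PERIOD] * 60) / METRIC_WINDOW

# e.g. 2 LCUs consumed in the last 5 minutes extrapolate to the full month:
alb_lcus = 2 * calculate_forecast_factor()   # 17520 with these assumptions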
