Skip to content

Commit

Permalink
adding download file with conversion unit
Browse files Browse the repository at this point in the history
  • Loading branch information
rahulworld committed Aug 5, 2018
1 parent 7a97482 commit 3be4ff1
Show file tree
Hide file tree
Showing 4 changed files with 68 additions and 253 deletions.
Binary file not shown.
Binary file not shown.
247 changes: 31 additions & 216 deletions istsos/plugins/unit_con_pint/retrievers/aiopg/observations.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,16 +444,12 @@ def __get_array_2(self, offerings, request):

@asyncio.coroutine
def __get_array(self, offerings, request):
# print("To PRINT REQUEST AT OBSERVATION")
# print(request[0]['offerings']['observable_properties'])
# To_unit=request['json']['in_unit']
# print("################################3")
ConvertUnit=''
dbmanager = yield from self.init_connection()
cur = dbmanager.cur
op_filter = request.get_filter('observedProperties')
# print('Print Unit of observations line 250')
# print(op_filter)
print('Print Unit of observations line 250')
print(op_filter)
tables = {}
columns = []
headers = [{
Expand Down Expand Up @@ -514,22 +510,6 @@ def __get_array(self, offerings, request):
unionSelect = []
jsonKeys = []
unionColumns = []
# for idx in range(0, len(columns)):
# unionSelect.append(
# "SUM(c%s)::text as c%s" % (idx, idx)
# )
# unionColumns.append(
# "NULL::double precision"
# )
# jsonKeys.append("COALESCE(c%s, 'null')" % (idx))
# for idx in range(0, len(columns)):
# unionSelect.append(
# "SUM(c%s)::text as c%s" % (idx, idx)
# )
# unionColumns.append(
# "NULL::double precision as c%s" % (idx)
# )
# jsonKeys.append("COALESCE(c%s, 'null')" % (idx))

for idx in range(0, len(columns)):
unionSelect.append(
Expand All @@ -545,117 +525,24 @@ def __get_array(self, offerings, request):
temporal = []
where = []
params = []
if request.get_filters() is not None:
keys = list(request.get_filters())
for key in keys:
fltr = request.get_filters()[key]
if key == 'temporal':
if fltr['fes'] == 'during':
temporal.append("""
begin_time >= %s::timestamp with time zone
AND
end_time <= %s::timestamp with time zone
""")
params.extend(fltr['period'])

elif fltr['fes'] == 'equals':
temporal.append("""
begin_time = end_time
AND
begin_time = %s::timestamp with time zone
""")
params.append(fltr['instant'])

where.append(
"(%s)" % (' OR '.join(temporal))
)

# print('Print Unit of Measurement')
# print(headers)

for table in tables.keys():
off_cols = tables[table]
cols = unionColumns.copy()
# print('Print col in observations')
# print(cols)
# print(off_cols)
for col in off_cols:
# ConvertScript="""np.ting from(select SUBSTRING(CAST(tmp.num as varchar),'[0-9]+') as ting from(select %s *'m'::unit@ 'mm' as num)as tmp)as np"""% (", ",col)
# ConvertScript="""
# c0.ting as c0
# from
# (
# select SUBSTRING(CAST(tmp.num as varchar),'[0-9]+') as ting
# from
# (
# select """+col+ """*'m'::unit@ 'mm' as num from data._belin
# )as tmp)"""
##################################
# ConvertScript="""np.ting as c0
# from
# (
# select SUBSTRING(CAST(tmp.num as varchar),'[0-9]+') as ting
# from
# (
# select """+col+ """*'m'::unit@ 'mm' as num
# )as tmp) as np"""
# # ConvertScript="np.ting from(select SUBSTRING(CAST(tmp.num as varchar),'[0-9]+') as ting from(select" +col+ "*'m'::unit@ 'mm' as num)as tmp)as np"
# print('Printing ConvertScript')
# print(ConvertScript)
###################################
# cols[
# columns.index(col)
# ] = unionColumns[columns.index(col)].replace(
# "NULL::double precision", ConvertScript
# )
####################################3
# convert_unit="""%s*'%s'::unit@@'%s' """%(col,ConvertUnit,To_unit)
# print('Print convert query for postgresql-unit')
# print(convert_unit)
# cols[
# columns.index(col)
# ] = unionColumns[columns.index(col)].replace(
# "NULL::double precision",
# convert_unit
# )
#############################
# cols[
# columns.index(col)
# ] = unionColumns[columns.index(col)].replace(
# "NULL::double precision",
# col+"*'m'::unit@@'mm' "
# )
if 'in_unit' in request['json']:
To_unit=request['json']['in_unit']
convert_unit="""%s*'%s'::unit@@'%s' """%(col,ConvertUnit,To_unit)
# print('Print convert query for postgresql-unit')
# print(convert_unit)
cols[
columns.index(col)
] = unionColumns[columns.index(col)].replace(
"NULL::double precision",
convert_unit
)
else:
cols[
columns.index(col)
] = unionColumns[columns.index(col)].replace(
"NULL::double precision",
col
)

# print('Print col in observations 1')
# print(cols)
# print(off_cols)
# uSql = """
# SELECT
# end_time, %s
# FROM
# data.%s
# """ % (
# ", ".join(cols), table
# )
cols[
columns.index(col)
] = unionColumns[columns.index(col)].replace(
"NULL::double precision",
col
)

print('Print col in observations 1')
print(cols)
print(off_cols)
uSql = """
SELECT
end_time, %s
Expand All @@ -664,104 +551,25 @@ def __get_array(self, offerings, request):
""" % (
", ".join(cols), table
)
print('hhhhhhhhhhhhhhhhhhhhhhhhhhhh')

# print('Query Printing uSql')
# print(uSql)
# uSql = """
# SELECT
# end_time, %s '*' %s ::unit@ %s
# end_time, %s
# FROM
# data.%s
# """ % (
# ", ".join(cols), ConvertUnit, To_unit, table
# ", ".join(cols), table
# )
if len(where) > 0:
uSql += "WHERE %s" % (
'AND'.join(where)
)
unions.append("(%s)" % uSql)
# print('Query Printing uSql')
# print(uSql)

jsonSql = """
SELECT array_agg(
ARRAY[
to_char(end_time, 'YYYY-MM-DD"T"HH24:MI:SSZ'),
%s
]
)
FROM
""" % (
", ".join(jsonKeys),
)
# print('Query Printing jsonSql')
# print(jsonSql)
# print('Query Printing unionSelect')
# print(unionSelect)
# print('union')
# print(unions)

# sql = """
# SET enable_seqscan=false;
# SET SESSION TIME ZONE '+00:00';
# %s
# (
# SELECT end_time, %s
# FROM (
# %s
# ) a
# GROUP BY end_time
# ORDER BY end_time
# ) b
# """ % (
# jsonSql,
# unionSelect,
# " UNION ".join(unions)
# )

sql = """
SET enable_seqscan=false;
SET SESSION TIME ZONE '+00:00';
%s
(
SELECT end_time, %s
FROM (
%s
) a
GROUP BY end_time
ORDER BY end_time
) b
""" % (
jsonSql,
unionSelect,
" UNION ".join(unions)
)
print(uSql)

# sql = """
# SET enable_seqscan=false;
# SET SESSION TIME ZONE '+00:00';
# %s
print('hhhhhhhhhhhhhhhhhhhhhhhhhhhhertrbbbbbbbbbbbbbbbet')
# istsos.debug(
# (
# SELECT end_time, %s*%s::unit@%s
# FROM (
# %s
# ) a
# GROUP BY end_time
# ORDER BY end_time
# ) b
# """ % (
# jsonSql,
# unionSelect,
# ConvertUnit,
# To_unit,
# " UNION ".join(unions)
# )

istsos.debug(
(
yield from cur.mogrify(sql, tuple(params*len(unions)))
).decode("utf-8")
)
# yield from cur.mogrify(uSql, tuple(params*len(unions)))
# ).decode("utf-8")
# )
# istsos.debug(
# (
# yield from cur.mogrify(sql, tuple(params*2))
Expand All @@ -770,11 +578,18 @@ def __get_array(self, offerings, request):

# print("Observation.py")
# print(sql)
yield from cur.execute(sql, tuple(params*len(unions)))
# yield from cur.execute(sql, tuple(params*0))
sql="SELECT _9 as c0 FROM data._belin"
yield from cur.execute(sql, { "type": "array" })
print('hhhhhhhhhhhhhhhhhhhhhhhhhhhhertredsfsdfggggggggggggggggggggggt')
rec = yield from cur.fetchone()
request['observations'] = rec[0]
request['headers'] = headers
print('rec1')
print(rec)
request['observations'] = []
# request['headers'] = headers
# dbmanager1 = yield from self.init_connection()
# cur1 = dbmanager1.cur
# yield from cur1.execute(uSql)
# rec1 = yield from cur1.fetchone()
# recs = yield from cur.fetchall()
istsos.debug("Data is fetched!")

Expand Down
74 changes: 37 additions & 37 deletions istsos/plugins/unit_con_pint/unitConvPint.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,44 +37,44 @@ def before(self, request):

@asyncio.coroutine
def after(self, request):
from_unit=request['json']['data']['from_unit']
to_unit=request['json']['data']['to_unit']
# time=request['json']['headers'][0]['column']
# value=request['json']['headers'][1]['name']
# from_unit=request['headers'][1]['uom']
# to_unit=request['json']['to']
# print(request['observations'])
recs=request['observations'].copy()
ConvertUnit=[]
for rec in recs:
# change=rec[1]*ureg.kilometers
# change1=change.to(ureg.meter)
# change2=change1.magnitude
change=str(rec[1])+"*"+from_unit+"to"+to_unit
# change=Q_(str(rec[1]), ureg.degC).to(ureg.kelvin)
# change=str(rec[1])+"*degC"+"to"+"degF"
# change=str(rec[1])+"*ureg.degC"+"to"+"ureg.degF"
# change=rec[1]*ureg.degC
# change=str(rec[1])+"* kelvin to degF"
src, dst = change.split('to')
change1=Q_(src).to(dst)
# home = Q_(rec[1], ureg.degC)
# change1=home.to('degF')
# change1=Q_(rec[1], ureg.degC).to(ureg.kelvin).magnitude
change2=change1.magnitude
# print(change2)
# print(change1)
ConvertUnit.append({
"datetime" : rec[0],
"value": change2
})
# request['observations1'].append({
# "timestamp": str(rec[0]),
# "rainfall": change2
# })
# from_unit=request['json']['data']['from_unit']
# to_unit=request['json']['data']['to_unit']
# # time=request['json']['headers'][0]['column']
# # value=request['json']['headers'][1]['name']
# # from_unit=request['headers'][1]['uom']
# # to_unit=request['json']['to']
# # print(request['observations'])
# recs=request['observations'].copy()
# ConvertUnit=[]
# for rec in recs:
# # change=rec[1]*ureg.kilometers
# # change1=change.to(ureg.meter)
# # change2=change1.magnitude
# change=str(rec[1])+"*"+from_unit+"to"+to_unit
# # change=Q_(str(rec[1]), ureg.degC).to(ureg.kelvin)
# # change=str(rec[1])+"*degC"+"to"+"degF"
# # change=str(rec[1])+"*ureg.degC"+"to"+"ureg.degF"
# # change=rec[1]*ureg.degC
# # change=str(rec[1])+"* kelvin to degF"
# src, dst = change.split('to')
# change1=Q_(src).to(dst)
# # home = Q_(rec[1], ureg.degC)
# # change1=home.to('degF')
# # change1=Q_(rec[1], ureg.degC).to(ureg.kelvin).magnitude
# change2=change1.magnitude
# # print(change2)
# # print(change1)
# ConvertUnit.append({
# "datetime" : rec[0],
# "value": change2
# })
# # request['observations1'].append({
# # "timestamp": str(rec[0]),
# # "rainfall": change2
# # })
request['response'] = Response(
json_source=Response.get_template({
"data": ConvertUnit,
"headers": request['headers']
"data": request['observations'],
# "headers": request['headers']
})
)

0 comments on commit 3be4ff1

Please sign in to comment.