Skip to content

Commit

Permalink
Update URLs for API requests
Browse files Browse the repository at this point in the history
  • Loading branch information
gtsarik committed Dec 12, 2017
1 parent cfbdc88 commit 9c299e8
Show file tree
Hide file tree
Showing 5 changed files with 101 additions and 60 deletions.
17 changes: 10 additions & 7 deletions src/api/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
# DataSet
# url(r'^datasets/', 'api.views.datasets_list', name='datasets_list'),
# url(r'^polygons/', CustomerPolygonsList.as_view()),
url(r'^datasets/', DataSetList.as_view({'get': 'list'})),
url(r'^datasets-list/', DataSetList.as_view({'get': 'list'})),
# url(r'^datasets/', DataSetList.as_view()),
url(r'^dataset/(?P<ds_id>[0-9]+)/$', DataSetDetail.as_view()),

Expand All @@ -43,19 +43,22 @@
# TimeSeries
url(r'^timeseries-list/$', TimeSeriesList.as_view({'get': 'list'})),
# url(r'^timeseries-list/$', TimeSeriesList.as_view()),
url(r'^timeseries/(?P<ts_id>[0-9]+)/$', TimeSeriesDetail.as_view()),
url(r'^timeseries/(?P<shapefile_id>[0-9]+)/$', TimeSeriesDetail.as_view()),
# url(r'^timeseries/', TimeSeriesDetail.as_view({'get': 'list'})),
url(r'^timeseries', TimeSeriesNameDetail.as_view()),

# Reports Attribute
url(r'^reports-list/$', ReportsList.as_view({'get': 'list'})),
# url(r'^reports-list/$', ReportsList.as_view()),
url(r'^reports/(?P<ds_id>[0-9]+)/$', ReportsDetail.as_view()),

# upload AOI file
url(r'^upload/(?P<ds_id>[0-9]+)/$', UploadFileAoiView.as_view()),
url(r'^upload-aoi/(?P<ds_id>[0-9]+)/$', UploadFileAoiView.as_view()),

# upload file to FTP
url(r'^upload/$', UploadFileFtpView.as_view()),

# Reports Attribute
url(r'^reports-list/$', ReportsList.as_view({'get': 'list'})),
# url(r'^reports-list/$', ReportsList.as_view()),
url(r'^report/(?P<ds_id>[0-9]+)/$', ReportsDetail.as_view()),


# Auth Token
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
Expand Down
75 changes: 50 additions & 25 deletions src/api/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -568,13 +568,16 @@ class TimeSeriesDetail(APIView):
# authentication_classes = (SessionAuthentication, BasicAuthentication)
permission_classes = (IsAuthenticated,)

def get(self, request, ts_id, format=None):
def get(self, request, shapefile_id, format=None):
data = {'auth': 'Need YOUR ACCESS TOKEN'}

if request.auth:
try:
queryset = TimeSeriesResults.objects.get(pk=ts_id)
serializer = TimeSeriesResultSerializer(queryset)
# dataset = DataSet.objects.get(pk=ds_id)
queryset = TimeSeriesResults.objects.filter(
user=request.user,
customer_polygons__id=shapefile_id).order_by('id')
serializer = TimeSeriesResultSerializer(queryset, many=True)
data = serializer.data
except TimeSeriesResults.DoesNotExist:
return Response({'error': 'TimeSeries Does Not Exist'}, status=status.HTTP_400_BAD_REQUEST)
Expand All @@ -598,13 +601,12 @@ def get(self, request, format=None):
try:
queryset = TimeSeriesResults.objects.filter(
user=request.user,
customer_polygons__name=request.GET['name']).order_by('id')
customer_polygons__name=request.GET['shapefile_name']).order_by('id')
serializer = TimeSeriesResultSerializer(queryset, many=True)
data = serializer.data
# except KeyError:
except Exception, e:
print '!!!!!!!!!!!!! ERROR TimeSeriesResults ================================ ', e
return Response({'error': 'Invalid TimeSeries Name'},
return Response({'error': 'Invalid ShapeFile Name'},
status=status.HTTP_400_BAD_REQUEST)

return Response(data)
Expand Down Expand Up @@ -710,7 +712,7 @@ def get(self, request, ds_id, format=None):
serializer = ReportsSerializer(queryset, many=True)
data = serializer.data
except Exception, e:
return Response({'error': e}, status=status.HTTP_400_BAD_REQUEST)
return Response({'error': 'DataSet Does Not Exist'}, status=status.HTTP_400_BAD_REQUEST)

return Response(data)

Expand All @@ -725,11 +727,11 @@ class UploadFileAoiView(APIView):
permission_classes = (IsAuthenticated,)
# parser_classes = (FileUploadParser,)

def get_object(self, ds_id):
try:
return DataSet.objects.get(pk=ds_id)
except DataSet.DoesNotExist:
return 'Invalid Dataset ID'
# def get_object(self, ds_id):
# try:
# return DataSet.objects.get(pk=ds_id)
# except DataSet.DoesNotExist:
# return 'Invalid Dataset ID'

def post(self, request, ds_id, format=None):
error = ''
Expand All @@ -739,14 +741,14 @@ def post(self, request, ds_id, format=None):
statistic = 'Mean'
doc_kml = None
urls = []
data = {'auth': 'Need YOUR ACCESS TOKEN'}
data = {'auth error': 'Need YOUR ACCESS TOKEN'}

if request.auth:
try:
dataset = DataSet.objects.get(pk=ds_id)
except DataSet.DoesNotExist:
data = {
'error': 'Invalid Dataset ID',
'error': 'DataSet Does Not Exist',
'status': status.HTTP_400_BAD_REQUEST
}
return Response(data)
Expand Down Expand Up @@ -784,11 +786,21 @@ def post(self, request, ds_id, format=None):
name=fl).delete()

if 'reports' in request.GET:
reports_list = request.GET['reports'].replace('%', ' ')
# reports_list = request.GET['reports'].replace(' ', '')
reports_list = request.GET['reports'].replace('+', ' ')
reports_list = reports_list.split(',')
reports_names = reports_list
new_rep = ''

# data = {
# 'REP GET': request.GET['reports'],
# 'REP LIST': reports_list,
# 'REP NAME': reports_names,
# 'status': status.HTTP_400_BAD_REQUEST,
# }

# return Response(data)

for report in reports_list:
if dataset.is_ts:
rep_ts = report.split(' ')[:-1]
Expand Down Expand Up @@ -816,9 +828,23 @@ def post(self, request, ds_id, format=None):
return Response(data)

reports.append(new_rep)
else:
data = {
'error': 'For calculations in the body of the request, you must specify a list of reports',
'status': status.HTTP_400_BAD_REQUEST,
}

return Response(data)

if 'statistic' in request.GET:
statistic = request.GET['statistic'].split(',')
statistic = request.GET['statistic']

# data = {
# 'statistic': statistic,
# 'status': status.HTTP_400_BAD_REQUEST,
# }

# return Response(data)

if ext == '.kmz':
zip_file = '{0}.zip'.format(fl)
Expand All @@ -845,9 +871,9 @@ def post(self, request, ds_id, format=None):

if error:
data = {
'filename': new_kml_file,
'filename': file_name,
'error': 'Error in the shapefile structure',
'status': status.HTTP_400_BAD_REQUEST,
'error KMZ': error
}

return Response(data)
Expand All @@ -859,9 +885,9 @@ def post(self, request, ds_id, format=None):
info_window = get_info_window(doc_kml, fl, path_new_kml)
except Exception, e:
data = {
'filename': new_kml_file,
'filename': file_name,
'error': 'Error in the shapefile structure',
'status': status.HTTP_400_BAD_REQUEST,
'error KMZ': e
}

return Response(data)
Expand All @@ -880,8 +906,8 @@ def post(self, request, ds_id, format=None):
if error:
data = {
'filename': file_name,
'error': 'Error in the shapefile structure',
'status': status.HTTP_400_BAD_REQUEST,
'error KML': error
}

return Response(data)
Expand Down Expand Up @@ -950,9 +976,8 @@ def post(self, request, ds_id, format=None):
############################################################################

data = {
'error': e,
'status': status.HTTP_400_BAD_REQUEST,
'message': 'Please add the GEO data to create Time Series.'
'error': 'Please add the GEO data to create Time Series.',
'status': status.HTTP_400_BAD_REQUEST
}

return Response(data)
Expand All @@ -974,8 +999,8 @@ def post(self, request, ds_id, format=None):
except Exception, e:
data = {
'filename': file_name,
'error': 'Error in the shapefile structure',
'status': status.HTTP_400_BAD_REQUEST,
'error': e
}

return Response(data)
Expand Down
20 changes: 10 additions & 10 deletions src/core/editor_shapefiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,8 +243,8 @@ def get_info_window(doc_kml, file_name, path_to_file):


def getUploadListTifFiles(customer, dataset, *args):
print '!!!!!!!!!!!!!!!!!!! ARGS ====================== ', args
print '!!!!!!!!!!!!!!!!!!! DATASET ====================== ', dataset
# print '!!!!!!!!!!!!!!!!!!! ARGS ====================== ', args
# print '!!!!!!!!!!!!!!!!!!! DATASET ====================== ', dataset
list_files_tif = []
# list_data_db = []
# attributes_tmp = {}
Expand All @@ -263,10 +263,10 @@ def getUploadListTifFiles(customer, dataset, *args):
# attributes_reports = AttributesReport.objects.filter(
# user=customer, data_set=dataset)

print '!!!!!!!!!!!!!!!!!!! IS TS ====================== ', dataset.is_ts
# print '!!!!!!!!!!!!!!!!!!! IS TS ====================== ', dataset.is_ts

# print '!!!!!!!!!!!!!!!!!!! statistic ====================== ', statistic
print '!!!!!!!!!!!!!!!!!!! Attributes ====================== ', attributes
# print '!!!!!!!!!!!!!!!!!!! Attributes ====================== ', attributes
# print '!!!!!!!!!!!!!!!!!!! attributes_reports ====================== ', attributes_reports
# print '!!!!!!!!!!!!!!!!!!! upload_file ====================== ', upload_file

Expand All @@ -277,7 +277,7 @@ def getUploadListTifFiles(customer, dataset, *args):
# attributes_reports = sorted(attributes_reports.keys())
shelf_data = dataset.shelf_data

print '!!!!!!!!!!!!!!!!!!! SHD ====================== ', shelf_data
# print '!!!!!!!!!!!!!!!!!!! SHD ====================== ', shelf_data

# print '!!!!!!!!!!!!!!!!!!! 2 attributes_reports ====================== ', attributes_reports

Expand All @@ -289,7 +289,7 @@ def getUploadListTifFiles(customer, dataset, *args):
attr_list = attr.split('_')
project_directory = os.path.join(PROJECTS_PATH, dataset.results_directory)

print '!!!!!!!!!! ATTR LIST ========================= ', attr_list
# print '!!!!!!!!!! ATTR LIST ========================= ', attr_list
# print '!!!!!!!!!! sub_dir_path ========================= ', sub_dir_path
# print '!!!!!!!!!! project_directory ========================= ', project_directory

Expand All @@ -298,7 +298,7 @@ def getUploadListTifFiles(customer, dataset, *args):
pr_root, pr_dirs, pr_files = os.walk(project_directory).next()
pr_dirs.sort()

print '!!!!!!!!!! DIRS ========================= ', pr_dirs
# print '!!!!!!!!!! DIRS ========================= ', pr_dirs
# print '!!!!!!!!!! project_directory ========================= ', project_directory
# print '!!!!!!!!!! attr.attribute ========================= ', attr.attribute

Expand All @@ -307,7 +307,7 @@ def getUploadListTifFiles(customer, dataset, *args):
# attribute_name = attr_list[0].split(' ')[:-1]
# attribute_name = str((' ').join(attribute_name))

print '!!!!!!!!!! PD ========================= ', pd
# print '!!!!!!!!!! PD ========================= ', pd
# print '!!!!!!!!!! attr.attribute TYPE ========================= ', type(attribute_name)


Expand Down Expand Up @@ -489,7 +489,7 @@ def create_new_calculations_aoi(customer, doc_kml, data_set, *args):
attr_name = line_list[1]
shd_attr_name = attr_name

print '!!!!!!!!!!!!! line_list =========================== ', line_list[2]
# print '!!!!!!!!!!!!! line_list =========================== ', line_list[2]
# print '!!!!!!!!!!!!! select_shd =========================== ', select_shd
# print '!!!!!!!!!!!!! attr_name =========================== ', attr_name
# print '!!!!!!!!!!!!! shd_attr_name =========================== ', shd_attr_name
Expand Down Expand Up @@ -827,7 +827,7 @@ def createUploadTimeSeriesResults(customer, aoi, attributes, data_set):
# print '!!!!!!! 2 NEW LINE ========================== ', new_line

addUploadTsToDB(ts_name, aoi.user, aoi.data_set, aoi, result_year,
sub_dir_name, result_date, ts_value, attr)
sub_dir_name, result_date, ts_value, cur_attr)

# list_files_tif.append(fl_tif)
# list_data_db.append(str_data_db)
Expand Down
13 changes: 13 additions & 0 deletions src/core/functions_customer.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,19 @@ def getCountTs(dataset, shd):
return count_ts


def getGeoCoord(filename):
    """Read a file of comma-separated "x,y" lines and return the
    coordinates as a list of [float, float] pairs.

    Each non-empty line must start with two numeric fields separated
    by a comma; any extra fields on a line are ignored.

    Raises ValueError if a field is not a valid float, and IndexError
    if a line has fewer than two comma-separated fields.
    """
    coord = []

    # "with" guarantees the file handle is closed even when parsing
    # raises (the original left the file open — a handle leak).
    # Iterating the file object directly avoids loading the whole
    # file into memory the way f.readlines() does.
    with open(filename) as f:
        for line in f:
            parts = line.rstrip('\n').split(',')
            coord.append([float(parts[0]), float(parts[1])])

    return coord


def createKml(user, filename, info_window, url, data_set, count_color, *args):
# Create KML file for the draw polygon

Expand Down
36 changes: 18 additions & 18 deletions src/customers/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -1272,17 +1272,17 @@ def getIndex(stroke):
return index


def getGeoCoord(filename):
coord = []
f = open(filename)
# def getGeoCoord(filename):
# coord = []
# f = open(filename)

for line in f.readlines():
line = line.rstrip('\n')
line = line.split(',')
tmp = [float(line[0]), float(line[1])]
coord.append(tmp)
# for line in f.readlines():
# line = line.rstrip('\n')
# line = line.split(',')
# tmp = [float(line[0]), float(line[1])]
# coord.append(tmp)

return coord
# return coord


# def addPolygonToDB(name, kml_name, user, kml_path, kml_url, ds, text_kml=''):
Expand Down Expand Up @@ -3412,7 +3412,7 @@ def customer_section(request):

# get the lat/lon values for a GeoTIFF files
try:
print '!!!!!!!!!! FILE TIF =============================== ', file_tif
# print '!!!!!!!!!! FILE TIF =============================== ', file_tif

ds = gdal.Open(file_tif)
width = ds.RasterXSize
Expand Down Expand Up @@ -3555,13 +3555,13 @@ def customer_section(request):
cLat = 0
cLng = 0

print '!!!!!!!!!! E centerY =============================== ', cLat
print '!!!!!!!!!! E centerX =============================== ', cLng
# print '!!!!!!!!!! E centerY =============================== ', cLat
# print '!!!!!!!!!! E centerX =============================== ', cLng

print '!!!!!!!!!! MIN Y LAT 1 =============================== ', eLat_1
print '!!!!!!!!!! MIN X LNG 1 =============================== ', eLng_1
print '!!!!!!!!!! MAX Y LAT 2 =============================== ', eLat_2
print '!!!!!!!!!! MAX X LNG 2 =============================== ', eLng_2
# print '!!!!!!!!!! MIN Y LAT 1 =============================== ', eLat_1
# print '!!!!!!!!!! MIN X LNG 1 =============================== ', eLng_1
# print '!!!!!!!!!! MAX Y LAT 2 =============================== ', eLat_2
# print '!!!!!!!!!! MAX X LNG 2 =============================== ', eLng_2

# print '!!!!!!!!!!!!!!!!! data_set =============================== ', cip_choice.data_set.name
# print '!!!!!!!!!!!!!!!!! google_map_zoom =============================== ', google_map_zoom
Expand Down Expand Up @@ -4473,8 +4473,8 @@ def files_lister(request):
# for n in tmp_list:
# select_attr.append(n.split('_')[0])

print '!!!!!!!!!!!!!!!! ATTR LIST ============================ ', select_attr
print '!!!!!!!!!!!!!!!! STAT LIST ============================ ', select_stat
# print '!!!!!!!!!!!!!!!! ATTR LIST ============================ ', select_attr
# print '!!!!!!!!!!!!!!!! STAT LIST ============================ ', select_stat

if upload_fl:
path_test_data = os.path.join(path_ftp_user, upload_fl)
Expand Down

0 comments on commit 9c299e8

Please sign in to comment.