updated to support analysis on new pile format
warmerdam committed May 21, 2016
1 parent 0725b87 commit f7ff3a8
Showing 3 changed files with 42 additions and 82 deletions.
44 changes: 14 additions & 30 deletions ground_surveyor/analyse_unit_field_pile.py
@@ -7,7 +7,7 @@
import scipy.signal
import json

-from osgeo import gdal_array
+from osgeo import gdal_array, gdal

from ground_surveyor import gsconfig

@@ -24,10 +24,15 @@ def __init__(self):
self.metadata = {}
self.input_metadata = []

-def load_pile(file_list_selected):
+def load_pile(pile_filename):
    dc = DataCube()

-    dc.n_file = len(file_list_selected)
+    raw_ds = gdal.Open(pile_filename)
+
+    dc.input_metadata = json.load(
+        open(os.path.splitext(pile_filename)[0]+'.json'))
+
+    dc.n_file = raw_ds.RasterCount

dc.cube = numpy.zeros((3,dc.n_file+1,
gsconfig.UF_TILE_SIZE,
@@ -43,24 +48,9 @@ def load_pile(file_list_selected):
}

    for i_file in range(dc.n_file):
-
-        unit_field_filename_path = file_list_selected[i_file]
-        unit_field_image = gdal_array.LoadFile(unit_field_filename_path)
-
-        unit_field_metadata_filename_path = os.path.splitext(unit_field_filename_path)[0] + '.json'
-
-        json_data = open(unit_field_metadata_filename_path)
-
-        unit_field_metadata = json.loads(json_data.read())
-
-        #////////////////////////////////////////////////////////////////
-        #// load a one band image for the selected unit field
-        dc.cube[0,i_file,:,:] = unit_field_image
-        dc.input_metadata.append(unit_field_metadata)
-
-        #////////////////////////////////////////////////////////////////
-        #// get the timestamp
-        dc.metadata['timestamp'][i_file] = unit_field_metadata['timestamp']
+        dc.cube[0,i_file,:,:] = raw_ds.GetRasterBand(i_file+1).ReadAsArray()
+        dc.metadata['timestamp'][i_file] \
+            = dc.input_metadata[i_file]['timestamp']

return dc
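For orientation: in the new pile format, a pile is a single multi-band GeoTIFF with one unit-field observation per band, plus a JSON sidecar (same basename, .json extension) holding one metadata record per band. A minimal reading sketch along those lines; the pile filename is hypothetical, and the list-per-band sidecar layout is inferred from how load_pile() indexes dc.input_metadata:

    import json
    import os.path

    from osgeo import gdal

    pile_filename = 'uf_3_7_raw.tif'  # hypothetical pile name

    # Sidecar shares the basename: here uf_3_7_raw.json.
    input_metadata = json.load(
        open(os.path.splitext(pile_filename)[0] + '.json'))

    ds = gdal.Open(pile_filename)
    for i_band in range(ds.RasterCount):
        # GDAL bands are 1-indexed; each band is one observation.
        tile = ds.GetRasterBand(i_band + 1).ReadAsArray()
        print '%d: %s' % (i_band, input_metadata[i_band]['timestamp'])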

@@ -160,27 +150,21 @@ def save_pile(dc, basepath=''):
pass

json.dump(dc.metadata,
-              open(basepath+'datacube_metadata.json','w'),
+              open(basepath+'_datacube_metadata.json','w'),
indent=4)

-    gdal_array.SaveArray(
-        numpy.reshape(dc.cube[0,:,:,:],
-                      (dc.n_file+1,
-                       n_pix_unit_field_on_the_side,
-                       n_pix_unit_field_on_the_side)),
-        basepath+'datacube_raw.tif')
gdal_array.SaveArray(
numpy.reshape(dc.cube[1,:,:,:],
(dc.n_file+1,
n_pix_unit_field_on_the_side,
n_pix_unit_field_on_the_side)),
-        basepath+'datacube_small.tif')
+        basepath+'_small.tif')
gdal_array.SaveArray(
numpy.reshape(dc.cube[2,:,:,:],
(dc.n_file+1,
n_pix_unit_field_on_the_side,
n_pix_unit_field_on_the_side)),
-        basepath+'datacube_large.tif')
+        basepath+'_large.tif')


if __name__ == '__main__':
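A note on the writer: gdal_array.SaveArray() takes a 3-D NumPy array and writes it as a multi-band GeoTIFF, one band per leading-axis slice, which is what gives both the input piles and these outputs their band-per-observation layout. A tiny sketch with illustrative shapes and names:

    import numpy
    from osgeo import gdal_array

    # Illustrative stack: 5 observations of a 32x32 unit field, plus
    # one extra band (save_pile() reserves n_file+1 bands).
    stack = numpy.zeros((5 + 1, 32, 32), dtype=numpy.float32)

    # Each stack[i,:,:] becomes band i+1 of the output GeoTIFF.
    gdal_array.SaveArray(stack, 'ufr_3_7_small.tif')  # hypothetical name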
34 changes: 0 additions & 34 deletions scripts/analyse_unit_field.py

This file was deleted.

46 changes: 28 additions & 18 deletions scripts/run_unit_fields.py
@@ -8,8 +8,8 @@
from ground_surveyor import analyse_unit_field_pile as uf_analyse


-def process_pile(out_basename, filelist):
-    dc = uf_analyse.load_pile(filelist)
+def process_pile(out_basename, raw_pile):
+    dc = uf_analyse.load_pile(raw_pile)
uf_analyse.analyse_pile(dc)
uf_analyse.compute_median(dc)
uf_analyse.compute_spatial_cross_correlations(dc)
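The truncated remainder of process_pile() presumably ends by writing results out; save_pile() exists in the module above, so driving one pile end to end would look roughly like this (paths hypothetical, and the save_pile() call is an assumption rather than part of this hunk):

    from ground_surveyor import analyse_unit_field_pile as uf_analyse

    dc = uf_analyse.load_pile('piles/uf_3_7_raw.tif')  # hypothetical pile
    uf_analyse.analyse_pile(dc)
    uf_analyse.compute_median(dc)
    uf_analyse.compute_spatial_cross_correlations(dc)
    uf_analyse.save_pile(dc, basepath='piles/ufr_3_7')  # assumed final step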
@@ -19,32 +19,42 @@ def main():
aparser = argparse.ArgumentParser(
description='Run analysis on a set of unit field piles')

-    aparser.add_argument('dir', default='.',
-                         help='Directory containing a set of unit field piles.')
+    aparser.add_argument('piles', default='.', nargs='+',
+                         help='Pile(s) to process.')

args = aparser.parse_args()

logging.basicConfig(level=logging.INFO)
#logging.basicConfig(level=logging.DEBUG)

-    piles = {}
-    file_list = os.listdir(args.dir)
-    for filename in file_list:
-        if (not filename.startswith('uf_')) or (not filename.endswith('.tif')):
-            continue
-
-        pile_name = '_'.join(filename.split('_')[1:3])
+    # As a special hack, if a single directory is provided, we will scan
+    # it for all piles.
+    if len(args.piles) == 1 and os.path.isdir(args.piles[0]):
+        dirname = args.piles[0]
+        args.piles = []
+        for filename in os.listdir(dirname):
+            if filename.startswith('uf_') and filename.endswith('_raw.tif'):
+                args.piles.append('%s/%s' % (dirname, filename))


-        if pile_name not in piles.keys():
-            piles[pile_name] = []
+    for filename in args.piles:
+        basename = os.path.basename(filename)
+        dirname = os.path.dirname(filename)
+        if dirname == '':
+            dirname = '.'

-        piles[pile_name].append(os.path.join(args.dir,filename))
+        if (not basename.startswith('uf_')) \
+           or (not basename.endswith('_raw.tif')):
+            logging.warning('%s does not look like a pile, skipping.',
+                            filename)
+            continue

-    for pile_name in piles.keys():
-        print pile_name, len(piles[pile_name])
+        pile_name = '_'.join(basename.split('_')[1:3])
+        print pile_name

-    process_pile('%s/ufr_%s_' % (args.dir, pile_name),
-                 piles[pile_name])
+        process_pile('%s/ufr_%s' % (dirname, pile_name),
+                     filename)



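Putting the naming convention together: an input pile uf_X_Y_raw.tif yields pile_name 'X_Y' from its two middle underscore fields, and outputs land beside it under an ufr_ prefix. A small sketch of the parsing, with a hypothetical filename:

    import os.path

    filename = 'piles/uf_3_7_raw.tif'  # hypothetical input pile

    basename = os.path.basename(filename)       # 'uf_3_7_raw.tif'
    dirname = os.path.dirname(filename) or '.'  # 'piles'

    # Underscore fields 1 and 2 identify the pile.
    pile_name = '_'.join(basename.split('_')[1:3])     # '3_7'
    out_basename = '%s/ufr_%s' % (dirname, pile_name)  # 'piles/ufr_3_7'

With the new interface the script is invoked either with explicit piles or with a single directory to scan, e.g. python scripts/run_unit_fields.py piles/ or python scripts/run_unit_fields.py piles/uf_3_7_raw.tif.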
