Change predictions now integrated with a new class
sambowers committed Feb 12, 2018
1 parent e4a3251 commit 72082bb
Showing 1 changed file with 85 additions and 187 deletions.
272 changes: 85 additions & 187 deletions biota/change.py
@@ -7,56 +7,41 @@
import scipy.ndimage
from scipy.ndimage.measurements import label

import matplotlib.pyplot as plt
import pdb




def lee_filter(img, size):
    """
    Apply a Lee speckle filter to an image.
    From https://stackoverflow.com/questions/39785970/speckle-lee-filter-in-python
    """
from scipy.ndimage.filters import uniform_filter
from scipy.ndimage.measurements import variance

img_mean = uniform_filter(img, (size, size))
img_sqr_mean = uniform_filter(img**2, (size, size))
img_variance = img_sqr_mean - img_mean**2

overall_variance = variance(img)

img_weights = img_variance**2 / (img_variance**2 + overall_variance**2)
img_output = img_mean + img_weights * (img - img_mean)

return img_output
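
# Illustrative sketch (added for clarity, not part of the original module):
# smooth a synthetic speckled image with the Lee filter. The toy gamma-
# distributed array below is invented, standing in for multi-looked SAR
# intensity.
example_speckle = np.random.gamma(shape = 16., scale = 1. / 16., size = (50, 50))
example_smoothed = lee_filter(example_speckle, size = 5)
assert example_smoothed.shape == example_speckle.shape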



def getChange(AGB_t1, AGB_t2, F_threshold = 10., C_threshold = 0.2, min_pixels = 1):
    """
    Returns pixels that meet change detection thresholds.
    Args:
        F_threshold: AGB threshold above which a pixel is classed as forest
        C_threshold: minimum proportional AGB change for a pixel to be flagged as change
        min_pixels: minimum number of contiguous pixels for a change to be retained
    """

    # Pixels that move from forest to nonforest or vice versa
F_NF = np.logical_and(AGB_t1 >= F_threshold, AGB_t2 < F_threshold)
NF_F = np.logical_and(AGB_t1 < F_threshold, AGB_t2 >= F_threshold)
F_F = np.logical_and(AGB_t1 >= F_threshold, AGB_t2 >= F_threshold)
NF_NF = np.logical_and(AGB_t1 < F_threshold, AGB_t2 < F_threshold)

    # Change pixels: proportional AGB change beyond +/- C_threshold
    CHANGE = np.logical_or(((AGB_t1 - AGB_t2) / AGB_t1) >= C_threshold, ((AGB_t1 - AGB_t2) / AGB_t1) < (- C_threshold))
    NOCHANGE = ~CHANGE

    # Trajectory
    DECREASE = AGB_t2 < AGB_t1
    INCREASE = AGB_t2 >= AGB_t1


    # Enforce a minimum extent (in contiguous pixels) for change areas
if min_pixels != 1:
CHANGE_INCREASE, _ = cal.getContiguousAreas(CHANGE & INCREASE, True, min_pixels = min_pixels)
CHANGE_DECREASE, _ = cal.getContiguousAreas(CHANGE & DECREASE, True, min_pixels = min_pixels)
CHANGE = np.logical_or(CHANGE_INCREASE, CHANGE_DECREASE)

metrics = {}

# These are all the possible definitions. Note: they *must* add up to one.
@@ -73,129 +58,50 @@ def getChange(AGB_t1, AGB_t2, F_threshold = 10., C_threshold = 0.2):
return metrics
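
# Illustrative sketch (added; arrays are invented): change metrics for two
# synthetic AGB maps. With the default min_pixels = 1 no contiguity filtering
# is applied. Keys such as 'deforestation' and 'degradation' follow the usage
# further down this file.
example_agb_t1 = np.full((10, 10), 20.)   # every pixel starts as forest
example_agb_t2 = example_agb_t1.copy()
example_agb_t2[:5, :] = 5.                # half the tile loses 75 % of its AGB
example_metrics = getChange(example_agb_t1, example_agb_t2, F_threshold = 10., C_threshold = 0.2)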


def getContiguousAreas(data, value, min_pixels):
    '''
    Get pixels that belong to the same contiguous area.
    Args:
        data: A numpy array
        value: The pixel value to group into contiguous areas
        min_pixels: Minimum area to include, in pixels (queen's move connectivity)
    Returns:
        A binary array of pixels that meet the conditions
    '''

    # Extract areas that meet the condition
    binary_array = (data == value) * 1

# Label contigous areas with a number
s = scipy.ndimage.generate_binary_structure(2,2) # This connects diagonal elements
location_id, n_areas = label(binary_array, structure = s)

# Get count of each value in array
label_area = np.bincount(location_id.flatten())[1:]

# Find those IDs that meet minimum area requirements
include_id = np.arange(1, n_areas + 1)[label_area >= min_pixels]

# Get a binary array of location_id pixels that meet the minimum area requirement
contiguous_area = np.in1d(location_id, include_id).reshape(data.shape) * 1

# Return an array giving values to each area
#location_id[contiguous_area == False] = 0

return contiguous_area#, location_id
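
# Illustrative sketch (added; array is invented): only the three-pixel cluster
# in the top-left survives a min_pixels threshold of 3; the two-pixel cluster
# on the right is discarded.
example_blobs = np.array([[1, 1, 0, 0],
                          [1, 0, 0, 1],
                          [0, 0, 0, 1],
                          [0, 0, 0, 0]])
example_contiguous = getContiguousAreas(example_blobs, 1, 3)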


def _window_mean(img, window_size = 3):
'''
Based on https://stackoverflow.com/questions/18419871/improving-code-efficiency-standard-deviation-on-sliding-windows
'''

from scipy import signal

c1 = signal.convolve2d(img, np.ones((window_size, window_size)) / (window_size ** 2), boundary = 'symm')

    border = window_size // 2

return c1[border:-border, border:-border]


def _window_stdev(img, window_size = 3):
'''
Based on https://stackoverflow.com/questions/18419871/improving-code-efficiency-standard-deviation-on-sliding-windows
and http://nickc1.github.io/python,/matlab/2016/05/17/Standard-Deviation-(Filters)-in-Matlab-and-Python.html
'''

from scipy import signal

c1 = signal.convolve2d(img, np.ones((window_size, window_size)) / (window_size ** 2), boundary = 'symm')
c2 = signal.convolve2d(img*img, np.ones((window_size, window_size)) / (window_size ** 2), boundary = 'symm')

    border = window_size // 2

return np.sqrt(c2 - c1 * c1)[border:-border, border:-border]
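
# Quick shape check (added, illustrative): both sliding-window statistics are
# returned at the same size as the input image, since the convolution border
# is trimmed off.
example_window_img = np.random.rand(20, 20)
assert _window_mean(example_window_img, window_size = 3).shape == (20, 20)
assert _window_stdev(example_window_img, window_size = 3).shape == (20, 20)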




def enhanced_lee_filter(img, window_size = 3, n_looks = 16):
    '''
    Filters a masked array with the enhanced Lee filter.
    Args:
        img: A masked array
    Returns:
        A masked array containing a filtered version of img
    '''

    assert type(window_size) == int, "Window size must be an integer. You input the value: %s"%str(window_size)
    assert (window_size % 2) == 1, "Window size must be an odd number. You input the value: %s"%str(window_size)

k = 1. #Adequate for most SAR images

# Set parameters to default
#cu = 0.523
#cmax = 1.73

cu = (1./n_looks) ** 0.5
cmax = (1 + (2./n_looks)) ** 0.5

    img_mask = img.data
    img_mask[img.mask] = np.nan

img_mean = _window_mean(img_mask, window_size = window_size)
img_std = _window_stdev(img_mask, window_size = window_size)

ci = img_std / img_mean
    ci[~np.isfinite(ci)] = 0.

w_t = np.zeros_like(ci)

    # There are three conditions in the enhanced Lee filter
w_t[ci <= cu] = 1.
w_t[ci >= cmax] = 0.

s = np.logical_and(ci > cu, ci < cmax)
w_t[s] = np.exp((-k * (ci[s] - cu)) / (cmax - ci[s]))
class CHANGE(object):
    """
    A tree cover change object, based on ALOS tiles.
    Change objects have the following properties:
    Attributes:
        lat: Latitude of the tile (degrees)
        lon: Longitude of the tile (degrees)
        DN: Array of image digital numbers
        mask: Array of masked pixels
    """

img_filtered = (img_mean * w_t) + (img_mask * (1. - w_t))

img_filtered = np.ma.array(img_filtered, mask = np.isnan(img_filtered))

img_filtered.data[img_filtered.mask] = 0.

return img_filtered
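
# Illustrative sketch (added; inputs are invented): the enhanced Lee filter
# expects a masked array, here a synthetic speckled image with no masked
# pixels.
example_noisy = np.ma.array(np.random.gamma(shape = 16., scale = 1. / 16., size = (50, 50)),
                            mask = np.zeros((50, 50), dtype = bool))
example_filtered = enhanced_lee_filter(example_noisy, window_size = 3, n_looks = 16)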

def rebin(a, shape = (450, 450)):
    '''
    Reduce array size to 0.5 ha resolution by block averaging, to increase ENL.
    From https://stackoverflow.com/questions/8090229/resize-with-averaging-or-rebin-a-numpy-2d-array
    '''
    sh = shape[0], a.shape[0] // shape[0], shape[1], a.shape[1] // shape[1]
return a.reshape(sh).mean(-1).mean(1)
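
# Illustrative sketch (added): block-average a 900 x 900 array down to the
# default 450 x 450 output, i.e. each output pixel is the mean of a 2 x 2
# block of input pixels.
example_fine = np.random.rand(900, 900)
example_coarse = rebin(example_fine, shape = (450, 450))
assert example_coarse.shape == (450, 450)
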
    def __init__(self, dataloc, lat, lon, year1, year2):
        """
        Loads data and metadata for two ALOS mosaic tiles.
        """

        # Test that inputs are reasonable lats/lons/years
        assert type(lat) == int, "Latitude must be an integer."
        assert lat >= -90. and lat <= 90., "Latitude must be between -90 and 90 degrees."
        assert type(lon) == int, "Longitude must be an integer."
        assert lon >= -180. and lon <= 180., "Longitude must be between -180 and 180 degrees."
        assert type(year1) == int, "year1 must be an integer."
        assert (year1 >= 2007 and year1 <= 2010) or (year1 >= 2015 and year1 <= dt.datetime.now().year), "year1 must be in the range 2007 - 2010 or 2015 - present. Your input year1 was %s."%str(year1)
        assert type(year2) == int, "year2 must be an integer."
        assert (year2 >= 2007 and year2 <= 2010) or (year2 >= 2015 and year2 <= dt.datetime.now().year), "year2 must be in the range 2007 - 2010 or 2015 - present. Your input year2 was %s."%str(year2)

# Set up the two ALOS tiles to compare
self.data_t1 = cal.ALOS(dataloc, lat, lon, year1, factor = 3)
self.data_t2 = cal.ALOS(dataloc, lat, lon, year2, factor = 3)

    def getChange(self, F_threshold = 10., C_threshold = 0.2, min_pixels = 1):
        '''
        Calculates change metrics between the AGB maps of the two ALOS tiles.
        '''

AGB_t1 = self.data_t1.getAGB(lee_filter = True)
AGB_t2 = self.data_t2.getAGB(lee_filter = True)

metrics = getChange(AGB_t1, AGB_t2, F_threshold = F_threshold, C_threshold = C_threshold, min_pixels = min_pixels)

return metrics





@@ -208,47 +114,28 @@ def rebin(a, shape = (450, 450)):

#Nhambita = lat -18 lon 33

lat = -18 #-19
lon = 33 #34 #39

data_2007 = cal.ALOS(data_dir, lat, lon, 2007)
#data_2008 = cal.ALOS(data_dir, lat, lon, 2008)
#data_2009 = cal.ALOS(data_dir, lat, lon, 2009)
data_2010 = cal.ALOS(data_dir, lat, lon, 2010)

gamma0_2007 = data_2007.getGamma0(units='decibels')
#gamma0_2008 = data_2008.getGamma0()
#gamma0_2009 = data_2009.getGamma0()
gamma0_2010 = data_2010.getGamma0(units='decibels')


gamma0_2007 = enhanced_lee_filter(gamma0_2007, window_size = 3, n_looks = 16)
gamma0_2010 = enhanced_lee_filter(gamma0_2010, window_size = 3, n_looks = 16)
#gamma0_2007 = enhanced_lee_filter(rebin(gamma0_2007), window_size = 5, n_looks = 16*4)
#gamma0_2010 = enhanced_lee_filter(rebin(gamma0_2010), window_size = 5, n_looks = 16*4)


AGB_2007 = 715.667 * (10 ** (gamma0_2007 / 10.)) - 5.967
AGB_2010 = 715.667 * (10 ** (gamma0_2010 / 10.)) - 5.967
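
# Worked example (added, illustrative; units assumed to follow the calibration
# above): a gamma0 of -10 dB is 10 ** (-10. / 10.) = 0.1 in linear units,
# giving AGB = 715.667 * 0.1 - 5.967, roughly 65.6.
example_agb_value = 715.667 * (10 ** (-10. / 10.)) - 5.967  # ~65.6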

#AGB_2007 = data_2007.getAGB()
#AGB_2010 = data_2010.getAGB()

#AGB_2008 = lee_filter(data_2008.getAGB(), 10)
#AGB_2009 = lee_filter(data_2009.getAGB(), 10)
#AGB_2010 = enhanced_lee_filter(data_2010.getAGB())

#change_07_08 = getChange(AGB_2007, AGB_2008)
#change_08_09 = getChange(AGB_2008, AGB_2009)
#change_09_10 = getChange(AGB_2009, AGB_2010)

change_07_10 = CHANGE(data_dir, lat, lon, 2007, 2010)
metrics = change_07_10.getChange(F_threshold = 15, C_threshold = 0.2, min_pixels = 6)

plt.imshow(metrics['deforestation'] * 2 + metrics['degradation'])
plt.colorbar()
plt.show()

#change_07_10 = getChange(AGB_2007, AGB_2010, F_threshold = 10., C_threshold = 0.2)
#contiguous_change_area = getContiguousAreas(change_07_10['degradation'] + change_07_10['deforestation'], 1, 8)

#deforestation = change_07_10['deforestation'] * contiguous_change_area
#degradation = change_07_10['degradation'] * contiguous_change_area



#deforestation_07_08 = getContiguousAreas(change_07_08['deforestation'], 1, 8)
#deforestation_08_09 = getContiguousAreas(change_08_09['deforestation'], 1, 8)
@@ -259,7 +146,16 @@


# This result looks nice. TODO: calculate % change of entire contiguous change region

"""
lakes = '/home/sbowers3/DATA/GIS_data/mozambique/diva/MOZ_wat/MOZ_water_areas_dcw.shp'
rivers = '/home/sbowers3/DATA/GIS_data/mozambique/diva/MOZ_wat/MOZ_water_lines_dcw.shp'
lake_mask = cal.rasterizeShapefile(data_2007, lakes, buffer_size = 0.005)
river_mask = cal.rasterizeShapefile(data_2007, rivers, buffer_size = 0.005)
water_mask = np.logical_or(river_mask, lake_mask)
output_im = np.ma.array(deforestation * 2 + degradation, mask = water_mask)
output_im.data[output_im.mask] = 0
from osgeo import osr, gdal
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
@@ -268,9 +164,11 @@
ds = driver.Create(output_dir + 'forest_loss.tif', 4500, 4500, 1, gdal.GDT_UInt16)
ds.SetGeoTransform(data_2007.geo_t)
ds.SetProjection(srs.ExportToWkt())
ds.GetRasterBand(1).WriteArray(output_im)
ds.GetRasterBand(1).SetNoDataValue(0)
ds = None

"""


"""
geo_t = (33.0, 0.00022222222222222223*10, 0.0, -18.0, 0.0, -0.00022222222222222223*10)
