From f28e5ce552c62b4bc360fa184619e7137edf8466 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 24 Apr 2018 19:30:51 +0200 Subject: [PATCH 001/180] Import evtk instead of pyevtk, if available Only if the evtk package is not found do we use the old pyevtk package. The evtk package is being built from evtk source as part of newer e3sm-unified packages and is on the e3sm channel. pyevtk does not seem to reliably support python 3 and some versions seem to have bugs in the setup script. --- python_scripts/paraview_vtk_field_extractor/utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/python_scripts/paraview_vtk_field_extractor/utils.py b/python_scripts/paraview_vtk_field_extractor/utils.py index fde2a33c0..6311798fc 100644 --- a/python_scripts/paraview_vtk_field_extractor/utils.py +++ b/python_scripts/paraview_vtk_field_extractor/utils.py @@ -7,7 +7,10 @@ files on MPAS grids. """ -from pyevtk.vtk import VtkFile, VtkPolyData +try: + from evtk.vtk import VtkFile, VtkPolyData +except ImportError: + from pyevtk.vtk import VtkFile, VtkPolyData import sys, glob import numpy From 2bb3dd90c6345f58dbfa77d8ef1b555c1ac04139 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 21 May 2018 01:29:04 -0700 Subject: [PATCH 002/180] Fix time slice file names for time-dependent output --- .../paraview_vtk_field_extractor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index e1e0a7232..afe85572d 100755 --- a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -161,10 +161,10 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, pvd_file.write('\n') if not combine_output and not np.all(var_has_time_dim): - out_prefix = 
"staticFieldsOn{}".format(suffix) + static_prefix = "staticFieldsOn{}".format(suffix) varIndices = np.arange(nVars)[np.logical_not(var_has_time_dim)] timeIndependentFile = utils.write_vtp_header(out_dir, - out_prefix, + static_prefix, varIndices[0], varIndices, variable_list, From 817c84a2498d66e0dae724285a5fef0bda27492c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 21 May 2018 11:59:13 +0200 Subject: [PATCH 003/180] Fix array masking with netCDF4 1.4.0 The latest netCDF4 package automatically masks all numpy arrays, which doesn't play nicely with pyevtk. To fix this, set_auto_mask(False) has been used when opening NetCDF files. --- .../paraview_vtk_field_extractor.py | 15 +++++++-------- .../paraview_vtk_field_extractor/utils.py | 13 ++++++++++++- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index e1e0a7232..50ca26ad0 100755 --- a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -57,7 +57,6 @@ import os import numpy as np -from netCDF4 import Dataset as NetCDFFile from netCDF4 import date2num from datetime import datetime @@ -87,7 +86,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, outType = 'float64' # Get dimension info to allocate the size of Colors - time_series_file = NetCDFFile(file_names[0], 'r') + time_series_file = utils.open_netcdf(file_names[0]) if mesh_file is not None: # blockDim may not exist in time series file @@ -185,7 +184,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if prev_file != file_names[time_index]: if prev_file != "": time_series_file.close() - time_series_file = NetCDFFile(file_names[time_index], 'r') + time_series_file = utils.open_netcdf(file_names[time_index]) prev_file = 
file_names[time_index] if any_var_has_time_dim: @@ -386,9 +385,9 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, separate_mesh_file = False # Setting dimension values: - time_series_file = NetCDFFile(time_file_names[0], 'r') + time_series_file = utils.open_netcdf(time_file_names[0]) if separate_mesh_file: - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = utils.open_netcdf(args.mesh_filename) else: mesh_file = None extra_dims = utils.parse_extra_dims(args.dimension_list, time_series_file, @@ -408,7 +407,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if len(cellVars) > 0: print " -- Extracting cell fields --" - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = utils.open_netcdf(args.mesh_filename) # Build cell geometry (vertices, connectivity, offsets, valid_mask) = \ @@ -431,7 +430,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if len(vertexVars) > 0: print " -- Extracting vertex fields --" - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = utils.open_netcdf(args.mesh_filename) # Build vertex geometry (vertices, connectivity, offsets, valid_mask) = \ @@ -455,7 +454,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if len(edgeVars) > 0: print " -- Extracting edge fields --" - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = utils.open_netcdf(args.mesh_filename) # Build cell list (vertices, connectivity, offsets, valid_mask) = \ diff --git a/python_scripts/paraview_vtk_field_extractor/utils.py b/python_scripts/paraview_vtk_field_extractor/utils.py index fde2a33c0..638a13a04 100644 --- a/python_scripts/paraview_vtk_field_extractor/utils.py +++ b/python_scripts/paraview_vtk_field_extractor/utils.py @@ -20,6 +20,17 @@ except: use_progress_bar = False + +def open_netcdf(file_name): + nc_file = NetCDFFile(file_name, 'r') + # turn off auto mask (if applicable) + try: + nc_file.set_auto_mask(False) + except 
AttributeError: + pass + return nc_file + + def is_valid_mesh_var(mesh_file, variable_name): if mesh_file is None: return False @@ -70,7 +81,7 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ i_file = 0 for file_name in file_list: try: - nc_file = NetCDFFile(file_name, 'r') + nc_file = open_netcdf(file_name) except IOError: print "Warning: could not open {}".format(file_name) continue From c9f6b7e8539684004c1f477a59907b0075f586a7 Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Fri, 25 May 2018 10:46:50 -0600 Subject: [PATCH 004/180] Add python scripts to alter coastlines Added: widen_transect_edge_masks.py and add_land_locked_cells_to_mask.py --- .../add_land_locked_cells_to_mask.py | 212 ++++++++++++++++++ .../widen_transect_edge_masks.py | 52 +++++ 2 files changed, 264 insertions(+) create mode 100755 ocean/coastline_alteration/add_land_locked_cells_to_mask.py create mode 100755 ocean/coastline_alteration/widen_transect_edge_masks.py diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py new file mode 100755 index 000000000..aa720a3f1 --- /dev/null +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -0,0 +1,212 @@ +#!/usr/bin/env python +""" +Name: add_land_locked_cells_to_mask.py +Author: Mark Petersen, Adrian Turner + +Find ocean cells that are land-locked, and alter the cell +mask so that they are counted as land cells. 
+""" +import os +from netCDF4 import Dataset +import numpy as np +import argparse + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument("-f", "--mask_file", dest="mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="MASKFILE", required=True) +parser.add_argument("-m", "--mesh_file", dest="mesh_filename", + help="MPAS Mesh filename.", metavar="MESHFILE", + required=True) +parser.add_argument("-l", "--latitude_threshold", dest="latitude_threshold", + help="Minimum latitude, in degrees, for transect widening.", + required=False, type=float, default=43.0) +parser.add_argument("-n", "--number_sweeps", dest="nSweeps", + help="Maximum number of sweeps to search for land-locked cells.", + required=False, type=int, default=10) +args = parser.parse_args() + +latitude_threshold_radians = args.latitude_threshold*3.1415/180. + +# Obtain mesh variables +meshFile = Dataset(args.mesh_filename, "r") +nCells = len(meshFile.dimensions["nCells"]) +maxEdges = len(meshFile.dimensions["maxEdges"]) +cellsOnCell = meshFile.variables["cellsOnCell"][:, :] +nEdgesOnCell = meshFile.variables["nEdgesOnCell"][:] +latCell = meshFile.variables["latCell"][:] +lonCell = meshFile.variables["lonCell"][:] +meshFile.close() + +# Obtain transect mask variables +maskFile = Dataset(args.mask_filename, "a") + +nRegions = len(maskFile.dimensions["nRegions"]) +regionCellMasks = maskFile.variables["regionCellMasks"][:, :] + +landMask = np.zeros(nCells, dtype="i") +try: + landMaskDiagnostic = maskFile.createVariable("landMaskDiagnostic", "i", dimensions=("nCells")) +except: + landMaskDiagnostic = maskFile.variables["landMaskDiagnostic"][:] + +print "Running add_land_locked_cells_to_mask.py. 
Total number of cells: ", nCells + +# set landMask = flattened regionCellMasks +landMask = np.amax(regionCellMasks, axis=1) +# use np.array, as simple = makes a pointer +landMaskNew = np.array(landMask) +activeEdgeSum = np.zeros(maxEdges, dtype="i") + +# Removable cells are ocean cells outside of latitude threshold +removableCellIndex = np.zeros(nCells, dtype="i") +nRemovableCells = 0 + +print "Step 1: Searching for land-locked cells. Remove cells that only have isolated active edges." +landLockedCounter = 0 +for iCell in range(nCells): + landMaskDiagnostic[iCell] = landMask[iCell] + # skip if outside latitude threshold or if this is already a land cell + if abs(latCell[iCell]) < latitude_threshold_radians or landMask[iCell] == 1: + continue + removableCellIndex[nRemovableCells] = iCell + nRemovableCells += 1 + activeEdgeSum[:] = 0 + for iEdgeOnCell in range(nEdgesOnCell[iCell]): + # check if neighbor is an ocean cell (landMask=0) + # subtract 1 to convert 1-base to 0-base: + if landMask[cellsOnCell[iCell, iEdgeOnCell]-1] == 0: + activeEdgeSum[iEdgeOnCell] += 1 + # % is modulo operator: + iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] + activeEdgeSum[iP1] += 1 + + if np.amax(activeEdgeSum[0:nEdgesOnCell[iCell]]) == 1: + maskFile['regionCellMasks'][iCell, 1] = 1 + landLockedCounter += 1 + landMaskNew[iCell] = 1 + landMaskDiagnostic[iCell] = 2 + +landMask[:] = landMaskNew[:] +print " Number of landLocked cells: ", landLockedCounter + +print "Step 2: Searching for land-locked cells. Remove cells that have any isolated active edges." 
+for iSweep in range(args.nSweeps): + landLockedCounter = 0 + for iRemovableCell in range(0, nRemovableCells): + iCell = removableCellIndex[iRemovableCell] + if landMask[iCell] == 1: + continue + for iEdgeOnCell in range(nEdgesOnCell[iCell]): + # check if neighbor is an ocean cell (landMask=0) + # subtract 1 to convert 1-base to 0-base: + if landMask[cellsOnCell[iCell, iEdgeOnCell]-1] == 0: + # % is modulo operator: + iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] + iM1 = (iEdgeOnCell - 1) % nEdgesOnCell[iCell] + # Is this neighbor's two neighbors to left and right land? + # if so, sum of masks is two. + # subtract 1 to convert 1-base to 0-base: + if (landMask[cellsOnCell[iCell, iP1]-1] + + landMask[cellsOnCell[iCell, iM1]-1]) == 2: + landLockedCounter += 1 + landMaskNew[iCell] = 1 + maskFile['regionCellMasks'][iCell, 1] = 1 + landMaskDiagnostic[iCell] = 3 + # once we remove this cell, we can quit checking over edges + break + + landMask[:] = landMaskNew[:] + print " Sweep: ", iSweep+1, "Number of landLocked cells removed: ", landLockedCounter + if landLockedCounter == 0: + break + +print "Step 3: Perform flood fill, starting from open ocean." 
+floodFill = np.zeros(nCells, dtype="i") +floodableCellIndex = np.zeros(nCells, dtype="i") +nFloodableCells = 0 +floodFill[:] = -1 +d2r = 3.1415/180.0 + +# init flood fill to 0 for water, -1 for land, 1 for known open ocean regions +for iRemovableCell in range(0, nRemovableCells): + iCell = removableCellIndex[iRemovableCell] + if (landMaskDiagnostic[iCell] == 0): + floodFill[iCell] = 0 + if (latCell[iCell] > 84.0*d2r # North Pole + or lonCell[iCell] > 160.0*d2r and lonCell[iCell] < 230.0*d2r and latCell[iCell] > 73.0*d2r # Arctic + or lonCell[iCell] > 315.0*d2r and lonCell[iCell] < 340.0*d2r and latCell[iCell] > 15.0*d2r and latCell[iCell] < 45.0*d2r # North Atlantic + or lonCell[iCell] > 290.0*d2r and lonCell[iCell] < 300.0*d2r and latCell[iCell] > 72.0*d2r and latCell[iCell] < 75.0*d2r # North Atlantic + or lonCell[iCell] > 0.0*d2r and lonCell[iCell] < 10.0*d2r and latCell[iCell] > 70.0*d2r and latCell[iCell] < 75.0*d2r # North Atlantic 2 + or lonCell[iCell] > 150.0*d2r and lonCell[iCell] < 225.0*d2r and latCell[iCell] > 0.0*d2r and latCell[iCell] < 45.0*d2r # North Pacific + or lonCell[iCell] > 0.0*d2r and lonCell[iCell] < 5.0*d2r and latCell[iCell] > -60.0*d2r and latCell[iCell] < 0.0*d2r # South Atlantic + or lonCell[iCell] > 180.0*d2r and lonCell[iCell] < 280.0*d2r and latCell[iCell] > -60.0*d2r and latCell[iCell] < -10.0*d2r # South Pacific + or lonCell[iCell] > 0.0*d2r and lonCell[iCell] < 165.0*d2r and latCell[iCell] > -60.0*d2r and latCell[iCell] < -45.0*d2r): # Southern Ocean + floodFill[iCell] = 1 + landMaskDiagnostic[iCell] = 5 # indicates seed region + else: + floodableCellIndex[nFloodableCells] = iCell + nFloodableCells += 1 +print " Initial number of flood cells: ", nFloodableCells + +# sweep over neighbors of known open ocean points +for iSweep in range(0, nCells): + newFloodCellsThisSweep = 0 + + for iFloodableCell in range(0, nFloodableCells): + iCell = floodableCellIndex[iFloodableCell] + if (floodFill[iCell] == 0): + + for iCellOnCellSweep in 
range(0, nEdgesOnCell[iCell]): + iCellNeighbor = cellsOnCell[iCell, iCellOnCellSweep]-1 + + if (floodFill[iCellNeighbor] == 1): + floodFill[iCell] = 1 + newFloodCellsThisSweep += 1 + break + + print " Sweep ", iSweep, " new flood cells this sweep: ", newFloodCellsThisSweep + + if (newFloodCellsThisSweep == 0): + break + +oceanMask = np.zeros(nCells, dtype="i") +for iCell in range(0, nCells): + if (floodFill[iCell] == 1): + oceanMask[iCell] = 1 + +print "Step 4: Searching for land-locked cells, step 3: revert cells with connected active edges" +for iSweep in range(args.nSweeps): + landLockedCounter = 0 + for iRemovableCell in range(0, nRemovableCells): + iCell = removableCellIndex[iRemovableCell] + # only remove a cell that was added in lats round (red cells) + if landMaskDiagnostic[iCell] == 3: + for iEdgeOnCell in range(nEdgesOnCell[iCell]): + # check if neighbor is an ocean cell (landMask=0) + # subtract 1 to convert 1-base to 0-base: + if oceanMask[cellsOnCell[iCell, iEdgeOnCell]-1] == 1: + # % is modulo operator: + iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] + iM1 = (iEdgeOnCell - 1) % nEdgesOnCell[iCell] + # Is either of this neighbor's two neighbors to left and right ocean? + # if so, sum of masks is two. 
+ # subtract 1 to convert 1-base to 0-base: + if (landMask[cellsOnCell[iCell, iP1]-1] == 0 + or landMask[cellsOnCell[iCell, iM1]-1] == 0): + landLockedCounter += 1 + landMaskNew[iCell] = 0 + maskFile['regionCellMasks'][iCell, 1] = 0 + landMaskDiagnostic[iCell] = 4 + oceanMask[iCell] = 1 + # once we remove this cell, we can quit checking over edges + break + + landMask[:] = landMaskNew[:] + print " Sweep: ", iSweep+1, "Number of land-locked cells returned: ", landLockedCounter + if landLockedCounter == 0: + break + +maskFile.close() diff --git a/ocean/coastline_alteration/widen_transect_edge_masks.py b/ocean/coastline_alteration/widen_transect_edge_masks.py new file mode 100755 index 000000000..e941045c6 --- /dev/null +++ b/ocean/coastline_alteration/widen_transect_edge_masks.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +""" +Name: widen_transect_edge_masks.py +Author: Mark Petersen + +Alter transects to be at least two cells wide. This is used for critical +passages, to avoid sea ice blockage. Specifically, mark cells on both sides +of each transect edge mask as a water cell. +""" +import numpy as np +from netCDF4 import Dataset +import argparse + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument("-f", "--mask_file", dest="mask_filename", + help="Mask file with cell and edge transect masks.", + metavar="MASKFILE", + required=True) +parser.add_argument("-m", "--mesh_file", dest="mesh_filename", + help="MPAS Mesh filename.", metavar="MESHFILE", + required=True) +parser.add_argument("-l", "--latitude_threshold", dest="latitude_threshold", + help="Minimum latitude, degrees, for transect widening.", + required=False, type=float, default=43.0) +args = parser.parse_args() + +latitude_threshold_radians = args.latitude_threshold*3.1415/180. 
+ +# Obtain mesh variables +meshFile = Dataset(args.mesh_filename, "r") +nEdges = len(meshFile.dimensions["nEdges"]) +cellsOnEdge = meshFile.variables["cellsOnEdge"][:, :] +latEdge = meshFile.variables["latEdge"][:] +meshFile.close() + +# Obtain transect mask variables +maskFile = Dataset(args.mask_filename, "a") +nTransects = len(maskFile.dimensions["nTransects"]) +transectCellMasks = maskFile.variables["transectCellMasks"][:, :] +transectEdgeMasks = maskFile.variables["transectEdgeMasks"][:, :] + +print("widen_transect_edge_masks.py: Widening transects to two cells wide") +for iEdge in range(nEdges): + if abs(latEdge[iEdge]) > latitude_threshold_radians: + for iTransect in range(nTransects): + if transectEdgeMasks[iEdge, iTransect] == 1: + maskFile['transectCellMasks'][cellsOnEdge[iEdge, 0]-1, iTransect] = 1 + maskFile['transectCellMasks'][cellsOnEdge[iEdge, 1]-1, iTransect] = 1 + +maskFile.close() From 2616ac2a798144d954beb35d59b14175ce6d942f Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Sat, 2 Jun 2018 15:51:11 -0600 Subject: [PATCH 005/180] Change "add landlocked cells" script to not overwrite For long init runs like RRS18to6, it's important to have input and output files, rather than overwriting, because the process will often get killed halfway through. --- .../add_land_locked_cells_to_mask.py | 45 ++++++++++++------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py index aa720a3f1..3ed99c01e 100755 --- a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -7,16 +7,26 @@ mask so that they are counted as land cells. 
""" import os +import shutil from netCDF4 import Dataset import numpy as np import argparse +def removeFile(fileName): + try: + os.remove(fileName) + except OSError: + pass + parser = \ argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) -parser.add_argument("-f", "--mask_file", dest="mask_filename", +parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="INPUTMASKFILE", required=True) +parser.add_argument("-o", "--output_mask_file", dest="output_mask_filename", help="Mask file that includes cell and edge masks.", - metavar="MASKFILE", required=True) + metavar="OUTPUTMASKFILE", required=True) parser.add_argument("-m", "--mesh_file", dest="mesh_filename", help="MPAS Mesh filename.", metavar="MESHFILE", required=True) @@ -40,22 +50,23 @@ lonCell = meshFile.variables["lonCell"][:] meshFile.close() -# Obtain transect mask variables -maskFile = Dataset(args.mask_filename, "a") +removeFile(args.output_mask_filename) +shutil.copyfile(args.input_mask_filename,args.output_mask_filename) -nRegions = len(maskFile.dimensions["nRegions"]) -regionCellMasks = maskFile.variables["regionCellMasks"][:, :] +# Obtain original cell mask from input file +inputMaskFile = Dataset(args.input_mask_filename, "r") +nRegions = len(inputMaskFile.dimensions["nRegions"]) +regionCellMasks = inputMaskFile.variables["regionCellMasks"][:, :] +# set landMask = flattened regionCellMasks +landMask = np.amax(regionCellMasks, axis=1) +inputMaskFile.close() -landMask = np.zeros(nCells, dtype="i") -try: - landMaskDiagnostic = maskFile.createVariable("landMaskDiagnostic", "i", dimensions=("nCells")) -except: - landMaskDiagnostic = maskFile.variables["landMaskDiagnostic"][:] +# Open output file +outputMaskFile = Dataset(args.output_mask_filename, "a") +landMaskDiagnostic = outputMaskFile.createVariable("landMaskDiagnostic", "i", dimensions=("nCells")) print "Running 
add_land_locked_cells_to_mask.py. Total number of cells: ", nCells -# set landMask = flattened regionCellMasks -landMask = np.amax(regionCellMasks, axis=1) # use np.array, as simple = makes a pointer landMaskNew = np.array(landMask) activeEdgeSum = np.zeros(maxEdges, dtype="i") @@ -84,7 +95,7 @@ activeEdgeSum[iP1] += 1 if np.amax(activeEdgeSum[0:nEdgesOnCell[iCell]]) == 1: - maskFile['regionCellMasks'][iCell, 1] = 1 + outputMaskFile['regionCellMasks'][iCell, 1] = 1 landLockedCounter += 1 landMaskNew[iCell] = 1 landMaskDiagnostic[iCell] = 2 @@ -113,7 +124,7 @@ + landMask[cellsOnCell[iCell, iM1]-1]) == 2: landLockedCounter += 1 landMaskNew[iCell] = 1 - maskFile['regionCellMasks'][iCell, 1] = 1 + outputMaskFile['regionCellMasks'][iCell, 1] = 1 landMaskDiagnostic[iCell] = 3 # once we remove this cell, we can quit checking over edges break @@ -198,7 +209,7 @@ or landMask[cellsOnCell[iCell, iM1]-1] == 0): landLockedCounter += 1 landMaskNew[iCell] = 0 - maskFile['regionCellMasks'][iCell, 1] = 0 + outputMaskFile['regionCellMasks'][iCell, 1] = 0 landMaskDiagnostic[iCell] = 4 oceanMask[iCell] = 1 # once we remove this cell, we can quit checking over edges @@ -209,4 +220,4 @@ if landLockedCounter == 0: break -maskFile.close() +outputMaskFile.close() From 54272d095bc419707cfa6b3dec2f212ab19157bd Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:04:38 -0600 Subject: [PATCH 006/180] Remove grid_gen/ascii_netcdf_packager --- grid_gen/ascii_netcdf_packager/.gitignore | 2 - grid_gen/ascii_netcdf_packager/Makefile | 11 - grid_gen/ascii_netcdf_packager/README | 39 -- .../ascii_to_netcdf_packager.cpp | 663 ------------------ 4 files changed, 715 deletions(-) delete mode 100644 grid_gen/ascii_netcdf_packager/.gitignore delete mode 100644 grid_gen/ascii_netcdf_packager/Makefile delete mode 100644 grid_gen/ascii_netcdf_packager/README delete mode 100644 grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp diff --git a/grid_gen/ascii_netcdf_packager/.gitignore 
b/grid_gen/ascii_netcdf_packager/.gitignore deleted file mode 100644 index 98844dc60..000000000 --- a/grid_gen/ascii_netcdf_packager/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.x -*.nc diff --git a/grid_gen/ascii_netcdf_packager/Makefile b/grid_gen/ascii_netcdf_packager/Makefile deleted file mode 100644 index 154cc06d8..000000000 --- a/grid_gen/ascii_netcdf_packager/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -CXX = g++ -CPPFLAGS = -I${NETCDF}/include -CXXFLAGS = -O3 -LIBS = -L${NETCDF}/lib -lnetcdf -lnetcdf_c++ -EXE = AsciiNetCDFPackager.x - -all: - $(CXX) $(CPPFLAGS) ascii_to_netcdf_packager.cpp $(CXXFLAGS) $(LIBS) -o $(EXE) - -clean: - rm -f $(EXE) diff --git a/grid_gen/ascii_netcdf_packager/README b/grid_gen/ascii_netcdf_packager/README deleted file mode 100644 index 57205468a..000000000 --- a/grid_gen/ascii_netcdf_packager/README +++ /dev/null @@ -1,39 +0,0 @@ -Readme for ascii_to_netcdf_packager.cpp - -Author: Doug Jacobsen - -Purpose: - ascii_to_netcdf_packager.cpp is a piece of software designed create a NetCDF - file that contains enough information to build an MPAS mesh from. - -Requirements: - ascii_to_netcdf_packager.cpp requires the c++ netcdf libraries to be able to read/write NetCDF files. - It has been tested using g++ version 4.8.1 - -Usage of ascii_to_netcdf_packager.cpp: - ./AsciiNetCDFPackager.x [mod] - - Input options are: - [mod] (Optional) -- This allows the output mesh name to be modified - from grid.N.nc to grid.mod.N.nc - - This program reads three ascii files from the current directory: - * end_points.dat - This file should contain the x, y, and z coordinates - for every cell center in the mesh. Each row is a - point, and the columns are order x y z. - - * triangles.dat - This file contains the indices for cells that make up - each triangle. Each row is a triangle listing the - indices for each cell that is a vertex of the - triangle. Each column is an index of a triangle - vertex. 
- - * point_density.dat - This file contains the value of the density - function evaluated at each cell center. - - Upon completion, this program will create a new file named grid.N.nc that - contains enough information to build an MPAS mesh using the - MpasMeshConverter.x program. - - In the file name, N will be replaced with the number of cells in the mesh. - diff --git a/grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp b/grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp deleted file mode 100644 index 2c6a4e527..000000000 --- a/grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp +++ /dev/null @@ -1,663 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define ID_LEN 10 - -using namespace std; -//using namespace tr1; - -int nCells, nVertices, vertexDegree; -bool spherical=false; -double sphereRadius=1.0; -int connectivityBase; -string in_history = ""; -string in_file_id = ""; - -// Connectivity and location information {{{ - -vector xCell, yCell, zCell; -vector xVertex, yVertex, zVertex; -vector< vector > cellsOnVertex; -vector meshDensity; - -// }}} - -// Iterators {{{ -vector::iterator int_itr; -vector< vector >::iterator vec_int_itr; -vector< vector >::iterator vec_dbl_itr; -vector::iterator dbl_itr; -// }}} - -/* Building/Ordering functions {{{ */ -int readGridInput(const string inputFilename); -int buildVertices(); -/*}}}*/ - -/* Output functions {{{*/ -int outputGridDimensions(const string outputFilename); -int outputGridAttributes(const string outputFilename, const string inputFilename); -int outputGridCoordinates(const string outputFilename); -int outputVertexConnectivity(const string outputFilename); -int outputMeshDensity(const string outputFilename); -/*}}}*/ - -/* Utility functions {{{*/ -int circumcenter(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3, double *cx, double 
*cy, double *cz); -int isCCW(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3); -/*}}}*/ - -string gen_random(const int len); - -int main ( int argc, char *argv[] ) { - int error; - ostringstream out_name_stream; - string out_name; - string in_name = "grid.nc"; - - cout << endl << endl; - cout << "************************************************************" << endl; - cout << "ASCII_TO_NETCDF_PACKAGER:\n"; - cout << " C++ version\n"; - cout << " Convert a set of ascii files describing a grid into a NetCDF file describing the same grid.\n"; - cout << " Requires cell information, and connectivity of the dual grid. Along with density values of each cell.\n"; - cout << endl << endl; - cout << " Compiled on " << __DATE__ << " at " << __TIME__ << ".\n"; - cout << "************************************************************" << endl; - cout << endl << endl; - - srand(time(NULL)); - - cout << "Reading input grid." << endl; - error = readGridInput(in_name); - if(error) return 1; - - - if ( argc > 1 ) { - out_name_stream << "grid." << argv[1] << "." << nCells << ".nc"; - } else { - out_name_stream << "grid." << nCells << ".nc"; - } - out_name = out_name_stream.str(); - - cout << "Building veritces." 
<< endl; - error = buildVertices(); - if(error) return 1; - - cout << endl << "Writing file: " << out_name << endl << endl; - - cout << "Writing grid dimensions" << endl; - if(error = outputGridDimensions(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Writing grid attributes" << endl; - if(error = outputGridAttributes(out_name, in_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Writing grid coordinates" << endl; - if(error = outputGridCoordinates(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Writing vertex connectivity" << endl; - if(error = outputVertexConnectivity(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Reading and writing meshDensity" << endl; - if(error = outputMeshDensity(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - return 0; -} - -/* Building/Ordering functions {{{ */ -int readGridInput(const string inputFilename){/*{{{*/ - double x, y, z; - ifstream cells("end_points.dat"); - ifstream dual_cells("triangles.dat"); - ifstream density("point_density.dat"); - string line; - vector *dual_cell; - int iVtx; - - double xRange[2], yRange[2], zRange[2]; - -#ifdef _DEBUG - cout << endl << endl << "Begin function: readGridInput" << endl << endl; -#endif - - xCell.clear(); - yCell.clear(); - zCell.clear(); - - xRange[0] = DBL_MAX; - xRange[1] = DBL_MIN; - yRange[0] = DBL_MAX; - yRange[1] = DBL_MIN; - zRange[0] = DBL_MAX; - zRange[1] = DBL_MIN; - - while(!cells.eof()){ - cells >> x >> y >> z; - - if(cells.good()){ - xRange[0] = min(xRange[0], x); - xRange[1] = max(xRange[1], x); - yRange[0] = min(yRange[0], y); - yRange[1] = max(yRange[1], y); - zRange[0] = min(zRange[0], z); - zRange[1] = max(zRange[1], z); - xCell.push_back(x); - yCell.push_back(y); - zCell.push_back(z); - } - } - cells.close(); - - if( fabs(xRange[1] - xRange[0]) > FLT_EPSILON && fabs(yRange[1] - yRange[0]) > FLT_EPSILON && 
fabs(zRange[1] - zRange[0]) > FLT_EPSILON ){ - spherical = true; - } - - if (spherical) { - sphereRadius = sqrt(xCell[0]*xCell[0] + yCell[0]*yCell[0] + zCell[0]*zCell[0]); - } - - cellsOnVertex.clear(); - connectivityBase = INT_MAX; - - nVertices = 0; - - for(std::string line; getline(dual_cells, line); ){ - nVertices++; - } - cellsOnVertex.resize(nVertices); - - dual_cells.close(); - dual_cells.open("triangles.dat"); - - iVtx = 0; - for(std::string line; getline(dual_cells, line); ){ - int start_idx = 0; - int count = 0; - for(int i = 0; i < line.length(); i++){ - count++; - if(line[i] == ' '){ - std::string idx = line.substr(start_idx, count); - - cellsOnVertex.at(iVtx).push_back( atoi(idx.c_str()) ); - - if (atoi(idx.c_str()) >= 0){ - connectivityBase = min(connectivityBase, atoi(idx.c_str())); - } - - count = 0; - start_idx = i; - } - } - - std::string last_idx = line.substr(start_idx); - cellsOnVertex.at(iVtx).push_back( atoi(last_idx.c_str()) ); - - if (atoi(last_idx.c_str()) >= 0){ - connectivityBase = min(connectivityBase, atoi(last_idx.c_str())); - } - - vertexDegree = cellsOnVertex.at(iVtx).size(); - iVtx++; - } - dual_cells.close(); - - meshDensity.clear(); - while(!density.eof()){ - double dens; - - density >> dens; - meshDensity.push_back(dens); - } - density.close(); - - nCells = xCell.size(); - nVertices = cellsOnVertex.size(); - - cout << "Read dimensions:" << endl; - cout << " nCells = " << xCell.size() << endl; - cout << " nVertices = " << cellsOnVertex.size() << endl; - cout << " vertexDegree = " << vertexDegree << endl; - cout << " Spherical? 
= " << spherical << endl; - cout << " Sphere Radius = " << sphereRadius << endl; - cout << " Connectivity base = " << connectivityBase << endl; - - cout << "" << endl; - cout << "X range: " << xRange[0] << " " << xRange[1] << endl; - cout << "Y range: " << yRange[0] << " " << yRange[1] << endl; - cout << "Z range: " << zRange[0] << " " << zRange[1] << endl; - - return 0; -}/*}}}*/ - -int buildVertices(){/*{{{*/ - double x, y, z, norm; - int v1, v2, v3; - - xVertex.clear(); - yVertex.clear(); - zVertex.clear(); - - for(int i = 0; i < cellsOnVertex.size(); i++){ - v1 = cellsOnVertex.at(i).at(0) - connectivityBase; - v2 = cellsOnVertex.at(i).at(1) - connectivityBase; - v3 = cellsOnVertex.at(i).at(2) - connectivityBase; - - if(!isCCW(xCell[v1], yCell[v1], zCell[v1], xCell[v2], yCell[v2], zCell[v2], xCell[v3], yCell[v3], zCell[v3])){ - v2 = cellsOnVertex.at(i).at(2) - connectivityBase; - v3 = cellsOnVertex.at(i).at(1) - connectivityBase; - } - - /* - cout << "Circumcenter of : " << v1 << " " << v2 << " " << v3 << endl; - cout << " 1 - " << xCell[v1] << " " << yCell[v1] << " " << zCell[v1] << endl; - cout << " 2 - " << xCell[v2] << " " << yCell[v2] << " " << zCell[v2] << endl; - cout << " 3 - " << xCell[v3] << " " << yCell[v3] << " " << zCell[v3] << endl; - // */ - - circumcenter(xCell[v1], yCell[v1], zCell[v1], - xCell[v2], yCell[v2], zCell[v2], - xCell[v3], yCell[v3], zCell[v3], - &x, &y, &z); - - if (spherical){ - norm = sqrt( x*x + y*y + z*z ); - x = (x / norm) * sphereRadius; - y = (y / norm) * sphereRadius; - z = (z / norm) * sphereRadius; - } - - xVertex.push_back(x); - yVertex.push_back(y); - zVertex.push_back(z); - } - - return 0; - -}/*}}}*/ -/*}}}*/ - -/* Output functions {{{*/ -int outputGridDimensions( const string outputFilename ){/*{{{*/ - /************************************************************************ - * - * This function writes the grid dimensions to the netcdf file named - * outputFilename - * - * 
**********************************************************************/ - // Return this code to the OS in case of failure. - static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Replace, NULL, 0, NcFile::Offset64Bits); - - int junk; - - nCells = xCell.size(); - - /* - for(vec_int_itr = edgesOnCell.begin(); vec_int_itr != edgesOnCell.end(); ++vec_int_itr){ - maxEdges = std::max(maxEdges, (int)(*vec_int_itr).size()); - }*/ - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // define dimensions - NcDim *nCellsDim; - NcDim *nEdgesDim; - NcDim *nVerticesDim; - NcDim *maxEdgesDim; - NcDim *maxEdges2Dim; - NcDim *TWODim; - NcDim *THREEDim; - NcDim *vertexDegreeDim; - NcDim *timeDim; - - // write dimensions - if (!(nCellsDim = grid.add_dim( "nCells", xCell.size()) )) return NC_ERR; - if (!(nVerticesDim = grid.add_dim( "nVertices", xVertex.size()) )) return NC_ERR; - if (!(TWODim = grid.add_dim( "TWO", 2) )) return NC_ERR; - if (!(vertexDegreeDim = grid.add_dim( "vertexDegree", vertexDegree) )) return NC_ERR; - if (!(timeDim = grid.add_dim( "Time") )) return NC_ERR; - - grid.close(); - - // file closed when file obj goes out of scope - return 0; -}/*}}}*/ -int outputGridAttributes( const string outputFilename, const string inputFilename ){/*{{{*/ - /************************************************************************ - * - * This function writes the grid dimensions to the netcdf file named - * outputFilename - * - * **********************************************************************/ - // Return this code to the OS in case of failure. 
- static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - NcBool sphereAtt, radiusAtt; - NcBool history, id, spec, conventions, source, periodic; - string history_str = ""; - string id_str = ""; - string parent_str =""; - - // write attributes - if(!spherical){ - if (!(sphereAtt = grid.add_att( "on_a_sphere", "NO\0"))) return NC_ERR; - if (!(radiusAtt = grid.add_att( "sphere_radius", 1.0))) return NC_ERR; - } else { - if (!(sphereAtt = grid.add_att( "on_a_sphere", "YES\0"))) return NC_ERR; - if (!(radiusAtt = grid.add_att( "sphere_radius", sphereRadius))) return NC_ERR; - } - - history_str += "AsciiToNetCDFPackager.x "; - if(in_history != ""){ - history_str += "\n"; - history_str += in_history; - } - - id_str = gen_random(ID_LEN); - - if (!(history = grid.add_att( "history", history_str.c_str() ))) return NC_ERR; - if (!(conventions = grid.add_att( "Conventions", "MPAS" ))) return NC_ERR; - if (!(source = grid.add_att( "source", "MpasMeshConverter.x" ))) return NC_ERR; - if (!(id = grid.add_att( "file_id", id_str.c_str() ))) return NC_ERR; - if (!(periodic = grid.add_att( "is_periodic", "NO\0" ))) return NC_ERR; - - grid.close(); - - // file closed when file obj goes out of scope - return 0; -}/*}}}*/ -int outputGridCoordinates( const string outputFilename) {/*{{{*/ - /************************************************************************ - * - * This function writes the grid coordinates to the netcdf file named - * outputFilename - * This includes all cell centers, vertices, and edges. - * Both cartesian and lat,lon, as well as all of their indices - * - * **********************************************************************/ - // Return this code to the OS in case of failure. 
- static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // fetch dimensions - NcDim *nCellsDim = grid.get_dim( "nCells" ); - NcDim *nVerticesDim = grid.get_dim( "nVertices" ); - - int nCells = nCellsDim->size(); - int nVertices = nVerticesDim->size(); - - //Define nc variables - NcVar *xCellVar, *yCellVar, *zCellVar, *xVertexVar, *yVertexVar, *zVertexVar; - - int i; - - // Build and write cell coordinate arrays - cout << "Writing xcell" << endl; - if (!(xCellVar = grid.add_var("xCell", ncDouble, nCellsDim))) return NC_ERR; - if (!xCellVar->put(&xCell[0],nCells)) return NC_ERR; - cout << "Writing ycell" << endl; - if (!(yCellVar = grid.add_var("yCell", ncDouble, nCellsDim))) return NC_ERR; - if (!yCellVar->put(&yCell[0],nCells)) return NC_ERR; - cout << "Writing zcell" << endl; - if (!(zCellVar = grid.add_var("zCell", ncDouble, nCellsDim))) return NC_ERR; - if (!zCellVar->put(&zCell[0],nCells)) return NC_ERR; - - //Build and write vertex coordinate arrays - cout << "Writing xvertex" << endl; - if (!(xVertexVar = grid.add_var("xVertex", ncDouble, nVerticesDim))) return NC_ERR; - if (!xVertexVar->put(&xVertex[0],nVertices)) return NC_ERR; - cout << "Writing yvertex" << endl; - if (!(yVertexVar = grid.add_var("yVertex", ncDouble, nVerticesDim))) return NC_ERR; - if (!yVertexVar->put(&yVertex[0],nVertices)) return NC_ERR; - cout << "Writing zvertex" << endl; - if (!(zVertexVar = grid.add_var("zVertex", ncDouble, nVerticesDim))) return NC_ERR; - if (!zVertexVar->put(&zVertex[0],nVertices)) return NC_ERR; - - grid.close(); - - return 0; -}/*}}}*/ -int outputVertexConnectivity( const string outputFilename) {/*{{{*/ - /***************************************************************** - * - * This function writes all of the *OnVertex 
arrays. Including - * cellsOnVertex - * - * ***************************************************************/ - // Return this code to the OS in case of failure. - static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // fetch dimensions - NcDim *nVerticesDim = grid.get_dim( "nVertices" ); - NcDim *vertexDegreeDim = grid.get_dim( "vertexDegree" ); - - // define nc variables - NcVar *covVar, *eovVar, *bdryVertVar; - - int nVertices = nVerticesDim->size(); - int vertexDegree = vertexDegreeDim->size(); - int i, j; - - int *tmp_arr; - - // Build and write COV array - tmp_arr = new int[nVertices*vertexDegree]; - - for(i = 0; i < nVertices; i++){ - for(j = 0; j < vertexDegree; j++){ - tmp_arr[i*vertexDegree + j] = 0; - } - } - - i = 0; - for(vec_int_itr = cellsOnVertex.begin(); vec_int_itr != cellsOnVertex.end(); ++vec_int_itr){ - j = 0; - for(int_itr = (*vec_int_itr).begin(); int_itr != (*vec_int_itr).end(); ++int_itr){ - tmp_arr[i*vertexDegree + j] = (*int_itr) - connectivityBase + 1; - j++; - } - i++; - } - - if (!(covVar = grid.add_var("cellsOnVertex", ncInt, nVerticesDim, vertexDegreeDim))) return NC_ERR; - if (!covVar->put(tmp_arr,nVertices,vertexDegree)) return NC_ERR; - - cellsOnVertex.clear(); - - return 0; -}/*}}}*/ -int outputMeshDensity( const string outputFilename) {/*{{{*/ - /*************************************************************************** - * - * This function writes the meshDensity variable. Read in from the file SaveDensity - * - * *************************************************************************/ - // Return this code to the OS in case of failure. 
- static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // fetch dimensions - NcDim *nCellsDim = grid.get_dim( "nCells" ); - - NcVar *cDensVar; - - int nCells = nCellsDim->size(); - int i, j, k; - int junk_int; - double junk_dbl; - - vector dbl_tmp_arr; - - //Write meshDensity - if (!(cDensVar = grid.add_var("meshDensity", ncDouble, nCellsDim))) return NC_ERR; - if (!cDensVar->put(&meshDensity.at(0),nCells)) return NC_ERR; - - return 0; -}/*}}}*/ -/*}}}*/ - -string gen_random(const int len) {/*{{{*/ - static const char alphanum[] = - "0123456789" -// "ABCDEFGHIJKLMNOPQRSTUVWXYZ" - "abcdefghijklmnopqrstuvwxyz"; - - string rand_str = ""; - - for (int i = 0; i < len; ++i) { - rand_str += alphanum[rand() % (sizeof(alphanum) - 1)]; - } - - return rand_str; -}/*}}}*/ - -int circumcenter(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3, double *cx, double *cy, double *cz){/*{{{*/ - - if(spherical){ - double a, b, c, pbc, apc, abp; - double bottom; - double x23, y23, z23; - double x31, y31, z31; - double x12, y12, z12; - - x23 = x2 - x3; - y23 = y2 - y3; - z23 = z2 - z3; - - x31 = x3 - x1; - y31 = y3 - y1; - z31 = z3 - z1; - - x12 = x1 - x2; - y12 = y1 - y2; - z12 = z1 - z2; - - a = pow(x23, 2) + pow(y23, 2) + pow(z23, 2); - b = pow(x31, 2) + pow(y31, 2) + pow(z31, 2); - c = pow(x12, 2) + pow(y12, 2) + pow(z12, 2); -// cout << " ABC: " << a << " " << b << " " << c << endl; - - pbc = a*(-a + b + c); - apc = b*( a - b + c); - abp = c*( a + b - c); - - bottom = pbc + apc + abp; - - *cx = (pbc * x1 + apc * x2 + abp * x3) / bottom; - *cy = (pbc * y1 + apc * y2 + abp * y3) / bottom; - *cz = (pbc * z1 + apc * z2 + abp * z3) / bottom; - } else { - double d; - - d = 2.0 * ( x1 * (y2 - y3) + x2 * 
(y3 - y1) + x3 * (y1 - y2)); - - *cx = (( powf(x1, 2) + powf(y1, 2) ) * (y2 - y3) + ( powf(x2, 2) + powf(y2, 2) ) * (y3 - y1) + ( powf(x3, 2) + powf(y3, 2) ) * (y1 - y2)) / d; - *cy = (( powf(x1, 2) + powf(y1, 2) ) * (x3 - x2) + ( powf(x2, 2) + powf(y2, 2) ) * (x1 - x3) + ( powf(x3, 2) + powf(y3, 2) ) * (x2 - x1)) / d; - *cz = 0.0; - } - - return 0; -}/*}}}*/ - -int isCCW(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3){/*{{{*/ - double nx, ny, nz; - double ux, uy, uz; - double vx, vy, vz; - double cx, cy, cz; - double dot; - - if (spherical){ - nx = x1; - ny = y1; - nz = z1; - } else { - nx = 0.0; - ny = 0.0; - nz = 1.0; - } - - ux = x2 - x1; - uy = y2 - y1; - uz = z2 - z1; - vx = x3 - x1; - vy = y3 - y1; - vz = z3 - z1; - - cx = uy * vz - uz * vy; - cy = uz * vx - ux * vz; - cz = ux * vy - uy * vx; - - dot = cx * nx + cy * ny + cz * nz; - - if (dot > 0.0) { - return 1; - } else { - return 0; - } -}/*}}}*/ From 7e81bc58b2a4bbc02f545420ed1ebd3dd001134a Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:05:11 -0600 Subject: [PATCH 007/180] Remove grid_gen/basin --- grid_gen/basin/.gitignore | 10 - .../basin/Namelists/namelist.DOME_3D_overflow | 22 - .../basin/Namelists/namelist.Ilicak2_overflow | 22 - .../Namelists/namelist.Ilicak2_overflow_sigma | 22 - grid_gen/basin/Namelists/namelist.SOMA | 31 - .../Namelists/namelist.aquaplanet_one_layer | 29 - .../namelist.baroclinic_channel_Ilicak4 | 22 - .../basin/Namelists/namelist.global_realistic | 31 - .../Namelists/namelist.internal_wave_Ilicak3 | 22 - .../Namelists/namelist.isopycnal_channel | 21 - .../Namelists/namelist.lock_exchange_Ilicak1 | 22 - .../Namelists/namelist.sub_ice_shelf_test1 | 25 - .../Namelists/namelist.sub_ice_shelf_test2 | 25 - .../Namelists/namelist.sub_ice_shelf_test3 | 25 - .../namelist.unitTestCVMixConvection | 22 - .../Namelists/namelist.unitTestCVMixShear | 22 - grid_gen/basin/README | 24 - grid_gen/basin/dx/README | 14 - 
grid_gen/basin/namelist.basin | 1 - grid_gen/basin/runit | 20 - grid_gen/basin/src/Makefile | 82 - grid_gen/basin/src/basin.F | 3192 ----------------- grid_gen/basin/src/module_cullLoops.F | 84 - grid_gen/basin/src/module_read_TS.F | 143 - grid_gen/basin/src/module_read_U.F | 130 - grid_gen/basin/src/module_read_monthly.F | 154 - grid_gen/basin/src/module_read_netcdf.F | 523 --- grid_gen/basin/src/module_read_topo.F | 109 - grid_gen/basin/src/module_write_netcdf.F | 694 ---- grid_gen/basin/src/utilities.F | 781 ---- 30 files changed, 6324 deletions(-) delete mode 100644 grid_gen/basin/.gitignore delete mode 100644 grid_gen/basin/Namelists/namelist.DOME_3D_overflow delete mode 100644 grid_gen/basin/Namelists/namelist.Ilicak2_overflow delete mode 100644 grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma delete mode 100644 grid_gen/basin/Namelists/namelist.SOMA delete mode 100644 grid_gen/basin/Namelists/namelist.aquaplanet_one_layer delete mode 100644 grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 delete mode 100644 grid_gen/basin/Namelists/namelist.global_realistic delete mode 100644 grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 delete mode 100644 grid_gen/basin/Namelists/namelist.isopycnal_channel delete mode 100644 grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 delete mode 100644 grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 delete mode 100644 grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 delete mode 100644 grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 delete mode 100644 grid_gen/basin/Namelists/namelist.unitTestCVMixConvection delete mode 100644 grid_gen/basin/Namelists/namelist.unitTestCVMixShear delete mode 100644 grid_gen/basin/README delete mode 100644 grid_gen/basin/dx/README delete mode 120000 grid_gen/basin/namelist.basin delete mode 100755 grid_gen/basin/runit delete mode 100644 grid_gen/basin/src/Makefile delete mode 100644 grid_gen/basin/src/basin.F delete mode 100644 
grid_gen/basin/src/module_cullLoops.F delete mode 100644 grid_gen/basin/src/module_read_TS.F delete mode 100644 grid_gen/basin/src/module_read_U.F delete mode 100644 grid_gen/basin/src/module_read_monthly.F delete mode 100644 grid_gen/basin/src/module_read_netcdf.F delete mode 100644 grid_gen/basin/src/module_read_topo.F delete mode 100644 grid_gen/basin/src/module_write_netcdf.F delete mode 100644 grid_gen/basin/src/utilities.F diff --git a/grid_gen/basin/.gitignore b/grid_gen/basin/.gitignore deleted file mode 100644 index 2dee6bde4..000000000 --- a/grid_gen/basin/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -# Ignore all netcdf files and graph files -fort* -map -*.mod -*.f90 - -# Ignore all PNGs -*.png - -# Each tool should also have it's own .gitignore file that ignores the build files for that tool. diff --git a/grid_gen/basin/Namelists/namelist.DOME_3D_overflow b/grid_gen/basin/Namelists/namelist.DOME_3D_overflow deleted file mode 100644 index c98ff3bf6..000000000 --- a/grid_gen/basin/Namelists/namelist.DOME_3D_overflow +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 25 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'DOME_3D_overflow' - initial_conditions = 'DOME_3D_overflow' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .true. - top_layers_without_land = 3 - layer_thickness_total_max = 3600.0 - f0 = 1.0e-4 - beta = 0.0 - omega = 0.0 - Lx = 2000.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow b/grid_gen/basin/Namelists/namelist.Ilicak2_overflow deleted file mode 100644 index ac7547c55..000000000 --- a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 100 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. 
- zLevel_thickness = 'equally_spaced' - bottom_topography = 'Ilicak2_overflow' - initial_conditions = 'Ilicak2_overflow' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 2000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 4.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma b/grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma deleted file mode 100644 index 8e6bb9c3c..000000000 --- a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 100 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'Ilicak2_overflow_sigma' - initial_conditions = 'Ilicak2_overflow_sigma' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_total_max = 2000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 4.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.SOMA b/grid_gen/basin/Namelists/namelist.SOMA deleted file mode 100644 index 559b41eea..000000000 --- a/grid_gen/basin/Namelists/namelist.SOMA +++ /dev/null @@ -1,31 +0,0 @@ -&basin - nVertLevelsMOD = 40 - on_a_sphere = 'YES' - expand_from_unit_sphere = .true. - sphere_radius = 6.37122e6 - zLevel_thickness = 'SOMA_40_Level' - bottom_topography = 'SOMA_Circular_Basin' - initial_conditions = 'SOMA_TS' - eliminate_inland_seas=.true. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - - ! 
These variables may be used for acc wind amplification - amplify_acc_wind = .false. - amp_wind_factor = 2.0 - amp_wind_center_lat = -35.0 - amp_wind_spread_lat = 3.0 - - ! These variables are not needed for realistic global topography: - - ! layer_total_max = 2000.0 - ! f0 = -1.1e-4 - ! beta = 1.4e-11 - ! omega = 7.29212e-5 - ! Lx = 3200.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.aquaplanet_one_layer b/grid_gen/basin/Namelists/namelist.aquaplanet_one_layer deleted file mode 100644 index 0d614f412..000000000 --- a/grid_gen/basin/Namelists/namelist.aquaplanet_one_layer +++ /dev/null @@ -1,29 +0,0 @@ -&basin - nVertLevelsMOD = 1 - on_a_sphere = 'YES' - expand_from_unit_sphere = .true. - sphere_radius = 1.0 - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'uniform_TS' - uniform_T = 10.0 - uniform_S = 34.0 - uniform_tracer1 = 1.0 - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_total_max = 3000.0 - - ! These variables are not needed for aquaplanet. - ! u,f,h are initialized in the code. - - f0 = 0.0 - beta = 0.0 - omega = 0.0 - ! Lx = 3200.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 b/grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 deleted file mode 100644 index 8815f57fb..000000000 --- a/grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'baroclinic_channel_Ilicak4' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. 
- solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1000.0 - f0 = -1.2e-4 - beta = 0.0 - omega = 0.0 - Lx = 160.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.global_realistic b/grid_gen/basin/Namelists/namelist.global_realistic deleted file mode 100644 index a905b5cc4..000000000 --- a/grid_gen/basin/Namelists/namelist.global_realistic +++ /dev/null @@ -1,31 +0,0 @@ -&basin - nVertLevelsMOD = 40 - on_a_sphere = 'YES' - expand_from_unit_sphere = .true. - sphere_radius = 6.37122e6 - zLevel_thickness = 'POP_40_zLevel' - bottom_topography = 'realistic_ETOPO' - initial_conditions = 'realistic_WOCE' - eliminate_inland_seas=.true. - load_woce_IC = .true. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - - ! These variables may be used for acc wind amplification - ! amplify_acc_wind = .false. - ! amp_wind_factor = 2.0 - ! amp_wind_center_lat = -35.0 - ! amp_wind_spread_lat = 3.0 - - ! These variables are not needed for realistic global topography: - - ! layer_total_max = 2000.0 - ! f0 = -1.1e-4 - ! beta = 1.4e-11 - ! omega = 7.29212e-5 - ! Lx = 3200.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 b/grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 deleted file mode 100644 index dcaee8973..000000000 --- a/grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'internal_wave_Ilicak3' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. 
- top_layers_without_land = 3 - layer_thickness_total_max = 500.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 20.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.isopycnal_channel b/grid_gen/basin/Namelists/namelist.isopycnal_channel deleted file mode 100644 index c3139c1f8..000000000 --- a/grid_gen/basin/Namelists/namelist.isopycnal_channel +++ /dev/null @@ -1,21 +0,0 @@ -&basin - nVertLevelsMOD = 3 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'isopycnal_3layer' - bottom_topography = 'flat_bottom' - initial_conditions = 'isopycnal_3layer' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - f0 = -1.1e-4 - beta = 1.4e-11 - omega = 0.0 - Lx = 2048.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 b/grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 deleted file mode 100644 index 529e8cceb..000000000 --- a/grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'lock_exchange_Ilicak1' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. 
- top_layers_without_land = 3 - layer_thickness_total_max = 20.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 4.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 b/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 deleted file mode 100644 index 679346941..000000000 --- a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 +++ /dev/null @@ -1,25 +0,0 @@ -&basin - nVertLevelsMOD = 22 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'sub_ice_shelf_test1' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1100.0 - f0 = -1.4e-4 - beta = 0.0 - omega = 0.0 - Lx = 50.0e3 - surfaceWindStressMax = 0.0 - iceShelfCavityThickness = 10 - iceShelfEdgeWidth = 15.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 b/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 deleted file mode 100644 index 78777074d..000000000 --- a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 +++ /dev/null @@ -1,25 +0,0 @@ -&basin - nVertLevelsMOD = 22 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'sub_ice_shelf_test2' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. 
- top_layers_without_land = 3 - layer_thickness_total_max = 1100.0 - f0 = -1.4e-4 - beta = 0.0 - omega = 0.0 - Lx = 50.0e3 - surfaceWindStressMax = 0.1 - iceShelfCavityThickness = 10 - iceShelfEdgeWidth = 15.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 b/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 deleted file mode 100644 index 914bafb65..000000000 --- a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 +++ /dev/null @@ -1,25 +0,0 @@ -&basin - nVertLevelsMOD = 22 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'sub_ice_shelf_test3' - initial_conditions = 'sub_ice_shelf_test3' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1100.0 - f0 = -1.4e-4 - beta = 0.0 - omega = 0.0 - Lx = 50.0e3 - surfaceWindStressMax = 0.1 - iceShelfCavityThickness = 10 - iceShelfEdgeWidth = 15.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.unitTestCVMixConvection b/grid_gen/basin/Namelists/namelist.unitTestCVMixConvection deleted file mode 100644 index 5ba575b39..000000000 --- a/grid_gen/basin/Namelists/namelist.unitTestCVMixConvection +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'unitTestCVMixConvection' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. 
- top_layers_without_land = 3 - layer_thickness_total_max = 1000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 160.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.unitTestCVMixShear b/grid_gen/basin/Namelists/namelist.unitTestCVMixShear deleted file mode 100644 index b375088cf..000000000 --- a/grid_gen/basin/Namelists/namelist.unitTestCVMixShear +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'unitTestCVMixShear' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 160.0e3 -/ diff --git a/grid_gen/basin/README b/grid_gen/basin/README deleted file mode 100644 index e64446471..000000000 --- a/grid_gen/basin/README +++ /dev/null @@ -1,24 +0,0 @@ -Program: basin - -This program reads in a MPAS grid file (grid.nc) and outputs a modified grid file -(ocean.nc) and it's associated graph.info files for partitions from 2 up to 1024 in powers of 2. - -The purpose of this code is to remove grid cells from any valid MPAS grid, and to -add initial condition variables like h, u, u_src, forcing, and tracers. -Please see source file src/basin.F to define the specifics of the output grid file. - -The required files are: - grid.nc is an mpas grid, either spherical or Cartesian - namelist.basin may point to a standard case in the namelists directory. - -After grid.nc and namelist.basin files has been placed in this directory, -simply run the script runit. - -This script will compile basin, run basin (producing an ocean.nc file) and use metis -to partition the graph.info file. 
- -The metis portion of the script requires the executable kmetis to be in your path. -If it is not, none of the graph.info.part.N files will be created, but can later be -created manually using metis and the assocaite graph.info file. - - diff --git a/grid_gen/basin/dx/README b/grid_gen/basin/dx/README deleted file mode 100644 index cb69a3aed..000000000 --- a/grid_gen/basin/dx/README +++ /dev/null @@ -1,14 +0,0 @@ -The dx directory will contain the files for OpenDX visualization after -basin is run. These files include: -h.data -mesh.cfg -mesh.net -ocean.area.data -ocean.dx -ocean.edge.data -ocean.face.data -ocean.loop.data -ocean.position.data -vector.dx -vector.position.data - diff --git a/grid_gen/basin/namelist.basin b/grid_gen/basin/namelist.basin deleted file mode 120000 index 05bd486db..000000000 --- a/grid_gen/basin/namelist.basin +++ /dev/null @@ -1 +0,0 @@ -Namelists/namelist.global_realistic \ No newline at end of file diff --git a/grid_gen/basin/runit b/grid_gen/basin/runit deleted file mode 100755 index 60f647f31..000000000 --- a/grid_gen/basin/runit +++ /dev/null @@ -1,20 +0,0 @@ -rm -f map -cd src -make clean -make -mv map .. -cd .. 
-rm -f graph* -./map -#cd ../metis-4.0 -kmetis graph.info 2 -kmetis graph.info 4 -kmetis graph.info 8 -kmetis graph.info 16 -kmetis graph.info 32 -kmetis graph.info 64 -kmetis graph.info 128 -kmetis graph.info 256 -kmetis graph.info 512 -kmetis graph.info 1024 -#cd ../basin diff --git a/grid_gen/basin/src/Makefile b/grid_gen/basin/src/Makefile deleted file mode 100644 index aade64cf0..000000000 --- a/grid_gen/basin/src/Makefile +++ /dev/null @@ -1,82 +0,0 @@ -# IBM with Xlf compilers -#FC = xlf90 -#CC = xlc -#FFLAGS = -qrealsize=8 -g -C -#CFLAGS = -g -#LDFLAGS = -g -C - -# pgf90 -#FC = pgf90 -#CC = pgcc -#FFLAGS = -r8 -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 - -## ifort -FC = ifort -CC = icc -FFLAGS = -real-size 64 #-g -traceback -check all -CFLAGS = #-g -LDFLAGS = #-g -traceback -check all - -# gfortran -#FC = gfortran -#CC = gcc -#FFLAGS = -O3 -m64 -ffree-line-length-none -fdefault-real-8 -fconvert=big-endian -ffree-form -#CFLAGS = -#LDFLAGS = - -# absoft -#FC = f90 -#CC = gcc -#FFLAGS = -dp -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 -#NETCDF = /Users/maltrud/local - - -CPP = cpp -P -traditional -CPPFLAGS = -CPPINCLUDES = -INCLUDES = -I$(NETCDF)/include - -LIBS = -L$(NETCDF)/lib -NCLIB = -lnetcdf -NCLIBF = -lnetcdff -ifneq ($(wildcard $(NETCDF)/lib/libnetcdff.*), ) # CHECK FOR NETCDF4 - LIBS += $(NCLIBF) -endif # CHECK FOR NETCDF4 -LIBS += $(NCLIB) - -RM = rm -f - -########################## - -.SUFFIXES: .F .o - - -OBJS = basin.o \ - utilities.o \ - module_read_netcdf.o \ - module_read_topo.o \ - module_read_TS.o \ - module_read_U.o \ - module_read_monthly.o \ - module_cullLoops.o \ - module_write_netcdf.o - -all: map - -basin.o: utilities.o module_write_netcdf.o module_read_netcdf.o module_read_topo.o module_read_TS.o module_read_U.o module_read_monthly.o module_cullLoops.o - -map: $(OBJS) - $(FC) $(LDFLAGS) -o $@ $(OBJS) $(LIBS) - -clean: - $(RM) *.o *.mod pop *.f90 - -.F.o: - $(RM) $@ $*.mod - $(CPP) $(CPPFLAGS) $(CPPINCLUDES) $< > $*.f90 - $(FC) $(FFLAGS) -c $*.f90 
$(INCLUDES) - #$(RM) $*.f90 diff --git a/grid_gen/basin/src/basin.F b/grid_gen/basin/src/basin.F deleted file mode 100644 index 2afcfff80..000000000 --- a/grid_gen/basin/src/basin.F +++ /dev/null @@ -1,3192 +0,0 @@ -program map_to_basin - -use read_netcdf -use read_topo -use read_TS -use read_U -use read_MONTHLY -use write_netcdf -use utilities -use cullLoops - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Program: basin.F -! -! This program is meant to add land to grids, as well as initial conditions. -! -! This program is used to take a specific mesh, and remove Cells from it -! It can be used to change a planar grid into a Channel or a basin grid, or to -! Change a spherical grid into a Limited area spherical grid. -! -! How to use: -! Step 1: Link namelist.basin to the correct namelist file. -! Step 2: Change parameters and flags in namelist file as needed. -! Step 3: Check get_init_conditions routine for initial T&S, thickness, etc. -! Step 4: Check define_kmt routine for bottomDepth and kmt (maxLevelCell) variables. -! Step 5: Check get_dz routine for hZLevel variable. -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -implicit none - -! 
original grid variables -integer :: time, nCells, nEdges, nVertices -integer :: maxEdges, maxEdges2, TWO, vertexDegree, nVertLevels -integer, allocatable, dimension(:) :: indexToCellID, indexToEdgeID, indexToVertexID -real, allocatable, dimension(:) :: xCell, yCell, zCell, latCell, lonCell, meshDensity -real, allocatable, dimension(:) :: xEdge, yEdge, zEdge, latEdge, lonEdge -real, allocatable, dimension(:) :: xVertex, yVertex, zVertex, latVertex, lonVertex -integer, allocatable, dimension(:) :: nEdgesOnCell, nEdgesOnEdge -integer, allocatable, dimension(:,:) :: cellsOnCell, edgesOnCell, verticesOnCell -integer, allocatable, dimension(:,:) :: cellsOnEdge, verticesOnEdge, edgesOnEdge -integer, allocatable, dimension(:,:) :: cellsOnVertex, edgesOnVertex -real, allocatable, dimension(:) :: areaCell, areaTriangle, dcEdge, dvEdge, angleEdge -real, allocatable, dimension(:,:) :: kiteAreasOnVertex, weightsOnEdge - -real, allocatable, dimension(:) :: fCell, fEdge, fVertex, bottomDepth, work1, surfaceWindStress -real, allocatable, dimension(:,:,:) :: normalVelocity, tangentialVelocity, layerThickness -real, allocatable, dimension(:,:,:) :: density - -integer nt_lon, nt_lat, nt_depth -integer nu_lon, nu_lat, nu_depth -real(kind=4), allocatable, dimension(:) :: t_lon, t_lat, depth_t -real(kind=4), allocatable, dimension(:) :: u_lon, u_lat, depth_u -real(kind=4), allocatable, dimension(:,:) :: mTEMP, mSALT -real(kind=4), allocatable, dimension(:,:,:) :: TEMP, SALT -real(kind=4), allocatable, dimension(:,:) :: TAUX, TAUY - -real(kind=4), allocatable, dimension(:,:,:) :: SST_MONTHLY, SSS_MONTHLY -real(kind=4), allocatable, dimension(:,:,:) :: TAUX_MONTHLY, TAUY_MONTHLY - -real, dimension(:), allocatable :: dz -integer :: nMonths = 1 - - real (kind=8) :: ymid, ytmp, ymax, xmid, xloc, yloc, pert, ymin, distance, r, c1(3), c2(3) - real (kind=8) :: latmid, lattmp, latmax, latmin - integer :: cell1, cell2 -real :: eos_linear_alpha, eos_linear_beta, eos_linear_Tref, eos_linear_Sref, 
eos_linear_densityref - -! new grid variables -real, allocatable, dimension(:) :: hZLevel, refBottomDepth -integer :: nCellsNew, nEdgesNew, nVerticesNew -integer :: maxEdgesNew, maxEdges2New, TWONew, vertexDegreeNew, nVertLevelsNew -integer, allocatable, dimension(:) :: indexToCellIDNew, indexToEdgeIDNew, indexToVertexIDNew -real, allocatable, dimension(:) :: xCellNew, yCellNew, zCellNew, latCellNew, lonCellNew, meshDensityNew, meshSpacingNew -real, allocatable, dimension(:) :: xEdgeNew, yEdgeNew, zEdgeNew, latEdgeNew, lonEdgeNew -real, allocatable, dimension(:) :: xVertexNew, yVertexNew, zVertexNew, latVertexNew, lonVertexNew -integer, allocatable, dimension(:) :: nEdgesOnCellNew, nEdgesOnEdgeNew, flipVerticesOnEdgeOrdering -integer, allocatable, dimension(:,:) :: cellsOnCellNew, edgesOnCellNew, verticesOnCellNew -integer, allocatable, dimension(:,:) :: cellsOnEdgeNew, verticesOnEdgeNew, edgesOnEdgeNew -integer, allocatable, dimension(:,:) :: cellsOnVertexNew, edgesOnVertexNew -integer, allocatable, dimension(:,:) :: boundaryEdgeNew, boundaryVertexNew -real, allocatable, dimension(:) :: areaCellNew, areaTriangleNew, dcEdgeNew, dvEdgeNew, angleEdgeNew -real, allocatable, dimension(:,:) :: kiteAreasOnVertexNew, weightsOnEdgeNew, normalsNew - -real, allocatable, dimension(:) :: fCellNew, fEdgeNew, fVertexNew, bottomDepthNew -real, allocatable, dimension(:) :: surfaceWindStressNew -real, allocatable, dimension(:) :: surfaceWindStressNewZonal, surfaceWindStressNewMeridional -real, allocatable, dimension(:,:,:) :: normalVelocityNew, layerThicknessNew -real, allocatable, dimension(:,:,:) :: densityNew, temperatureNew, salinityNew, tracer1New -real, allocatable, dimension(:) :: temperatureRestoreNew, salinityRestoreNew -real, allocatable, dimension(:) :: boundaryLayerDepthNew - -! mapping variables -integer, allocatable, dimension(:) :: kmt, maxLevelCellNew -integer, allocatable, dimension(:) :: cellMap, edgeMap, vertexMap - -! 
work variables -integer :: i,j,jNew,k,jEdge,jEdgeNew,iVertex1New,iVertex2New,iCell1New,iCell2New -integer :: iCell, iCell1, iCell2, iCell3, iEdge, iVertex, iVertex1, iVertex2 -integer :: iCellNew, iEdgeNew, iVertexNew, ndata, jCell1, jCell2, jCell, iter -real :: xin, yin, zin, ulon, ulat, ux, uy, uz, rlon, rlat, temp_t, temp_s - -integer :: iMonth -character(len=80) :: fileNameT, fileNameS, fileNameU - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Namelist variables -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -! Variables in namelist file -character (len=32) :: on_a_sphere, zLevel_thickness,bottom_topography, initial_conditions -logical :: expand_from_unit_sphere, eliminate_inland_seas, load_woce_IC, & - write_OpenDX_flag, check_mesh, & - cut_domain_from_sphere, solid_boundary_in_y, solid_boundary_in_x, & - amplify_acc_wind, load_phc_IC - -integer :: nVertLevelsMOD, top_layers_without_land -real (kind=8) :: sphere_radius, layer_thickness_total_max, f0, beta, omega, Lx, & - uniform_T, uniform_S, uniform_tracer1, & - amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat, surfaceWindStressMax, & - iceShelfCavityThickness, iceShelfEdgeWidth - -! specify namelist -namelist /basin/ nVertLevelsMOD, on_a_sphere, sphere_radius, & - expand_from_unit_sphere, & - zLevel_thickness, bottom_topography, initial_conditions, & - eliminate_inland_seas, load_woce_IC, load_phc_IC, write_OpenDX_flag, check_mesh, & - cut_domain_from_sphere, solid_boundary_in_y, solid_boundary_in_x, & - top_layers_without_land, layer_thickness_total_max, f0, beta, omega, Lx, & - uniform_T, uniform_S, uniform_tracer1, surfaceWindStressMax, & - amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat, & - iceShelfCavityThickness, iceShelfEdgeWidth - -! Default namelist values. Default set for realistic global IC. -nVertLevelsMOD = 40 -on_a_sphere = 'YES' -sphere_radius = 6.37122e6 -expand_from_unit_sphere = .true. 
- -! zLevel thickness options: -! 'POP_40_zLevel', 'equally_spaced', 'zero' -zLevel_thickness = 'POP_40_zLevel' - -! bottom topography options: -! 'realistic_ETOPO', 'flat_bottom', 'Ilicak2_overflow', 'DOME_3D_overflow' -bottom_topography = 'realistic_ETOPO' - -! initial temperature and salinity options: -! 'realistic_WOCE', 'realistic_PHC', 'uniform_TS', 'lock_exchange_Ilicak1', 'Ilicak2_overflow', 'Ilicak2_overflow_sigma', 'DOME_3D_overflow', -! 'internal_wave_Ilicak3', 'baroclinic_channel_Ilicak4' -initial_conditions = 'realistic_WOCE' -uniform_T = 10.0 -uniform_S = 34.0 -uniform_tracer1 = 1.0 - -eliminate_inland_seas=.true. -load_woce_IC = .true. -load_phc_IC = .false. -write_OpenDX_flag = .false. -check_mesh = .true. -cut_domain_from_sphere = .false. -solid_boundary_in_y = .false. -solid_boundary_in_x = .false. - -! Set the number of top layers that are not allowed to have land, usually three. -top_layers_without_land = 3 - -layer_thickness_total_max = 2000.0 ! total layer thickness, for equally spaced case -surfaceWindStressMax = 0.1 ! max wind stress, N/m2 -f0 = -1.1e-4 ! Coriolis parameter -beta = 1.4e-11 -omega = 7.29212e-5 ! rotation rate of earth - -! This needs to be changed for correct periodic boundaries -! Lx is the TOTAL domain width, and needs to be exact for correct periodic -! boundaries in x. -Lx = 3200.0e3 ! 40x80km=3200km - -! amplify wind stress in acc, for study with Valis -amplify_acc_wind = .false. -amp_wind_factor = 2.0 -amp_wind_center_lat = -35.0 -amp_wind_spread_lat = 3.0 - -iceShelfCavityThickness = 100 -iceShelfEdgeWidth = 15.0e3 - -! Read in namelist - open(20,file='namelist.basin',status='old') - read(20,basin) - close(20) - -if(load_woce_IC .and. load_phc_IC) then - write(0,*) 'Error. PHC and WOCE cannot both be loaded.' - write(0,*) 'Please edit namelist.basin and update load_woce_IC or load_phc_IC.' 
- stop -end if - -allocate (hZLevel(nVertLevelsMOD), refBottomDepth(nVertLevelsMOD)) - - -nMonths = 1 - -if (load_woce_IC) then - fileNameT = 'TS/annual/woce_t_ann.3600x2431x42interp.r4.nc' - fileNameS = 'TS/annual/woce_s_ann.3600x2431x42interp.r4.nc' -else if (load_phc_IC) then - fileNameT = 'PHC/PT.01.filled.60levels.nc' - fileNameS = 'PHC/Salt.01.filled.60levels.nc' -end if -fileNameU = 'TS/annual/ws.old_ncep_1958-2000avg.interp3600x2431.nc' - -! get to work -write(6,*) ' starting' -write(6,*) - -! get depth profile for later -write(6,*) ' calling get_dz' -call get_dz - -! get grid -write(6,*) ' calling read_grid' -write(6,*) -call read_grid -write(6,*) ' xCell 1: ',minval(xCell), maxval(xCell) - -! copy dimensions -write(6,*) ' copy dimensions' -write(6,*) -call copy_dimensions -write(6,*) ' xCell 1: ',minval(xCell), maxval(xCell) - -! define the kmt array -write(6,*) ' calling define_kmt' -write(6,*) -call define_kmt - -! define the mapping between original and new cells, edges and vertices -write(6,*) ' calling define_mapping' -write(6,*) -call define_mapping - -! copy the vector arrays form the original to new arrays -write(6,*) ' calling map_vectors' -write(6,*) -call map_vectors - -! define the new connectivity variables -write(6,*) ' calling map_connectivity' -write(6,*) -call map_connectivity - -! check the mesh -if (check_mesh) then - call error_checking -endif - -if (load_woce_IC .or. 
load_phc_IC) then - write(6,*) ' getting woce t and s ' - - call read_TS_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS) - write(6,*) ' TS INIT ', nt_lon, nt_lat, nt_depth - allocate(t_lon(nt_lon), t_lat(nt_lat), depth_t(nt_depth), TEMP(nt_lon,nt_lat,nt_depth), SALT(nt_lon,nt_lat,nt_depth)) - allocate(mTEMP(nt_lat,nt_depth), mSALT(nt_lat,nt_depth)) - call read_TS_fields(t_lon, t_lat, depth_t, TEMP, SALT) - call read_TS_finalize() - - write(6,*) ' getting woce windStress ' - call read_U_init(nu_lon, nu_lat, nu_depth, fileNameU) - write(6,*) ' WINDSTRESS INIT ', nu_lon, nu_lat, nu_depth - allocate(u_lon(nu_lon), u_lat(nu_lat), depth_u(nu_depth)) - allocate(TAUX(nu_lon,nu_lat), TAUY(nu_lon,nu_lat)) - call read_U_fields(u_lon, u_lat, depth_u, TAUX, TAUY) - call read_U_finalize() - - - do k=1,nt_depth - ndata = 0; temp_t=0; temp_s=0 - do j=1,nt_lat - do i=1,nt_lon - if(TEMP(i,j,k).gt.-10.0) then - ndata = ndata + 1 - temp_t = temp_t + TEMP(i,j,k) - temp_s = temp_s + SALT(i,j,k) - endif - enddo - enddo - mTEMP(:,k) = temp_t / float(ndata) - mSALT(:,k) = temp_s / float(ndata) - write(6,*) ndata,mTemp(1,k),mSalt(1,k) - enddo - -endif - -! allocate(SST_MONTHLY(nt_lon,nt_lat,nMonths), SSS_MONTHLY(nt_lon,nt_lat,nMonths)) -! allocate(TAUX_MONTHLY(nt_lon,nt_lat,nMonths), TAUY_MONTHLY(nt_lon,nt_lat,nMonths)) -! SST_MONTHLY=0; SSS_MONTHLY=0; TAUX_MONTHLY=0; TAUY_MONTHLY=0 -! iMonth=1 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.01.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly01.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.01.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=2 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.02.interp3600x2431.nc' -! 
fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly02.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.02.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=3 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.03.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly03.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.03.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=4 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.04.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly04.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.04.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=5 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.05.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly05.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.05.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! 
iMonth=6 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.06.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly06.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.06.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=7 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.07.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly07.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.07.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=8 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.08.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly08.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.08.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=9 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.09.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly09.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.09.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! 
call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=10 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.10.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly10.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.10.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=11 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.11.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly11.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.11.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=12 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.12.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly12.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.12.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() - -! generate initial conditions -call get_init_conditions - -! dump new grid to netCDF -write(6,*) ' calling write_grid' -write(6,*) -call write_grid - -! dump graph for partioning -write(6,*) ' call write_graph' -write(6,*) -call write_graph - -! 
write OpenDx file -if (write_OpenDX_flag) then - write(6,*) ' calling write_OpenDX' - write(6,*) - call write_OpenDX( on_a_sphere, & - nCellsNew, & - nVerticesNew, & - nEdgesNew, & - vertexDegreeNew, & - maxEdgesNew, & - xCellNew, & - yCellNew, & - zCellNew, & - xVertexNew, & - yVertexNew, & - zVertexNew, & - xEdgeNew, & - yEdgeNew, & - zEdgeNew, & - nEdgesOnCellNew, & - verticesOnCellNew, & - verticesOnEdgeNew, & - cellsOnVertexNew, & - edgesOnCellNew, & - areaCellNew, & - maxLevelCellNew, & - meshDensityNew, & - bottomDepthNew, & - temperatureNew(1,1,:), & - kiteAreasOnVertexNew ) -endif - -!do iCell=1,nCellsNew - !ulon = 1.0; ulat = 0.0 - !xin = xCellNew(iCell); yin = yCellNew(iCell); zin = zCellNew(iCell) - !call transform_from_lonlat_to_xyz(xin, yin, zin, ulon, ulat, ux, uy, uz) - !if(abs(ux).lt.1.0e-10) ux=0.0 - !if(abs(uy).lt.1.0e-10) uy=0.0 - !if(abs(uz).lt.1.0e-10) uz=0.0 - !write(20,10) ux, uy, uz - !10 format(3e25.10) -!enddo - -write(6,*) ' finished' - -contains - -subroutine write_graph -implicit none -integer :: m,itmp(maxEdgesNew),k - - m=nEdgesNew - do i=1,nCellsNew - do j=1,nEdgesOnCellNew(i) - if(cellsOnCellNew(j,i).eq.0) m=m-1 - enddo - enddo - - open(42,file='graph.info',form='formatted') - write(42,*) nCellsNew, m - do i=1,nCellsNew - itmp = 0; k = 0; - do j=1,nEdgesOnCellNew(i) - if(cellsOnCellNew(j,i).gt.0) then - k=k+1; itmp(k)=cellsOnCellNew(j,i) - endif - enddo - write(42,'(1x,12i8)',advance='no') (itmp(m),m=1,k) - write(42,'(1x)') - end do - close(42) -end subroutine write_graph - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Step 3: Check get_init_conditions routine for initial T&S, thickness, etc. -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-subroutine get_init_conditions -implicit none -real :: halfwidth, dtr, pi, p(3), q(3), xin, yin, zin, ulon, ulat, stress, n1, n2, distance, r, temp_t, temp_s -real :: dotProd, rho_ref, rho_delta, work, factor, r1, deltay, b -real :: y0_embayment, surfaceTemperature, bottomTemperature, betaTemperature,bottomMinTemp, & - y_a,y_0,A_0, midDepth(nVertLevelsMod), maxMidDepth -real :: bottomSalinity, surfaceSalinity, cavitySalinity, refSalinity, maxDepth, xWindStress, yWindStress -real :: x_0, x_1,x_2,x_3,width,cff1, deltaTemperature -real :: totalSubIceThickness, y1,y2,y3, d1,d2,d3 -real :: lat1, lat2, lat3, lat4, T1, T2 -integer :: iTracer, ix, iy, ndata, i, j, k, ixt, iyt, ncull, jcount, iNoData, kdata(nVertLevelsMod), iMonth, kMax -logical :: flag_lat - -pi = 4.0*atan(1.0) -dtr = pi/180.0 - -! defaults -layerThicknessNew = 100.0 -temperatureNew = 1.0 -salinityNew = 1.0 -tracer1New = 1.0 -normalVelocityNew = 0 -surfaceWindStressNew = 0 -densityNew = 1025.0 - -! initialize boundary layer fields to reasonable values -! specific cases can overwrite as desired -boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -if (initial_conditions.eq.'uniform_TS') then - - temperatureNew = uniform_T !10.0 - salinityNew = uniform_S !34.0 - tracer1New = uniform_tracer1 !1.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1, nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'gradT_lat') then - -! This is a very simple test case that can test grid connectivity, Coriolis -! force, and pressure gradient on a sphere. It is simply a temperature -! gradient in latitude, which produces two zonal jets. 
- - salinityNew = uniform_S - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1, nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - - lat1 = 30.0*pi/180.0 - lat2 = 60.0*pi/180.0 - lat3 = 80.0*pi/180.0 - lat4 = 85.0*pi/180.0 - T1 = 100.0 - T2 = 0.0 - do iCell = 1,nCellsNew - if(abs(latCellNew(iCell)) < lat1 ) then - temperatureNew(1,:,iCell) = T1 - elseif(abs(latCellNew(iCell)) < lat2 ) then - temperatureNew(1,:,iCell) = T1 + (T2-T1)*(abs(latCellNew(iCell)) - lat1)/(lat2-lat1) - else - temperatureNew(1,:,iCell) = T2 - endif - - if(abs(latCellNew(iCell)) < lat3 ) then - tracer1New(1,:,iCell) = sin(lonCellNew(iCell)*10) - elseif(abs(latCellNew(iCell)) > lat4 ) then - tracer1New(1,:,iCell) = 0 - else - tracer1New(1,:,iCell) = sin(lonCellNew(iCell)*10) * (abs(latCellNew(iCell)) - lat4)/(lat3-lat4) - endif - - enddo - -elseif (initial_conditions.eq.'lock_exchange_Ilicak1') then - - do i = 1,nCellsNew - if(yCellNew(i) < 32.0e3) then - temperatureNew(1,:,i) = 5.0 - else - temperatureNew(1,:,i) = 30.0 - endif - enddo - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'Ilicak2_overflow') then - - do i = 1,nCellsNew - if(yCellNew(i) < 20000) then - temperatureNew(1,:,i) = 10.0 - else - temperatureNew(1,:,i) = 20.0 - endif - enddo - - salinityNew(1,:,:) = 35.0 - Tracer1New(1,:,:) = 1.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'Ilicak2_overflow_sigma') then - - do i = 1,nCellsNew - if(yCellNew(i) < 20000) then - temperatureNew(1,:,i) = 10.0 - else - temperatureNew(1,:,i) = 20.0 - endif - enddo - - salinityNew(1,:,:) = 35.0 - Tracer1New(1,:,:) = 1.0 - normalVelocityNew = 0.0 - 
surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = bottomDepthNew(iCell) / nVertLevelsMOD - enddo - enddo - -elseif (initial_conditions.eq.'internal_wave_Ilicak3') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - surfaceTemperature = 20.1 - bottomTemperature = 10.1 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = (surfaceTemperature - bottomTemperature) & - * ((maxMidDepth - midDepth(k))/maxMidDepth) + bottomTemperature - enddo - - A_0 = 2.0 - y_0 = 125.0e3 - y_a = 50.0e3 - do i = 1, nCellsNew - if ( abs(yCellNew(i) - y_0) < y_a) then - do k = 2, nVertLevelsMOD - betaTemperature = -A_0*cos(0.5*pi*(yCellNew(i)-y_0)/y_a) & - *sin(pi*refBottomDepth(k-1)/(maxMidDepth - midDepth(1))) - temperatureNew(1, k, i) = temperatureNew(1, k, i) + betaTemperature - end do - endif - enddo - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'baroclinic_channel_Ilicak4') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - - ! Set up stratification on northern half - surfaceTemperature = 13.1 - bottomTemperature = 10.1 - deltaTemperature = 1.2 - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = bottomTemperature & - + (surfaceTemperature - bottomTemperature) * ((-midDepth(k)+maxMidDepth)/maxMidDepth) - enddo - - y_0 = 250.0e3 - x_0 = 0.0e3 - x_1 = 160.0e3 - x_2 = 110.0e3 - x_3 = 130.0e3 - width = 40.0e3 - do i = 1, nCellsNew - ! sine wave defines southern boundary between two temperatures. 
- cff1 = width * sin (6.0 * 3.141592 * (xCellNew(i) - x_0)/(x_1 - x_0)) - - ! stratification on south half - if( yCellNew(i) < y_0 - cff1 ) then - do k = 1, nVertLevelsMOD - temperatureNew(1,k,i) = temperatureNew(1,k,i) - deltaTemperature - end do - - ! linear interpolation between two halves - else if( yCellNew(i) .ge. y_0 - cff1 .and. yCellNew(i) .le. y_0 - cff1+width) then - do k = 1, nVertLevelsMOD - temperatureNew(1,k,i) = temperatureNew(1,k,i) & - - deltaTemperature*(1.0 -( yCellNew(i) - (y_0 - cff1)) / (1.0 * width)) - end do - endif - enddo - - ! Add an additional perturbation to southern end of third wave. - do i = 1, nCellsNew - cff1 = 0.5 * width * sin(1.0 * 3.141592 * (xCellNew(i) - x_2)/(x_3 - x_2)) - if( yCellNew(i) .ge. y_0 - cff1-0.5*width .and. & - yCellNew(i) .le. y_0 - cff1+0.5*width .and. & - xCellNew(i) .ge. x_2 .and. xCellNew(i) .le. x_3) then - do k = 1, nVertLevelsMOD - temperatureNew(1,k,i) = temperatureNew(1,k,i) + 0.3 * (1.0 - ( (yCellNew(i)-(y_0-cff1))/(0.5*width))) - end do - endif - end do - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'sub_ice_shelf_test1') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1=30.0e3 - y2=60.0e3 - y3=60.0e3+iceShelfEdgeWidth - d1=iceShelfCavityThickness - d2=iceShelfCavityThickness+500 - d3=layer_thickness_total_max - - bottomSalinity = 34.7 - surfaceSalinity = 34.5 - - temperatureNew(1,:,:) = 1.0 - normalVelocityNew = 0.0 - - surfaceWindStressNew = 0.0 - - ! 
set up sub ice shelf thicknesses - do iCell=1,nCellsNew - if (yCellNew(iCell) < y1 ) then - totalSubIceThickness = d1 - elseif (yCellNew(iCell) < y2 ) then - totalSubIceThickness = d1 + (d2-d1)*(yCellNew(iCell)-y1)/(y2-y1) - elseif (yCellNew(iCell) < y3 ) then - totalSubIceThickness = d2 + (d3-d2)*(yCellNew(iCell)-y2)/(y3-y2) - else - totalSubIceThickness = d3 - endif - layerThicknessNew(1,:,iCell) = totalSubIceThickness/nVertLevelsMOD - - enddo - - ! Set up salinity stratification - do iCell=1,nCellsNew - midDepth(nVertLevelsMod) = layer_thickness_total_max - 0.5*layerThicknessNew(1,nVertLevelsMod,iCell) - do k=nVertLevelsMod-1,1,-1 - midDepth(k) = midDepth(k+1) - 0.5*(layerThicknessNew(1,k+1,iCell)+layerThicknessNew(1,k,iCell)) - enddo - - ! Set up salinity stratification - do k = 1, nVertLevelsMOD - salinityNew(1,k,iCell) = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - enddo - - enddo - -elseif (initial_conditions.eq.'sub_ice_shelf_test2') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1=30.0e3 - y2=60.0e3 - y3=60.0e3+iceShelfEdgeWidth - d1=iceShelfCavityThickness - d2=iceShelfCavityThickness+500 - d3=layer_thickness_total_max - - bottomSalinity = 34.7 - surfaceSalinity = 34.5 - cavitySalinity = 34.3 - - temperatureNew(1,:,:) = 1.0 - normalVelocityNew = 0.0 - temperatureRestoreNew(:) = 1.0 - salinityRestoreNew(:) = surfaceSalinity - - ! set up sub ice shelf thicknesses - do iCell=1,nCellsNew - if (yCellNew(iCell) < y1 ) then - totalSubIceThickness = d1 - elseif (yCellNew(iCell) < y2 ) then - totalSubIceThickness = d1 + (d2-d1)*(yCellNew(iCell)-y1)/(y2-y1) - elseif (yCellNew(iCell) < y3 ) then - totalSubIceThickness = d2 + (d3-d2)*(yCellNew(iCell)-y2)/(y3-y2) - else - totalSubIceThickness = d3 - endif - layerThicknessNew(1,:,iCell) = totalSubIceThickness/nVertLevelsMOD - enddo - - ! 
Set up salinity stratification - do iCell=1,nCellsNew - midDepth(nVertLevelsMod) = layer_thickness_total_max - 0.5*layerThicknessNew(1,nVertLevelsMod,iCell) - do k=nVertLevelsMod-1,1,-1 - midDepth(k) = midDepth(k+1) - 0.5*(layerThicknessNew(1,k+1,iCell)+layerThicknessNew(1,k,iCell)) - enddo - - ! Set up salinity stratification - if (yCellNew(iCell) < y1 ) then - salinityNew(1,:,iCell) = cavitySalinity - elseif (yCellNew(iCell) < y2 ) then - do k = 1, nVertLevelsMOD - ! Salinity of stratified column in open ocean - refSalinity = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - ! linearly interpolate horizontally between cavity and open ocean - salinityNew(1,k,iCell) = cavitySalinity & - + (refSalinity - cavitySalinity) * (yCellNew(iCell) - y1)/(y2 - y1) - enddo - else - do k = 1, nVertLevelsMOD - salinityNew(1,k,iCell) = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - enddo - endif - - enddo - - ! set up wind stress - xWindStress = 0.0; - yWindStress = surfaceWindStressMax; - do iEdge=1,nEdgesNew - if (yEdgeNew(iEdge) < y3 ) then - surfaceWindStressNew(iEdge) = 0.0 - else - surfaceWindStressNew(iEdge) = & - xWindStress*cos(angleEdgeNew(iEdge)) & - + yWindStress*sin(angleEdgeNew(iEdge)) - endif - enddo - -elseif (initial_conditions.eq.'sub_ice_shelf_test3') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1=30.0e3 - y2=60.0e3 - y3=60.0e3+iceShelfEdgeWidth - d1=iceShelfCavityThickness - d2=iceShelfCavityThickness+500 - d3=layer_thickness_total_max - - bottomSalinity = 34.7 - surfaceSalinity = 34.5 - cavitySalinity = 34.3 - - temperatureNew(1,:,:) = 1.0 - normalVelocityNew = 0.0 - temperatureRestoreNew(:) = 1.0 - salinityRestoreNew(:) = surfaceSalinity - - ! 
set up sub ice shelf thicknesses - do iCell=1,nCellsNew - if (yCellNew(iCell) < y1 ) then - totalSubIceThickness = d1 - elseif (yCellNew(iCell) < y2 ) then - totalSubIceThickness = d1 + (d2-d1)*(yCellNew(iCell)-y1)/(y2-y1) - elseif (yCellNew(iCell) < y3 ) then - totalSubIceThickness = d2 + (d3-d2)*(yCellNew(iCell)-y2)/(y3-y2) - else - totalSubIceThickness = d3 - endif - ! subtract out the bottom land cells, divide by remaining number of cells. - layerThicknessNew(1,1:maxLevelCellNew(iCell),iCell) = (totalSubIceThickness - (layer_thickness_total_max - bottomDepthNew(iCell))) / maxLevelCellNew(iCell) - layerThicknessNew(1,maxLevelCellNew(iCell)+1:nVertLevelsMod,iCell) = layer_thickness_total_max/nVertLevelsMod - enddo - - ! Set up salinity stratification - do iCell=1,nCellsNew - kMax = maxLevelCellNew(iCell) - midDepth(kMax) = bottomDepthNew(iCell) - 0.5*layerThicknessNew(1,kMax,iCell) - do k=kMax-1,1,-1 - midDepth(k) = midDepth(k+1) - 0.5*(layerThicknessNew(1,k+1,iCell)+layerThicknessNew(1,k,iCell)) - enddo - - ! Set up salinity stratification - if (yCellNew(iCell) < y1 ) then - salinityNew(1,:,iCell) = cavitySalinity - elseif (yCellNew(iCell) < y2 ) then - do k = 1, kMax - ! Salinity of stratified column in open ocean - refSalinity = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - ! linearly interpolate horizontally between cavity and open ocean - salinityNew(1,k,iCell) = cavitySalinity & - + (refSalinity - cavitySalinity) * (yCellNew(iCell) - y1)/(y2 - y1) - enddo - else - do k = 1, kMax - salinityNew(1,k,iCell) = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - enddo - endif - - enddo - - ! 
set up wind stress - xWindStress = 0.0; - yWindStress = surfaceWindStressMax; - do iEdge=1,nEdgesNew - if (yEdgeNew(iEdge) < y3 ) then - surfaceWindStressNew(iEdge) = 0.0 - else - surfaceWindStressNew(iEdge) = & - xWindStress*cos(angleEdgeNew(iEdge)) & - + yWindStress*sin(angleEdgeNew(iEdge)) - endif - enddo - - -elseif (initial_conditions.eq.'unitTestCVMixConvection') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - - ! Set up stratification on northern half - surfaceTemperature = 13.0 - bottomTemperature = 10.0 - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = bottomTemperature & - + (surfaceTemperature - bottomTemperature) * ((-midDepth(k)+maxMidDepth)/maxMidDepth) - enddo - - temperatureRestoreNew(:) = surfaceTemperature - 10.0 - salinityRestoreNew(:) = salinityNew(1,1,:) - boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = 0.001*cos(angleEdgeNew(iEdge)) - enddo - -elseif (initial_conditions.eq.'unitTestCVMixShear') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - - ! 
Set up stratification on northern half - surfaceTemperature = 15.0 - bottomTemperature = 5.0 - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = bottomTemperature & - + (surfaceTemperature - bottomTemperature) * ((-midDepth(k)+maxMidDepth)/maxMidDepth) - enddo - - temperatureRestoreNew(:) = surfaceTemperature + 10.0 - salinityRestoreNew(:) = salinityNew(1,1,:) - boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = 0.10*cos(angleEdgeNew(iEdge)) - enddo - -elseif (initial_conditions.eq.'DOME_3D_overflow') then - - y0_embayment = 600.0e3 ! y location of beginning of embayment - Tracer1New(1,:,:) = 0.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - do k=1,nVertLevelsMOD - temperatureNew(1,k,:) = 20-0.5*k - salinityNew(1,k,:) = 35.0 - enddo - - ! Change embayment fluid to have tracer 1 and cold water. - ! This is only used for a plug of cold water in the initial conditions, - ! rather than forced cold water at the inlet. - !do iCell=1,nCellsNew - ! if (yCellNew(iCell).gt.y0_embayment) then - ! temperatureNew(1,:,iCell) = 0.0 - ! tracer1New(1,:,iCell) = 1.0 - ! endif - !enddo - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'SOMA_TS') then - - temperatureNew = -99.0 - salinityNew = -99.0 - rho_ref=1000.0 - rho_delta=5.0 - do iCell=1,nCellsNew - do k = 1,maxLevelCellNew(iCell) - layerThicknessNew(1,k,iCell) = hZLevel(k) - if(k.eq.1) r1 = -refBottomDepth(k)/2.0 - if(k.ne.1) r1 = -(refBottomDepth(k)+refBottomDepth(k-1))/2.0 - work = rho_ref - (1.0-0.05)*rho_delta*tanh(r1/300) - 0.05*rho_delta*r1/2500 - densityNew(1,k,iCell) = work - factor = (rho_ref-work)/2.5e-1 - temperatureNew(1,k,iCell) = 20.0 + factor - factor = -r1/1250.0 - salinityNew(1,k,iCell) = 34.0 + factor ! 
salinity - enddo - enddo - - surfaceWindStressNew = 0.0 - do iEdge=1,nEdgesNew - xin = xEdgeNew(iEdge) - yin = yEdgeNew(iEdge) - zin = zEdgeNew(iEdge) - rlon = lonEdgeNew(iEdge) - rlat = latEdgeNew(iEdge) - - b=1.25e6 - deltay = sphere_radius * ( rlat - 35.0*dtr) - factor = 1.0-0.5*deltay/b - r1 = factor * 0.1 * exp( -(deltay/b)**2 ) * cos(pi*deltay/b) - ulon = r1 - ulat = 0.0 - call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) - if(boundaryEdgeNew(1,iEdge).eq.1) then - surfaceWindStressNew(iEdge) = 0.0 - else - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) - q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) - q = q - p - call unit_vector_in_3space(q) - surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) - endif - - enddo - - ! set up some restoring in case we want diabatic forcing - ! T gradient is 0.5C per degree - do iCell=1,nCellsNew - rlat = latCellNew(iCell) - temperatureRestoreNew(iCell) = 15.0 - 0.5*(rlat/dtr - 35.0) - salinityRestoreNew(iCell) = salinityNew(1,1,iCell) - enddo - - ! initialized boundary layer fields - boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -elseif (initial_conditions.eq.'isopycnal_3layer') then - - fCellNew(:) = 0.0 - fEdgeNew(:) = 0.0 - fVertexNew(:) = 0.0 - bottomDepthNew(:) = 0.0 - normalVelocityNew(:,:,:) = 0.0 - - ! setting for three levels - Set h values for isopycnal system - write(6,*) ' setting three levels for isopycnal system' - layerThicknessNew(1,1,:) = 500.0 - layerThicknessNew(1,2,:) = 1250.0 - layerThicknessNew(1,3,:) = 3250.0 - bottomDepthNew(:) = -( layerThicknessNew(1,1,:) + layerThicknessNew(1,2,:) + layerThicknessNew(1,3,:) ) - - ! Noise is meant to make the flow unstable at some point - ! 
Not needed for all simulations - write(6,*) ' adding noise to layer thickness' - r = 0.0 - do i=1,nCellsNew - work1(i) = float(i) / float(nCellsNew) - call random_number(work1(i)) - r = r + work1(i) - enddo - r = r/float(nCells) - work1(:) = work1(:) - r - layerThicknessNew(1,1,:) = layerThicknessNew(1,1,:) + 1.0*work1(:) - layerThicknessNew(1,2,:) = layerThicknessNew(1,2,:) - 1.0*work1(:) - - ! Specify Density values for isopycnal levels - write(6,*) ' setting density - depricate soon' - densityNew(1,:,:) = 1010.0 - densityNew(1,2,:) = 1011.0 - densityNew(1,3,:) = 1012.0 - - eos_linear_alpha = 2.55e-1 - eos_linear_beta = 7.64e-1 - eos_linear_Tref = 19.0 - eos_linear_Sref = 35.0 - eos_linear_densityref = 1025.022 - - ! set salinity for isopycnal levels - salinityNew = eos_linear_Sref - - ! set temperature for isopycnal levels. Just invert linear eos. - write(6,*) ' setting temperature' - do k=1,nVertLevelsMOD - temperatureNew(1,k,:) = eos_linear_Tref + (eos_linear_densityref - densityNew(1,k,:))/eos_linear_alpha - enddo - - ! 
set forcing for isopycnal levels - write(6,*) 'setting surfaceWindStressNew - wind forcing' - surfaceWindStressNew = 0.0 - if(on_a_sphere.eq.'YES') then - latmin = -60*dtr - latmax = -10*dtr - latmid = -35*dtr - latmin = minval(latEdgeNew) - latmax = maxval(latEdgeNew) - latmid = (latmin+latmax)/2.0 - r = 10.0*dtr - - write(6,*) 'surfaceWindStressNew info', latmin, latmax, latmid, r - do i = 1,nEdgesNew - lattmp = latEdgeNew(i) - iCell1 = cellsOnEdgeNew(1,i) - iCell2 = cellsOnEdgeNew(2,i) - if(iCell1>0.and.iCell2>0) then - pert = surfaceWindStressMax * exp(-(lattmp-latmid)**2/(r**2)) - - ulat = latEdgeNew(i) - ulon = lonEdgeNew(i) + 0.05 - - call convert_lx(xin, yin, zin, 1.0, ulat, ulon) - - xin = xin - xEdgeNew(i) - yin = yin - yEdgeNew(i) - zin = zin - zEdgeNew(i) - - dotProd = sqrt(xin**2 + yin**2 + zin**2) - xin = xin/dotProd - yin = yin/dotProd - zin = zin/dotProd - - dotProd = normalsNew(1,i)*xin + normalsNew(2,i)*yin + normalsNew(3,i)*zin - - surfaceWindStressNew(i) = pert * dotProd - write(8,*) lattmp,pert,dotProd - endif - enddo - else - ymin = minval(yEdgeNew) - ymax = maxval(yEdgeNew) - r = 3.0e5 - ymid = (ymax+ymin)/2 - do i = 1,nEdgesNew - ytmp = yEdgeNew(i) - iCell1 = cellsOnEdgeNew(1,i) - iCell2 = cellsOnEdgeNew(2,i) - if(iCell1>0.and.iCell2>0) then - pert = surfaceWindStressMax * exp(-(ytmp-ymid)**2/(r**2)) - write(8,*) ytmp,pert - surfaceWindStressNew(i) = pert * normalsNew(1,i) - endif - enddo - endif - write(6,*) ' surfaceWindStressNew ', minval(surfaceWindStressNew), maxval(surfaceWindStressNew) - -elseif (initial_conditions.eq.'realistic_PHC') then - -surfaceWindStressNew = 0.0 -surfaceWindStressNew = 0.0 -do iEdge=1,nEdgesNew - xin = xEdgeNew(iEdge) - yin = yEdgeNew(iEdge) - zin = zEdgeNew(iEdge) - rlon = lonEdgeNew(iEdge)/dtr - rlat = latEdgeNew(iEdge)/dtr - ix = nint(rlon/0.1 - 0.05) + nu_lon + 1 - ix = mod(ix,nu_lon)+1 - iy = nu_lat - do jcount=1,nu_lat - if(u_lat(jcount).gt.rlat) then - iy = jcount - exit - endif - enddo - ulon = 
TAUX(ix,iy) - ulat = TAUY(ix,iy) - !write(6,*) rlon, t_lon(ix), rlat, t_lat(iy) - - call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) - if(boundaryEdgeNew(1,iEdge).eq.1) then - surfaceWindStressNew(iEdge) = 0.0 - surfaceWindStressNewZonal(iEdge) = 0.0 - surfaceWindStressNewMeridional(iEdge) = 0.0 - else - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) - q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) - q = q - p - call unit_vector_in_3space(q) - surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) - surfaceWindStressNewZonal(iEdge) = cos(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - surfaceWindStressNewMeridional(iEdge) = sin(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - endif - -enddo - - -! for acc runs, increase wind strength for Southern Ocean -if (amplify_acc_wind) then - print *, 'amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat' - print *, amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = surfaceWindStressNew(iEdge) & - * (1.0 + (amp_wind_factor-1.0)*0.5 & - *(1.0-tanh( (latEdgeNew(iEdge)/dtr-amp_wind_center_lat)/amp_wind_spread_lat) ) ) - enddo -endif - -!set tracers at a first guess -temperatureNew = -99.0 -salinityNew = -99.0 -do iCell=1,nCellsNew -do k = 1,maxLevelCellNew(iCell) - temperatureNew(1,k,iCell) = 20.0 - 10.0*k/nVertLevelsMod - salinityNew(1,k,iCell) = 34.0 ! salinity -enddo -enddo - -! update T and S field with PHC data -if( load_phc_IC) then -iNoData = 0 -do iCell=1,nCellsNew - layerThicknessNew(1,:,iCell) = dz(:) - ! 
if(mod(iCell,100).eq.0) write(6,*) 'load_phc_IC t and s',iCell - rlon = lonCellNew(iCell)/dtr - rlat = latCellNew(iCell)/dtr - do j = 1, nt_lon - if(t_lon(j).gt.rlon) then - ix = j - exit - end if - enddo - iy = nt_lat - do j=1,nt_lat - if(t_lat(j).gt.rlat) then - iy = j - exit - endif - enddo - do k=1,maxLevelCellNew(iCell) - ndata = 0; temp_t = 0; temp_s = 0; kdata(:) = 0 - - ndata = ndata + 1 - temp_t = temp_t + TEMP(ix,iy,k) - temp_s = temp_s + SALT(ix,iy,k) - - if(ndata.gt.0) then - temperatureNew(1,k,iCell) = temp_t / float(ndata) - salinityNEW(1,k,iCell) = temp_s / float(ndata) - kdata(k) = 1 - else - if(k.eq.1) iNoData = iNoData + 1 - if(k.ge.3) then - if(kdata(k-1).eq.1) maxLevelCellNew(iCell) = k-1 - endif - endif - enddo -enddo - -! do a couple of smoothing passes -do iter=1,5 -do iCell=1,nCellsNew -do k=1,maxLevelCellNew(iCell) - ndata=1 - temp_t = temperatureNew(1,k,iCell) - temp_s = salinityNew(1,k,iCell) - do j=1,nEdgesOnCellNew(iCell) - jCell = cellsOnCellNew(j,iCell) - if(jCell.gt.0) then - if(maxLevelCellNew(jCell).ge.k) then - temp_t = temp_t + temperatureNew(1,k,jCell) - temp_s = temp_s + salinityNew(1,k,jCell) - ndata = ndata + 1 - endif - endif - enddo - temperatureNew(1,k,iCell) = temp_t / ndata - salinityNew(1,k,iCell) = temp_s / ndata -enddo -enddo -write(6,*) maxval(temperatureNew(1,1,:)),maxval(salinityNew(1,1,:)) -enddo - -write(6,*) iNoData, nCellsNew - -temperatureRestoreNew(:) = temperatureNew(1,1,:) -salinityRestoreNew(:) = salinityNew(1,1,:) -boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -endif ! 
load_phc_IC - -elseif (initial_conditions.eq.'realistic_WOCE') then - -surfaceWindStressNew = 0.0 -surfaceWindStressNew = 0.0 -do iEdge=1,nEdgesNew - xin = xEdgeNew(iEdge) - yin = yEdgeNew(iEdge) - zin = zEdgeNew(iEdge) - rlon = lonEdgeNew(iEdge)/dtr - rlat = latEdgeNew(iEdge)/dtr - ix = nint(rlon/0.1 - 0.05) + nu_lon + 1 - ix = mod(ix,nu_lon)+1 - iy = nu_lat - do jcount=1,nu_lat - if(t_lat(jcount).gt.rlat) then - iy = jcount - exit - endif - enddo - ulon = TAUX(ix,iy) - ulat = TAUY(ix,iy) - !write(6,*) rlon, t_lon(ix), rlat, t_lat(iy) - - call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) - if(boundaryEdgeNew(1,iEdge).eq.1) then - surfaceWindStressNew(iEdge) = 0.0 - surfaceWindStressNewZonal(iEdge) = 0.0 - surfaceWindStressNewMeridional(iEdge) = 0.0 - else - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) - q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) - q = q - p - call unit_vector_in_3space(q) - surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) - surfaceWindStressNewZonal(iEdge) = cos(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - surfaceWindStressNewMeridional(iEdge) = sin(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - endif - -! if(monthly_forcing) then -! do iMonth=1,nMonths -! ulon = TAUX_MONTHLY(ix,iy,iMonth) -! ulat = TAUY_MONTHLY(ix,iy,iMonth) -! call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) -! if(boundaryEdgeNew(1,iEdge).eq.1) then -! surfaceWindStressNew(iEdge) = 0.0 -! else -! iCell1 = cellsOnEdgeNew(1,iEdge) -! iCell2 = cellsOnEdgeNew(2,iEdge) -! p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) -! q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) -! q = q - p -! call unit_vector_in_3space(q) -! ! repeat -! surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) -! endif -! enddo -! else -! surfaceWindStressNew(:) = 0.0 -! 
end if - -enddo - - -! for acc runs, increase wind strength for Southern Ocean -if (amplify_acc_wind) then - print *, 'amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat' - print *, amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = surfaceWindStressNew(iEdge) & - * (1.0 + (amp_wind_factor-1.0)*0.5 & - *(1.0-tanh( (latEdgeNew(iEdge)/dtr-amp_wind_center_lat)/amp_wind_spread_lat) ) ) - enddo -endif - -!set tracers at a first guess -temperatureNew = -99.0 -salinityNew = -99.0 -do iCell=1,nCellsNew -do k = 1,maxLevelCellNew(iCell) - temperatureNew(1,k,iCell) = 20.0 - 10.0*k/nVertLevelsMod - salinityNew(1,k,iCell) = 34.0 ! salinity -enddo -enddo - -! update T and S field with WOCE data -if(load_woce_IC) then -iNoData = 0 -do iCell=1,nCellsNew - layerThicknessNew(1,:,iCell) = dz(:) - ! if(mod(iCell,100).eq.0) write(6,*) 'load_woce_IC t and s',iCell - rlon = lonCellNew(iCell)/dtr - rlat = latCellNew(iCell)/dtr - ix = nint(rlon/0.1 - 0.05) + nt_lon + 1 - ix = mod(ix,nt_lon)+1 - iy = nt_lat - do j=1,nt_lat - if(t_lat(j).gt.rlat) then - iy = j - exit - endif - enddo - do k=1,maxLevelCellNew(iCell) - ndata = 0; temp_t = 0; temp_s = 0; kdata(:) = 0 - - do i=-15,15 - ixt = ix + 8*i - if(ixt.lt.1) then - ixt = ixt + nt_lon - elseif(ixt.gt.nt_lon) then - ixt = ixt - nt_lon - endif - do j=-15,15 - iyt = iy + 8*j - flag_lat = .true. - if(iyt.lt.1.or.iyt.gt.nt_lat) then - iyt = 1 - flag_lat = .false. - endif - if(TEMP(ixt,iyt,k).gt.-10.0.and.flag_lat) then - ndata = ndata + 1 - temp_t = temp_t + TEMP(ixt,iyt,k) - temp_s = temp_s + SALT(ixt,iyt,k) - endif - enddo - enddo - - if(ndata.gt.0) then - temperatureNew(1,k,iCell) = temp_t / float(ndata) - salinityNEW(1,k,iCell) = temp_s / float(ndata) - kdata(k) = 1 - else - if(k.eq.1) iNoData = iNoData + 1 - if(k.ge.3) then - if(kdata(k-1).eq.1) maxLevelCellNew(iCell) = k-1 - endif - endif - enddo -enddo - -! 
do a couple of smoothing passes -do iter=1,5 -do iCell=1,nCellsNew -do k=1,maxLevelCellNew(iCell) - ndata=1 - temp_t = temperatureNew(1,k,iCell) - temp_s = salinityNew(1,k,iCell) - do j=1,nEdgesOnCellNew(iCell) - jCell = cellsOnCellNew(j,iCell) - if(jCell.gt.0) then - if(maxLevelCellNew(jCell).ge.k) then - temp_t = temp_t + temperatureNew(1,k,jCell) - temp_s = temp_s + salinityNew(1,k,jCell) - ndata = ndata + 1 - endif - endif - enddo - temperatureNew(1,k,iCell) = temp_t / ndata - salinityNew(1,k,iCell) = temp_s / ndata -enddo -enddo -write(6,*) maxval(temperatureNew(1,1,:)),maxval(salinityNew(1,1,:)) -enddo - -write(6,*) iNoData, nCellsNew - -temperatureRestoreNew(:) = temperatureNew(1,1,:) -salinityRestoreNew(:) = salinityNew(1,1,:) -boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -!if(monthly_forcing) then - !do iMonth=1,nMonths - !iNoData = 0 - !do iCell=1,nCellsNew - !! if(mod(iCell,100).eq.0) write(6,*) 'load_woce_IC t and s RESTORE',iCell - !rlon = lonCellNew(iCell)/dtr - !rlat = latCellNew(iCell)/dtr - !ix = nint(rlon/0.1 - 0.05) + nt_lon + 1 - !ix = mod(ix,nt_lon)+1 - !iy = nt_lat - !do j=1,nt_lat - !if(t_lat(j).gt.rlat) then - !iy = j - !exit - !endif - !!enddo ! j - !k=1 - !ndata = 0; temp_t = 0; temp_s = 0 - !do i=-15,15 - !ixt = ix + 8*i - !if(ixt.lt.1) then - !ixt = ixt + nt_lon - !elseif(ixt.gt.nt_lon) then - !ixt = ixt - nt_lon - !endif - !do j=-15,15 - !iyt = iy + 8*j - !flag_lat = .true. - !if(iyt.lt.1.or.iyt.gt.nt_lat) then - !iyt = 1 - !flag_lat = .false. - !endif - !if(SST_MONTHLY(ixt,iyt,iMonth).gt.-10.0.and.flag_lat) then - !ndata = ndata + 1 - !temp_t = temp_t + SST_MONTHLY(ixt,iyt,iMonth) - !temp_s = temp_s + SSS_MONTHLY(ixt,iyt,iMonth) - !endif - !enddo !j - !enddo !i - ! 
- !if(ndata.gt.0) then - !temperatureRestoreMonthlyNew(iMonth,iCell) = temp_t / float(ndata) - !salinityRestoreMonthlyNew(iMonth,iCell) = temp_s / float(ndata) - !else - !temperatureRestoreMonthlyNew(iMonth,iCell) = temperatureNew(1,1,iCell) - !salinityRestoreMonthlyNew(iMonth,iCell) = salinityNew(1,1,iCell) - !endif - ! - !enddo ! iCell - !enddo ! iMonth - ! - !! do a couple of smoothing passes - !do iter=1,5 - !do iCell=1,nCellsNew - !k=1 - !ndata=1 - !temp_t = temperatureRestoreMonthlyNew(iMonth,iCell) - !temp_s = salinityRestoreMonthlyNew(iMonth,iCell) - !do j=1,nEdgesOnCellNew(iCell) - !jCell = cellsOnCellNew(j,iCell) - !if(jCell.gt.0) then - !if(maxLevelCellNew(jCell).ge.k) then - !temp_t = temp_t + temperatureRestoreMonthlyNew(iMonth,iCell) - !temp_s = temp_s + salinityRestoreMonthlyNew(iMonth,iCell) - !ndata = ndata + 1 - !endif - !endif - !enddo ! j - !temperatureRestoreMonthlyNew(iMonth,iCell) = temp_t / ndata - !salinityRestoreMonthlyNew(iMonth,iCell) = temp_s / ndata - !enddo ! iCell - !enddo ! iter -!else -! temperatureRestoreMonthlyNew(:,:) = 0.0 -! salinityRestoreMonthlyNew(:,:) = 0.0 -!end if - -endif ! load_woce_IC - -!repeat -!do iMonth=1,12 -! temperatureRestoreMonthlyNew(iMonth,:) = temperatureRestoreNew(:) -! salinityRestoreMonthlyNew(iMonth,:) = salinityRestoreNew(:) -!enddo - -else - - print *, ' Incorrect choice of initial_conditions: ',initial_conditions - stop - -endif ! initial_conditions - - ! 
set coriolis parameter for grid - write(6,*) ' setting Coriolis parameter' - if(on_a_sphere.eq.'YES') then - do i = 1,nVerticesNew - fVertexNew(i) = 2.0 * omega * sin(latVertexNew(i)) - enddo - - do i = 1,nEdgesNew - fEdgeNew(i) = 2.0 * omega * sin(latEdgeNew(i)) - enddo - - do i = 1,nCellsNew - fCellNew(i) = 2.0 * omega * sin(latCellNew(i)) - enddo - else - do i = 1,nVerticesNew - fVertexNew(i) = f0 + (yVertexNew(i) - ymid) * beta - enddo - - do i = 1,nEdgesNew - fEdgeNew(i) = f0 + (yEdgeNew(i) - ymid) * beta - enddo - - do i = 1,nCellsNew - fCellNew(i) = f0 + (yCellNew(i) - ymid) * beta - enddo - endif - -write(6,*) ' done get_init_conditions' - -end subroutine get_init_conditions - - -subroutine error_checking -real :: p(3), q(3), r(3), angle, s(3), t(3), dot, mindot, maxdot, b(vertexDegree) -real :: work(nCellsNew) - - -! write -write(6,*) -write(6,*) ' error checking ' -write(6,*) - -! check to see if every edge is normal to associated cells -mindot = 2 -maxdot = -2 -do iEdge=1,nEdgesNew - if(boundaryEdgeNew(1,iEdge).eq.1) cycle - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1)=xCellNew(iCell1); p(2)=yCellNew(iCell1); p(3)=zCellNew(iCell1) - q(1)=xCellNew(iCell2); q(2)=yCellNew(iCell2); q(3)=zCellNew(iCell2) - r(1)=xEdgeNew(iEdge); r(2)=yEdgeNew(iEdge); r(3)=zEdgeNew(iEdge) - call unit_vector_in_3space(p) - call unit_vector_in_3space(q) - call unit_vector_in_3space(r) - t = q - p - s = r - p - call unit_vector_in_3space(t) - call unit_vector_in_3space(s) - dot = s(1)*t(1)+s(2)*t(2)+s(3)*t(3) - if(dot.lt.mindot) mindot=dot - if(dot.gt.maxdot) maxdot=dot -enddo -write(6,10) 'alignment of edges and cells (should be ones)', mindot, maxdot -10 format(a60,5x,2e15.5) - -! 
check to see if every segments connecting cells and vertices are orothogonal' -mindot = 2 -maxdot = -2 -do iEdge=1,nEdgesNew - if(boundaryEdgeNew(1,iEdge).eq.1) cycle - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - iVertex1 = verticesOnEdgeNew(1,iEdge) - iVertex2 = verticesOnEdgeNew(2,iEdge) - p(1)=xCellNew(iCell1); p(2)=yCellNew(iCell1); p(3)=zCellNew(iCell1) - q(1)=xCellNew(iCell2); q(2)=yCellNew(iCell2); q(3)=zCellNew(iCell2) - r(1)=xVertexNew(iVertex1); r(2)=yVertexNew(iVertex1); r(3)=zVertexNew(iVertex1) - s(1)=xVertexNew(iVertex2); s(2)=yVertexNew(iVertex2); s(3)=zVertexNew(iVertex2) - call unit_vector_in_3space(p) - call unit_vector_in_3space(q) - call unit_vector_in_3space(r) - call unit_vector_in_3space(s) - t = q - p - s = s - r - call unit_vector_in_3space(t) - call unit_vector_in_3space(s) - dot = s(1)*t(1)+s(2)*t(2)+s(3)*t(3) - if(dot.lt.mindot) mindot=dot - if(dot.gt.maxdot) maxdot=dot -enddo -write(6,10) 'orthogonality of cell and vertex edges (should be zeros)', mindot, maxdot - -! check that the kiteareas sum to the areatriangle -mindot = 2 -maxdot = -2 -do iVertex=1,nVerticesNew - b = 0 - do i=1,vertexDegree - b(i) = kiteAreasOnVertexNew(i,iVertex) - enddo - angle = sum(b) - if(angle - areaTriangleNew(iVertex).lt.mindot) mindot = angle - areaTriangleNew(iVertex) - if(angle - areaTriangleNew(iVertex).gt.maxdot) maxdot = angle - areaTriangleNew(iVertex) -enddo -write(6,10) ' error in sum of kites and triangles (should be zeroes)', mindot, maxdot - -! 
check that the kiteareas sum to the areaCell -mindot = 2 -maxdot = -2 -work = 0 -do iVertex=1,nVerticesNew - iCell1 = cellsOnVertexNew(1,iVertex) - iCell2 = cellsOnVertexNew(2,iVertex) - iCell3 = cellsOnVertexNew(3,iVertex) - if(iCell1.ne.0) work(iCell1) = work(iCell1) + kiteAreasOnVertexNew(1,iVertex) - if(iCell2.ne.0) work(iCell2) = work(iCell2) + kiteAreasOnVertexNew(2,iVertex) - if(iCell3.ne.0) work(iCell3) = work(iCell3) + kiteAreasOnVertexNew(3,iVertex) -enddo -mindot = minval(areaCellNew - work) -maxdot = maxval(areaCellNew - work) -write(6,10) ' error in sum of kites and cells (should be zeroes)', mindot, maxdot - -!check for connectivity inverses for cells/edges -do iCell=1,nCellsNew - do i=1,nEdgesOnCellNew(iCell) - iEdge=edgesOnCellNew(i,iCell) - if(iEdge.le.0) stop ' iEdge le 0' - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - if(iCell1.ne.iCell.and.iCell2.ne.iCell) stop ' cells/edges inverse failed' - enddo -enddo -write(6,*) ' cellsOnEdge and edgesOnCell are duals for every cell/edge combination' - -!check for connectivity inverses for cells/vertices -do iCell=1,nCellsNew - do i=1,nEdgesOnCellNew(iCell) - iVertex = verticesOnCellNew(i,iCell) - if(iVertex.le.0) stop ' iVertex le 0' - iCell1 = cellsOnVertexNew(1,iVertex) - iCell2 = cellsOnVertexNew(2,iVertex) - iCell3 = cellsOnVertexNew(3,iVertex) - ! 
This line may be commented out for quad grids: - if(iCell1.ne.iCell.and.iCell2.ne.iCell.and.iCell3.ne.iCell) stop ' cells/vertices inverse failed' - enddo -enddo -write(6,*) ' cellsOnVertex and verticesOnCell are duals for every cell/vertex combination' - -!check edgesOnEdge -do iEdge=1,nEdgesNew - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - if(nEdgesOnEdgeNew(iEdge).eq.0) then - if(boundaryEdgeNew(1,iEdge).ne.1) stop ' stopping boundaryEdgeNew' - endif - do i=1,nEdgesOnEdgeNew(iEdge) - jEdge = edgesOnEdgeNew(i,iEdge) - jCell1 = cellsOnEdgeNew(1,jEdge) - jCell2 = cellsOnEdgeNew(2,jEdge) - if(jCell1.ne.iCell1.and.jCell1.ne.iCell2) then - if(jCell2.ne.iCell1.and.jCell2.ne.iCell2) then - write(6,*) 'error in edgesOnEdge' - write(6,*) iCell1, iCell2, jCell1, jCell2 - stop - endif - endif - enddo -enddo -write(6,*) ' edgesOnEdge is consistent with cellsOnEdge' - -end subroutine error_checking - - -subroutine copy_dimensions - -maxEdgesNew = maxEdges -maxEdges2New = maxEdges2 -TWONew = TWO -vertexDegreeNew = vertexDegree -nVertLevelsNew = nVertLevelsMod - -write(6,*) -write(6,*) ' new dimensions ' -write(6,*) ' maxEdgesNew : ', maxEdgesNew -write(6,*) ' maxEdges2New : ', maxEdges2New -write(6,*) ' TWONew : ', TWONew -write(6,*) ' vertexDegreeNew : ', vertexDegreeNew -write(6,*) ' nVertLevelsNew : ', nVertLevelsNew - -end subroutine copy_dimensions - - - -subroutine read_grid -implicit none - -call read_netcdf_init(nCells, nEdges, nVertices, maxEdges,maxEdges2,& - nVertLevels,TWO,vertexDegree) - -write(6,*) ' init from grid ' -write(6,*) 'nCells :', nCells -write(6,*) 'nEdges :', nEdges -write(6,*) 'nVertices :', nVertices -write(6,*) 'maxEdges :', maxEdges -write(6,*) 'maxEdges2 :', maxEdges2 -write(6,*) 'nVertLevels :', nVertLevels -write(6,*) 'vertexDegree :', vertexDegree -write(6,*) 'TWO :', TWO - -allocate(xCell(nCells)) -allocate(yCell(nCells)) -allocate(zCell(nCells)) -allocate(latCell(nCells)) -allocate(lonCell(nCells)) 
-allocate(meshDensity(nCells)) -allocate(xEdge(nEdges)) -allocate(yEdge(nEdges)) -allocate(zEdge(nEdges)) -allocate(latEdge(nEdges)) -allocate(lonEdge(nEdges)) -allocate(xVertex(nVertices)) -allocate(yVertex(nVertices)) -allocate(zVertex(nVertices)) -allocate(latVertex(nVertices)) -allocate(lonVertex(nVertices)) -allocate(dcEdge(nEdges)) -allocate(dvEdge(nEdges)) - -allocate(indexToCellID(nCells)) -allocate(indexToEdgeID(nEdges)) -allocate(indexToVertexID(nVertices)) - -allocate(cellsOnEdge(TWO,nEdges)) -allocate(nEdgesOnCell(nCells)) -allocate(nEdgesOnEdge(nEdges)) -allocate(edgesOnCell(maxEdges,nCells)) -allocate(edgesOnEdge(maxEdges2,nEdges)) -allocate(weightsOnEdge(maxEdges2,nEdges)) - -allocate(angleEdge(nEdges)) -allocate(areaCell(nCells)) -allocate(areaTriangle(nVertices)) -allocate(cellsOnCell(maxEdges,nCells)) -allocate(verticesOnCell(maxEdges,nCells)) -allocate(verticesOnEdge(TWO,nEdges)) -allocate(edgesOnVertex(vertexDegree,nVertices)) -allocate(cellsOnVertex(vertexDegree,nVertices)) -allocate(kiteAreasOnVertex(vertexDegree,nVertices)) - -allocate(fCell(nEdges)) -allocate(fEdge(nEdges)) -allocate(fVertex(nVertices)) -allocate(bottomDepth(nCells)) -allocate(work1(nCells)) -allocate(normalVelocity(1,nVertLevels,nEdges)) -allocate(surfaceWindStress(nEdges)) -allocate(tangentialVelocity(1,nVertLevels,nEdges)) -allocate(layerThickness(1,nVertLevels,nCells)) -allocate(density(1,nVertLevels,nCells)) - -xCell=0; yCell=0; zCell=0; latCell=0; lonCell=0; meshDensity=1.0 -xEdge=0; yEdge=0; zEdge=0; latEdge=0; lonEdge=0 -xVertex=0; yVertex=0; zVertex=0; latVertex=0; lonVertex=0 - -indexToCellID=0; indexToEdgeID=0; indexToVertexID=0 -cellsOnEdge=0; nEdgesOnCell=0; edgesOnCell=0 -edgesOnEdge=0; weightsOnEdge=0 -angleEdge=0; areaCell=0; areaTriangle=0 -cellsOnCell=0; verticesOnCell=0; verticesOnEdge=0 -edgesOnVertex=0; cellsOnVertex=0; kiteAreasOnVertex=0 - -fEdge=0; fVertex=0; bottomDepth=0; surfaceWindStress=0; work1=0 -normalVelocity=0; tangentialVelocity=0; 
layerThickness=0; density=0 - - -call read_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - bottomDepth, & - normalVelocity, & - tangentialVelocity, & - layerThickness & - ) - -write(6,*) ' values from read grid, min/max' -write(6,*) ' latCell : ', minval(latCell), maxval(latCell) -write(6,*) ' lonCell : ', minval(lonCell), maxval(lonCell) -write(6,*) ' meshDensity : ', minval(meshDensity),maxval(meshDensity) -write(6,*) ' xCell : ', minval(xCell), maxval(xCell) -write(6,*) ' yCell : ', minval(yCell), maxval(yCell) -write(6,*) ' zCell : ', minval(zCell), maxval(zCell) -write(6,*) ' indexToCellID : ', minval(indexToCellID), maxval(indexToCellID) -write(6,*) ' latEdge : ', minval(latEdge), maxval(latEdge) -write(6,*) ' lonEdge : ', minval(lonEdge), maxval(lonEdge) -write(6,*) ' xEdge : ', minval(xEdge), maxval(xEdge) -write(6,*) ' yEdge : ', minval(yEdge), maxval(yEdge) -write(6,*) ' zEdge : ', minval(zEdge), maxval(zEdge) -write(6,*) ' indexToEdgeID : ', minval(indexToEdgeID), maxval(indexToEdgeID) -write(6,*) ' latVertex : ', minval(latVertex), maxval(latVertex) -write(6,*) ' lonVertex : ', minval(lonVertex), maxval(lonVertex) -write(6,*) ' xVertex : ', minval(xVertex), maxval(xVertex) -write(6,*) ' yVertex : ', minval(yVertex), maxval(yVertex) -write(6,*) ' zVertex : ', minval(zVertex), maxval(zVertex) -write(6,*) ' indexToVertexID : ', minval(indexToVertexID), maxval(indexToVertexID) -write(6,*) ' cellsOnEdge : 
', minval(cellsOnEdge), maxval(cellsOnEdge) -write(6,*) ' nEdgesOnCell : ', minval(nEdgesOnCell), maxval(nEdgesOnCell) -write(6,*) ' nEdgesOnEdge : ', minval(nEdgesOnEdge), maxval(nEdgesOnEdge) -write(6,*) ' edgesOnCell : ', minval(edgesOnCell), maxval(edgesOnCell) -write(6,*) ' edgesOnEdge : ', minval(edgesOnEdge), maxval(edgesOnEdge) -write(6,*) ' weightsOnEdge : ', minval(weightsOnEdge), maxval(weightsOnEdge) -write(6,*) ' dvEdge : ', minval(dvEdge), maxval(dvEdge) -write(6,*) ' dcEdge : ', minval(dcEdge), maxval(dcEdge) -write(6,*) ' angleEdge : ', minval(angleEdge), maxval(angleEdge) -write(6,*) ' areaCell : ', minval(areaCell), maxval(areaCell) -write(6,*) ' areaTriangle : ', minval(areaTriangle), maxval(areaTriangle) -write(6,*) ' cellsOnCell : ', minval(cellsOnCell), maxval(cellsOnCell) -write(6,*) ' verticesOnCell : ', minval(verticesOnCell), maxval(verticesOnCell) -write(6,*) ' verticesOnEdge : ', minval(verticesOnEdge), maxval(verticesOnEdge) -write(6,*) ' edgesOnVertex : ', minval(edgesOnVertex), maxval(edgesOnVertex) -write(6,*) ' cellsOnVertex : ', minval(cellsOnVertex), maxval(cellsOnVertex) -write(6,*) ' kiteAreasOnVertex : ', minval(kiteAreasOnVertex), maxval(kiteAreasOnVertex) -write(6,*) ' fEdge : ', minval(fEdge), maxval(fEdge) -write(6,*) ' fVertex : ', minval(fVertex), maxval(fVertex) -write(6,*) ' bottomDepth : ', minval(bottomDepth), maxval(bottomDepth) -write(6,*) ' normalVelocity : ', minval(normalVelocity), maxval(normalVelocity) -write(6,*) ' tangentialVelocity : ', minval(tangentialVelocity), maxval(tangentialVelocity) -write(6,*) ' layerThickness : ', minval(layerThickness), maxval(layerThickness) - -end subroutine read_grid - - -subroutine write_grid -implicit none - -if (expand_from_unit_sphere) then - xCellNew = xCellNew * sphere_radius - yCellNew = yCellNew * sphere_radius - zCellNew = zCellNew * sphere_radius - xEdgeNew = xEdgeNew * sphere_radius - yEdgeNew = yEdgeNew * sphere_radius - zEdgeNew = zEdgeNew * sphere_radius - 
xVertexNew = xVertexNew * sphere_radius - yVertexNew = yVertexNew * sphere_radius - zVertexNew = zVertexNew * sphere_radius - dcEdgeNew = dcEdgeNew * sphere_radius - dvEdgeNew = dvEdgeNew * sphere_radius - areaCellNew = areaCellNew * (sphere_radius)**2 - areaTriangleNew = areaTriangleNew * (sphere_radius)**2 - kiteAreasOnVertexNew = kiteAreasOnVertexNew * (sphere_radius)**2 -endif - -call write_netcdf_init( & - nCellsNew, & - nEdgesNew, & - nVerticesNew, & - maxEdgesNew, & - nVertLevelsNew, & - vertexDegreeNew, & - sphere_radius, & - on_a_sphere & - ) - -call write_netcdf_fields( & - 1, & - latCellNew, & - lonCellNew, & - meshDensityNew, & - xCellNew, & - yCellNew, & - zCellNew, & - indexToCellIDNew, & - latEdgeNew, & - lonEdgeNew, & - xEdgeNew, & - yEdgeNew, & - zEdgeNew, & - indexToEdgeIDNew, & - latVertexNew, & - lonVertexNew, & - xVertexNew, & - yVertexNew, & - zVertexNew, & - indexToVertexIDNew, & - maxLevelCellNew, & - cellsOnEdgeNew, & - nEdgesOnCellNew, & - nEdgesOnEdgeNew, & - edgesOnCellNew, & - edgesOnEdgeNew, & - weightsOnEdgeNew, & - dvEdgeNew, & - dcEdgeNew, & - angleEdgeNew, & - areaCellNew, & - areaTriangleNew, & - cellsOnCellNew, & - verticesOnCellNew, & - verticesOnEdgeNew, & - edgesOnVertexNew, & - cellsOnVertexNew, & - kiteAreasOnVertexNew, & - fEdgeNew, & - fVertexNew, & - fCellNew, & - bottomDepthNew, & - boundaryEdgeNew, & - boundaryVertexNew, & - surfaceWindStressNew, & - surfaceWindStressNewZonal, & - surfaceWindStressNewMeridional, & - normalVelocityNew, & - layerThicknessNew, & - densityNew, & - temperatureNew, & - salinityNew, & - tracer1New, & - temperatureRestoreNew, & - salinityRestoreNew, & - boundaryLayerDepthNew, & - refBottomDepth & - ) - -call write_netcdf_finalize - -if (expand_from_unit_sphere) then - xCellNew = xCellNew / sphere_radius - yCellNew = yCellNew / sphere_radius - zCellNew = zCellNew / sphere_radius - xEdgeNew = xEdgeNew / sphere_radius - yEdgeNew = yEdgeNew / sphere_radius - zEdgeNew = zEdgeNew / sphere_radius - 
xVertexNew = xVertexNew / sphere_radius - yVertexNew = yVertexNew / sphere_radius - zVertexNew = zVertexNew / sphere_radius - dcEdgeNew = dcEdgeNew / sphere_radius - dvEdgeNew = dvEdgeNew / sphere_radius - areaCellNew = areaCellNew / (sphere_radius)**2 - areaTriangleNew = areaTriangleNew / (sphere_radius)**2 - kiteAreasOnVertexNew = kiteAreasOnVertexNew / (sphere_radius)**2 -endif - -end subroutine write_grid - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Step 4: Check define_kmt routine for bottomDepth and kmt (maxLevelCell) variables -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine define_kmt -implicit none -real (kind=4), allocatable, dimension(:) :: x,y, work_kmt -real (kind=4), allocatable, dimension(:,:) :: ztopo -integer :: nx, ny, inx, iny, ix, iy, kmt_neighbor_max -integer :: kEmbayment -real :: pi, dtr, zdata, rlon, rlat, r, ymin, ymax, xmin, xmax -real :: latmin, latmax, lonmin, lonmax, ridgeDepth, maxdc -real :: b, H0, phi, gamma, distance, rlonC, rlatC -real :: y0_embayment, embayment_depth, embayment_slope, z -logical :: flag, kmt_flag -real :: y1,y2,y3, d1,d2,d3 - -pi = 4.0*atan(1.0) -dtr = pi / 180.0 - -allocate(kmt(nCells)) -kmt = 0 - -if (bottom_topography.eq.'realistic_ETOPO') then - - nx = 10800 - ny = 5400 - allocate(x(nx)) - allocate(y(ny)) - allocate(ztopo(nx,ny)) - x = 0.0 - y = 0.0 - ztopo = 0.0 - write(6,*) ' ztopo ', minval(ztopo), maxval(ztopo) - call read_topo_init( inx, iny) - if(inx.ne.nx) stop ' nx topo' - if(iny.ne.ny) stop ' ny topo' - call read_topo_fields(x,y,ztopo) - call read_topo_finalize() - write(6,*) minval(x), maxval(x), x(1) - write(6,*) minval(y), maxval(y), y(1) - write(6,*) minval(ztopo), maxval(ztopo) - - do iCell=1,nCells - - ! Convert from radians to degrees - rlon = lonCell(iCell) / dtr - rlat = latCell(iCell) / dtr - - ! Find nearest coordinate in topo file. - ! This is 1/30th degree topo data, so multiply degrees by 30 and - ! 
round to get index. - ix = nint((rlon+180)*30) + nx - ix = mod(ix,nx)+1 - iy = nint((rlat+90 )*30) - ix = max(1,ix); ix = min(nx,ix) - iy = max(1,iy); iy = min(ny,iy) - - zdata = ztopo(ix,iy) - - ! zdata is less than zero for ocean points. - if(zdata.lt.0.0) then - zdata = -zdata - bottomDepth(iCell) = zdata - r = 0 - kmt_flag=.false. - do k=1,nVertLevelsMod - if(.not.kmt_flag) then - r = r + dz(k) - if(r.gt.zdata) then - kmt(iCell) = k - kmt_flag = .true. - endif - endif - enddo - - ! zdata is deeper than deepest cell - if (kmt(iCell).eq.0) then - kmt(iCell)=nVertLevelsMod - bottomDepth(iCell) = refBottomDepth(nVertLevelsMod) - endif - - !write(6,*) kmt(iCell) - - endif - - ! if(zdata.lt.0.0) kmt(iCell) = nVertLevelsMod - - enddo - - deallocate(x) - deallocate(y) - deallocate(ztopo) - -elseif (bottom_topography.eq.'Ilicak2_overflow') then - - kmt = nVertLevelsMOD - - ridgeDepth = 500.0 - do iCell = 1,nCells - ! From Mehmet Ilicak: - ! depth=2000 - ! val1 = 500 is top of ridge - ! h(i,j) = val1 + 0.5*(depth-val1) * (1.0+TANH((lon(i,j)-40000.0)/7000.0)) - bottomDepth(iCell) = ridgeDepth + 0.5*(layer_thickness_total_max-ridgeDepth) * (1.0+tanh((yCell(iCell)-40000.0)/7000.0)) - - if (bottomDepth(iCell).lt.0.0.or. & - bottomDepth(iCell).gt.refBottomDepth(nVertLevelsMOD)) then - print *, 'error: bottomDepth cannot be less than zero or greater than refBottomDepth(nVertLevels)' - print *, 'iCell, bottomDepth(iCell):', iCell, bottomDepth(iCell) - exit - end if - - do k=1,nVertLevelsMOD - if (bottomDepth(iCell).le.refBottomDepth(k)) then - kmt(iCell) = k - exit - endif - end do - - enddo - -elseif (bottom_topography.eq.'Ilicak2_overflow_sigma') then - - ridgeDepth = 500.0 - do iCell = 1,nCells - ! From Mehmet Ilicak: - ! depth=2000 - ! val1 = 500 is top of ridge - ! 
h(i,j) = val1 + 0.5*(depth-val1) * (1.0+TANH((lon(i,j)-40000.0)/7000.0)) - bottomDepth(iCell) = ridgeDepth + 0.5*(layer_thickness_total_max-ridgeDepth) * (1.0+tanh((yCell(iCell)-40000.0)/7000.0)) - - if (bottomDepth(iCell).lt.0.0.or. & - bottomDepth(iCell).gt.refBottomDepth(nVertLevelsMOD)) then - print *, 'error: bottomDepth cannot be less than zero or greater than refBottomDepth(nVertLevels)' - print *, 'iCell, bottomDepth(iCell):', iCell, bottomDepth(iCell) - exit - end if - - enddo - - ! for sigma coordinates, set kmt to the max level. - kmt = nVertLevelsMOD - -elseif (bottom_topography.eq.'sub_ice_shelf_test3') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1= 30.0e3 - y2= 90.0e3 - y3=150.0e3 - d1=refBottomDepth(nVertLevelsMOD) - d2=refBottomDepth(nVertLevelsMOD) - 250.0 - d3=refBottomDepth(nVertLevelsMOD) - - ! set up sub ice shelf thicknesses - do iCell=1,nCells - if (yCell(iCell) < y1 .or. yCell(iCell) > y3) then - kmt(iCell) = nVertLevelsMOD - bottomDepth(iCell) = refBottomDepth(nVertLevelsMOD) - else - if (yCell(iCell) < y2 ) then - bottomDepth(iCell) = d1 + (d2-d1)*(yCell(iCell)-y1)/(y2-y1) - else - bottomDepth(iCell) = d2 + (d3-d2)*(yCell(iCell)-y2)/(y3-y2) - endif - - do k=1,nVertLevelsMOD - if (bottomDepth(iCell).le.refBottomDepth(k)) then - kmt(iCell) = k - exit - endif - end do - ! reset bottomDepth to it's full cell value - bottomDepth(iCell) = refBottomDepth(kmt(iCell)) - endif - - - enddo - -elseif (bottom_topography.eq.'DOME_3D_overflow') then - - y0_embayment = 600.0e3 ! y location of beginning of embayment - embayment_depth =600 ! depth of embayment, m - embayment_slope = 0.01 ! slope - - ! 
The first level where z is deeper than zmid, set k-level of embayment - do k=2,nVertLevelsMod - if ((refBottomDepth(k)+refBottomDepth(k-1))/2.0.gt.embayment_depth) then - kEmbayment = k - exit - endif - enddo - print *, 'kEmbayment',kEmbayment - do iCell=1,nCells - if (yCell(iCell).gt.y0_embayment) then - ! note: I made embayment 200km wide for the 50km rez hex case, so - ! it is always wider than 1 cell. For higher rez, change this - ! to 1700 to 1800. - if (xCell(iCell).lt.1600.0e3.or. & - xCell(iCell).gt.1800.0e3 ) then - kmt(iCell)=0 - else - kmt(iCell) = kEmbayment - endif - bottomDepth(iCell) = embayment_depth - - else - ! compute depth based on y coordinate - bottomDepth(iCell) = min(embayment_depth + embayment_slope*(y0_embayment - yCell(iCell)),refBottomDepth(nVertLevelsMOD)) - ! default depth is - kmt(iCell) = nVertLevelsMod - do k=2,nVertLevelsMod - ! The first level where z is deeper than zmid, set kmt. - if ((refBottomDepth(k)+refBottomDepth(k-1))/2.0.gt.bottomDepth(iCell)) then - kmt(iCell) = k - exit - endif - enddo - endif - - ! If using no partial bottom cells, use this: - if (kmt(iCell).gt.0) then - bottomDepth(iCell) = refBottomDepth(kmt(iCell)) - endif - - enddo - - - -elseif (bottom_topography.eq.'SOMA_Circular_Basin') then - - pi = 4.0*atan(1.0) - dtr = pi / 180.0 - b = 1.25e6 - phi = 0.1 - H0 = 2500.0 - gamma = -0.4 - - kmt = 0 - - do iCell=1,nCells - rlon = lonCell(iCell) - rlat = latCell(iCell) - rlonC = 0.0 - rlatC = 35.0*dtr - - r = sqrt( sin(0.5*(rlatC-rlat))**2 + & - cos(rlat)*cos(rlatC)*sin(0.5*(rlonC-rlon))**2 ) - distance = 2.*sphere_radius*asin(r) - - r = 1.0 - distance**2 / b**2 - - if(r.gt.gamma) then - zdata = -100.0 - (H0-100.0)/2.0 * (1.0 + tanh(r/phi)) - else - zdata = 100.0 - endif - bottomDepth(iCell) = -zdata - - - if(zdata.lt.0.0) then - zdata = -zdata - r = 0 - kmt_flag=.false. 
- do k=1,nVertLevelsMod - if(.not.kmt_flag) then - r = r + dz(k) - if(r.gt.zdata) then - kmt(iCell) = k - bottomDepth(iCell) = refBottomDepth(k) - kmt_flag = .true. - endif - endif - enddo - if(kmt(iCell).eq.0) kmt(iCell)=nVertLevelsMod - endif - - enddo - - -elseif (bottom_topography.eq.'flat_bottom') then - - kmt = nVertLevelsMOD - bottomDepth = refBottomDepth(nVertLevelsMOD) - -else - - print *, ' Incorrect choice of bottom_topography: ',bottom_topography - stop - -endif - -if (cut_domain_from_sphere) then - latmin = -30*dtr - latmax = +30*dtr - lonmin = +10*dtr - lonmax = +70*dtr - write(6,*) ' lat min ', latmin - write(6,*) ' lat max ', latmax - where(latCell.lt.latmin) kmt = 0 - where(latCell.gt.latmax) kmt = 0 - where(lonCell.lt.lonmin) kmt = 0 - where(lonCell.gt.lonmax) kmt = 0 -endif - -if (solid_boundary_in_y) then - ymin = minval(yCell) - write(6,*) ' minimum yCell ', ymin - ymax = maxval(yCell) - write(6,*) ' maximum yCell ', ymax - where(yCell.lt.1.001*ymin) kmt = 0 - where(yCell.gt.0.999*ymax) kmt = 0 -endif - -if (solid_boundary_in_x) then - maxdc = maxval(dcEdge) - xmin = minval(xCell) - write(6,*) ' minimum xCell ', xmin - xmax = maxval(xCell) - write(6,*) ' maximum xCell ', xmax - where(xCell.lt.xmin+maxdc/1.5) kmt = 0 - where(xCell.gt.xmax-maxdc/1.5) kmt = 0 -endif - - -allocate(work_kmt(nCells)) -work_kmt = 0.0 -where(kmt.eq.0) work_kmt=1.0 -write(6,*) 'number of cells culled ',sum(work_kmt) -deallocate(work_kmt) - - -! Eliminate isolated ocean cells, and make these isolated deep cells -! flush with the deepest neighbor. 
-do iCell=1,nCells - kmt_neighbor_max = 0 - do j=1,nEdgesOnCell(iCell) - iCell1 = cellsOnCell(j,iCell) - kmt_neighbor_max = max(kmt_neighbor_max,kmt(iCell1)) - enddo - if (kmt(iCell).gt.kmt_neighbor_max) then - kmt(iCell) = kmt_neighbor_max - bottomDepth(iCell) = refBottomDepth(kmt(iCell)) - endif -enddo - -if(eliminate_inland_seas) then -call eliminateLoops(nCells,nEdges,nVertices,maxEdges,vertexDegree, & - nEdgesOnCell, cellsOnCell, verticesOnEdge, cellsOnVertex, edgesOnCell, lonCell, latCell, & - xCell, yCell, zCell, xEdge, yEdge, zEdge, xVertex, yVertex, zVertex, & - KMT) -endif - -! do not allow land or PBCs in top layers -k = min(top_layers_without_land,nVertLevelsMOD) -where(kmt.gt.0.and.kmt.le.k) - bottomDepth = refBottomDepth(k) - kmt=k -endwhere - -end subroutine define_kmt - - - -subroutine define_mapping -implicit none - -allocate(cellMap(nCells)) -allocate(edgeMap(nEdges)) -allocate(vertexMap(nVertices)) -cellMap = 0; edgeMap = 0; vertexMap = 0 - -j=1 -do i=1,nCells -if(kmt(i).ne.0) then - cellMap(i) = j - j=j+1 -endif -write(10,*) i, cellMap(i) -enddo - -j=1 -do i=1,nEdges -iCell1 = cellsOnEdge(1,i) -iCell2 = cellsOnEdge(2,i) -if(kmt(iCell1).ne.0.or.kmt(iCell2).ne.0) then - edgeMap(i)=j - j=j+1 -endif -write(11,*) i,edgeMap(i) -enddo - -j=1 -do i=1,nVertices -iCell1 = cellsOnVertex(1,i) -iCell2 = cellsOnVertex(2,i) -iCell3 = cellsOnVertex(3,i) -if(kmt(iCell1).ne.0.or.kmt(iCell2).ne.0.or.kmt(iCell3).ne.0) then - vertexMap(i)=j - j=j+1 -endif -write(12,*) i,vertexMap(i) -enddo - -nCellsNew = 0 -do i=1,nCells -if(cellMap(i).ne.0) nCellsNew = nCellsNew + 1 -enddo - -nEdgesNew = 0 -do i=1,nEdges -if(edgeMap(i).ne.0) nEdgesNew = nEdgesNew + 1 -enddo - -nVerticesNew = 0 -do i=1,nVertices -if(vertexMap(i).ne.0) nVerticesNew = nVerticesNew + 1 -enddo - -write(6,*) ' mesh mapping found ' -write(6,*) nCells, nCellsNew -write(6,*) nEdges, nEdgesNew -write(6,*) nVertices, nVerticesNew - -allocate(indexToCellIDNew(nCellsNew)) -allocate(indexToEdgeIDNew(nEdgesNew)) 
-allocate(indexToVertexIDNew(nVerticesNew)) -indextoCellIDNew = 0; indexToEdgeIDNew = 0; indexToVertexIDNew = 0 - -do i=1,nCellsNew -indexToCellIDNew(i)=i -enddo - -do i=1,nEdgesNew -indexToEdgeIDNew(i)=i -enddo - -do i=1,nVerticesNew -indexToVertexIDNew(i)=i -enddo - -end subroutine define_mapping - - -subroutine map_vectors -implicit none - -allocate(xCellNew(nCellsNew)) -allocate(yCellNew(nCellsNew)) -allocate(zCellNew(nCellsNew)) -allocate(normalsNew(3,nEdgesNew)) -allocate(latCellNew(nCellsNew)) -allocate(lonCellNew(nCellsNew)) -allocate(meshDensityNew(nCellsNew)) -allocate(meshSpacingNew(nCellsNew)) -allocate(xEdgeNew(nEdgesNew)) -allocate(yEdgeNew(nEdgesNew)) -allocate(zEdgeNew(nEdgesNew)) -allocate(latEdgeNew(nEdgesNew)) -allocate(lonEdgeNew(nEdgesNew)) -allocate(xVertexNew(nVerticesNew)) -allocate(yVertexNew(nVerticesNew)) -allocate(zVertexNew(nVerticesNew)) -allocate(latVertexNew(nVerticesNew)) -allocate(lonVertexNew(nVerticesNew)) -allocate(dcEdgeNew(nEdgesNew)) -allocate(dvEdgeNew(nEdgesNew)) -allocate(angleEdgeNew(nEdgesNew)) -allocate(areaCellNew(nCellsNew)) -allocate(areaTriangleNew(nVerticesNew)) -allocate(maxLevelCellNew(nCellsNew)) - -allocate(fCellNew(nEdgesNew)) -allocate(fEdgeNew(nEdgesNew)) -allocate(fVertexNew(nVerticesNew)) -allocate(bottomDepthNew(nCellsNew)) -allocate(surfaceWindStressNew(nEdgesNew)) -allocate(surfaceWindStressNewZonal(nEdgesNew)) -allocate(surfaceWindStressNewMeridional(nEdgesNew)) -allocate(normalVelocityNew(1,nVertLevelsNew,nEdgesNew)) -allocate(layerThicknessNew(1,nVertLevelsNew,nCellsNew)) -allocate(densityNew(1,nVertLevelsNew,nCellsNew)) -allocate(temperatureNew(1,nVertLevelsNew,nCellsNew)) -allocate(salinityNew(1,nVertLevelsNew,nCellsNew)) -allocate(tracer1New(1,nVertLevelsNew,nCellsNew)) - -allocate(temperatureRestoreNew(nCellsNew)) -allocate(salinityRestoreNew(nCellsNew)) -allocate(boundaryLayerDepthNew(nCellsNew)) - -xCellNew=0; yCellNew=0; zCellNew=0; latCellNew=0; lonCellNew=0; meshDensityNew=1.0; 
meshSpacingNew=0.0 -xEdgeNew=0; yEdgeNew=0; zEdgeNew=0; latEdgeNew=0; lonEdgeNew=0 -xVertexNew=0; yVertexNew=0; zVertexNew=0; latVertexNew=0; lonVertexNew=0 - -fCellNew=0; fEdgeNew=0; fVertexNew=0; bottomDepthNew=0; surfaceWindStressNew = 0; surfaceWindStressNewZonal = 0.0; surfaceWindStressNewMeridional = 0.0; -normalVelocityNew=0; layerThicknessNew=0; densityNew=0 -temperatureNew=0; salinityNew=0; tracer1New=0; - -temperatureRestoreNew = 0.0 -salinityRestoreNew = 0.0 -boundaryLayerDepthNew = 0.0 - -do i=1,nCells -jNew = cellMap(i) -if(jNew.ne.0) then - xCellNew(jNew)=xCell(i) - yCellNew(jNew)=yCell(i) - zCellNew(jNew)=zCell(i) - latCellNew(jNew)=latCell(i) - lonCellNew(jNew)=lonCell(i) - meshDensityNew(jNew)=meshDensity(i) - areaCellNew(jNew)=areaCell(i) - maxLevelCellNew(jNew) = kmt(i) - bottomDepthNew(jNew) = bottomDepth(i) -endif -enddo - -do i=1,nEdges -jNew = edgeMap(i) -if(jNew.ne.0) then - xEdgeNew(jNew)=xEdge(i) - yEdgeNew(jNew)=yEdge(i) - zEdgeNew(jNew)=zEdge(i) - latEdgeNew(jNew)=latEdge(i) - lonEdgeNew(jNew)=lonEdge(i) - dcEdgeNew(jNew) = dcEdge(i) - dvEdgeNew(jNew) = dvEdge(i) - fCellNew(jNew) = fCell(i) - fEdgeNew(jNew) = fEdge(i) - angleEdgeNew(jNew) = angleEdge(i) -endif -enddo - -do i=1,nVertices -jNew = vertexMap(i) -if(jNew.ne.0) then - xVertexNew(jNew)=xVertex(i) - yVertexNew(jNew)=yVertex(i) - zVertexNew(jNew)=zVertex(i) - latVertexNew(jNew)=latVertex(i) - lonVertexNew(jNew)=lonVertex(i) - fVertexNew(jNew)=fVertex(i) - areaTriangleNew(jNew)=areaTriangle(i) -endif -enddo - -deallocate(xCell) -deallocate(yCell) -deallocate(zCell) -deallocate(latCell) -deallocate(lonCell) -deallocate(meshDensity) -deallocate(xEdge) -deallocate(yEdge) -deallocate(zEdge) -deallocate(latEdge) -deallocate(lonEdge) -deallocate(xVertex) -deallocate(yVertex) -deallocate(zVertex) -deallocate(latVertex) -deallocate(lonVertex) -deallocate(dcEdge) -deallocate(dvEdge) -!deallocate(bottomDepth) - -end subroutine map_vectors - - - -subroutine map_connectivity -implicit none - 
-allocate(cellsOnEdgeNew(TWONew,nEdgesNew)) -allocate(boundaryEdgeNew(nVertLevelsNew,nEdgesNew)) -allocate(flipVerticesOnEdgeOrdering(nEdgesNew)) -cellsOnEdgeNew(:,:) = 0 -boundaryEdgeNew(:,:) = 0 -flipVerticesOnEdgeOrdering(:) = 0 -do iEdge=1,nEdges -if(edgeMap(iEdge).eq.0) cycle -iEdgeNew = edgeMap(iEdge) -iCell1 = cellsOnEdge(1,iEdge) -iCell2 = cellsOnEdge(2,iEdge) -iCell1New = cellMap(iCell1) -iCell2New = cellMap(iCell2) -cellsOnEdgeNew(1,iEdgeNew) = iCell1New -cellsOnEdgeNew(2,iEdgeNew) = iCell2New -if(iCell1New.eq.0.or.iCell2New.eq.0) boundaryEdgeNew(:,iEdgeNew) = 1 -if(iCell1New.eq.0.and.iCell2New.eq.0) stop "cellsOnEdge" -if(iCell1New.eq.0) then - cellsOnEdgeNew(1,iEdgeNew) = iCell2New - cellsOnEdgeNew(2,iEdgeNew) = iCell1New - flipVerticesOnEdgeOrdering(iEdgeNew) = 1 -endif -enddo -deallocate(cellsOnEdge) - -allocate(verticesOnEdgeNew(TWONew,nEdgesNew)) -allocate(boundaryVertexNew(nVertLevelsNew,nVerticesNew)) -verticesOnEdgeNew(:,:) = 0 -boundaryVertexNew(:,:) = 0 -do iEdge=1,nEdges -if(edgeMap(iEdge).eq.0) cycle -iEdgeNew = edgeMap(iEdge) -iVertex1 = VerticesOnEdge(1,iEdge) -iVertex2 = VerticesOnEdge(2,iEdge) -iVertex1New = vertexMap(iVertex1) -iVertex2New = vertexMap(iVertex2) -if(iVertex1New.eq.0.or.iVertex2New.eq.0) stop "verticesOnEdge" -if(flipVerticesOnEdgeOrdering(iEdgeNew).eq.0) then - verticesOnEdgeNew(1,iEdgeNew) = iVertex1New - verticesOnEdgeNew(2,iEdgeNew) = iVertex2New -else - verticesOnEdgeNew(1,iEdgeNew) = iVertex2New - verticesOnEdgeNew(2,iEdgeNew) = iVertex1New -endif -if(boundaryEdgeNew(1,iEdgeNew).eq.1) then - boundaryVertexNew(:,iVertex1New)=1 - boundaryVertexNew(:,iVertex2New)=1 -endif -enddo -deallocate(verticesOnEdge) - -allocate(nEdgesOnEdgeNew(nEdgesNew)) -allocate(edgesOnEdgeNew(maxEdges2,nEdgesNew)) -allocate(weightsOnEdgeNew(maxEdges2,nEdgesNew)) -nEdgesOnEdgeNew(:) = 0 -edgesOnEdgeNew(:,:) = 0 -weightsOnEdgeNew(:,:) = 0.0 -do iEdge=1,nEdges -if(edgeMap(iEdge).eq.0) cycle -iEdgeNew = edgeMap(iEdge) 
-if(boundaryEdgeNew(1,iEdgeNew).eq.1) then - nEdgesOnEdgeNew(iEdgeNew) = 0 - edgesOnEdgeNew(:,iEdgeNew) = 0 - weightsOnEdgeNew(:,iEdgeNew) = 0.0 -else - nEdgesOnEdgeNew(iEdgeNew) = nEdgesOnEdge(iEdge) - do i=1,nEdgesOnEdgeNew(iEdgeNew) - jEdge = edgesOnEdge(i,iEdge) - jEdgeNew = edgeMap(jEdge) - if(jEdgeNew.eq.0) stop "jEdgeNew" - edgesOnEdgeNew(i,iEdgeNew)=jEdgeNew - weightsOnEdgeNew(i,iEdgeNew) = weightsOnEdge(i,iEdge) - enddo -endif -enddo -deallocate(nEdgesOnEdge) -deallocate(edgesOnEdge) -deallocate(weightsOnEdge) - -allocate(cellsOnCellNew(maxEdges,nCellsNew)) -allocate(nEdgesOnCellNew(nCellsNew)) -cellsOnCellNew = 0 -nEdgesOnCellNew = 0 -do iCell=1,nCells -if(cellMap(iCell).eq.0) cycle -iCellNew = cellMap(iCell) -nEdgesOnCellNew(iCellNew)=nEdgesOnCell(iCell) -do i=1,nEdgesOnCellNew(iCellNew) -j = cellsOnCell(i,iCell) -jNew = cellMap(j) -cellsOnCellNew(i,iCellNew) = jNew -enddo -enddo -deallocate(cellsOnCell) -deallocate(nEdgesOnCell) - -allocate(edgesOnCellNew(maxEdgesNew,nCellsNew)) -edgesOnCellNew(:,:) = 0 -meshSpacingNew(:) = 0.0 -do iCell=1,nCells -if(cellMap(iCell).eq.0) cycle -iCellNew = cellMap(iCell) -do i=1,nEdgesOnCellNew(iCellNew) -j = edgesOnCell(i,iCell) -jNew = edgeMap(j) -if(jNew.eq.0) stop "edgesOnCell" -edgesOnCellNew(i,iCellNew) = jNew -meshSpacingNew(iCellNew) = meshSpacingNew(iCellNew) + dcEdgeNew(jNew)/nEdgesOnCellNew(iCellNew) -enddo -enddo -deallocate(edgesOnCell) - -allocate(verticesOnCellNew(maxEdgesNew,nCellsNew)) -verticesOnCellNew(:,:)=0 -do iCell=1,nCells -if(cellMap(iCell).eq.0) cycle -iCellNew = cellMap(iCell) -do i=1,nEdgesOnCellNew(iCellNew) -j=verticesOnCell(i,iCell) -jNew = vertexMap(j) -if(jNew.eq.0) stop "verticesOnCell" -verticesOnCellNew(i,iCellNew) = jNew -enddo -enddo -deallocate(verticesOnCell) - -allocate(cellsOnVertexNew(vertexDegreeNew,nVerticesNew)) -allocate(kiteAreasOnVertexNew(vertexDegreeNew,nVerticesNew)) -cellsOnVertexNew = 0 -kiteAreasOnVertexNew = 0 -do iVertex=1,nVertices -if(vertexMap(iVertex).eq.0) 
cycle -iVertexNew = vertexMap(iVertex) -do i=1,vertexDegree -j=cellsOnVertex(i,iVertex) -jNew=cellMap(j) -if(jNew.eq.0) then - kiteAreasOnVertexNew(i,iVertexNew)=0 -else - kiteAreasOnVertexNew(i,iVertexNew)=kiteAreasOnVertex(i,iVertex) -endif -cellsOnVertexNew(i,iVertexNew)=jNew -enddo -enddo -deallocate(cellsOnVertex) -deallocate(kiteAreasOnVertex) - -areaTriangleNew = 0 -do iVertex=1,nVerticesNew -do i=1,vertexDegree -areaTriangleNew(iVertex) = areaTriangleNew(iVertex) + kiteAreasOnVertexNew(i,iVertex) -enddo -enddo - -allocate(edgesOnVertexNew(vertexDegreeNew, nVerticesNew)) -edgesOnVertexNew = 0 -do iVertex=1,nVertices -if(vertexMap(iVertex).eq.0) cycle -iVertexNew = vertexMap(iVertex) -do i=1,vertexDegree -j=edgesOnVertex(i,iVertex) -jNew=edgeMap(j) -edgesOnVertexNew(i,iVertexNew)=jNew -enddo -enddo -deallocate(edgesOnVertex) - -! find normals -normalsNew = 0.0 -do iEdge=1,nEdgesNew -cell1 = cellsOnEdgeNew(1,iEdge) -cell2 = cellsOnEdgeNew(2,iEdge) -if(cell1.eq.0.or.cell2.eq.0) cycle -c1(1) = xCellNew(cell1); c1(2) = yCellNew(cell1); c1(3) = zCellNew(cell1) -c2(1) = xCellNew(cell2); c2(2) = yCellNew(cell2); c2(3) = zCellNew(cell2) -distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - -if(on_a_sphere.eq.'YES') then - normalsNew(1,iEdge) = c2(1) - c1(1) - normalsNew(2,iEdge) = c2(2) - c1(2) - normalsNew(3,iEdge) = c2(3) - c1(3) - distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - normalsNew(:,iEdge) = normalsNew(:,iEdge) / distance -else - if(distance.gt.0.5*Lx) then - write(6,*) ' periodic edge ', iEdge, distance - write(6,10) ' c1 ', c1(:) - write(6,10) ' c2 ', c2(:) - r = c2(1) - c1(1) - if(r.gt.0) c2(1) = c2(1) - Lx - if(r.lt.0) c2(1) = c2(1) + Lx - distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - write(6,*) ' periodic edge fix ', iEdge, r, distance - endif - normalsNew(1,iEdge) = c2(1) - c1(1) - normalsNew(2,iEdge) = c2(2) - c1(2) - normalsNew(3,iEdge) = c2(3) - c1(3) - distance = 
sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - normalsNew(:,iEdge) = normalsNew(:,iEdge) / distance -endif -enddo -10 format(a20,3e15.5) - -end subroutine map_connectivity - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Step 5: Check get_dz routine for hZLevel variable -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine get_dz -integer k - -if (zLevel_thickness.eq.'equally_spaced') then - - write(6,*) ' equally spaced zLevels' - do i = 1, nVertLevelsMOD - hZLevel(i) = layer_thickness_total_max / nVertLevelsMOD - end do - -elseif(zLevel_thickness.eq.'SOMA_40_Level') then - - allocate(dz(40)) - dz( 1) = 4.6074 - dz( 2) = 5.1112 - dz( 3) = 5.6694 - dz( 4) = 6.2880 - dz( 5) = 6.9732 - dz( 6) = 7.7322 - dz( 7) = 8.5730 - dz( 8) = 9.5041 - dz( 9) = 10.5353 - dz(10) = 11.6770 - dz(11) = 12.9411 - dz(12) = 14.3404 - dz(13) = 15.8894 - dz(14) = 17.6038 - dz(15) = 19.5011 - dz(16) = 21.6005 - dz(17) = 23.9235 - dz(18) = 26.4934 - dz(19) = 29.3363 - dz(20) = 32.4807 - dz(21) = 35.9585 - dz(22) = 39.8044 - dz(23) = 44.0570 - dz(24) = 48.7588 - dz(25) = 53.9568 - dz(26) = 59.7026 - dz(27) = 66.0534 - dz(28) = 73.0722 - dz(29) = 80.8284 - dz(30) = 89.3985 - dz(31) = 98.8672 - dz(32) = 109.3274 - dz(33) = 120.8818 - dz(34) = 133.6436 - dz(35) = 147.7376 - dz(36) = 163.3011 - dz(37) = 180.4858 - dz(38) = 199.4585 - dz(39) = 220.4032 - dz(40) = 243.5224 - - hZLevel = dz - -elseif(zLevel_thickness.eq.'POP_40_zLevel') then - - allocate(dz(40)) - dz( 1) = 1001.244 ! 5.006218 10.01244 - dz( 2) = 1011.258 ! 15.06873 20.12502 - dz( 3) = 1031.682 ! 25.28342 30.44183 - dz( 4) = 1063.330 ! 35.75848 41.07513 - dz( 5) = 1107.512 ! 46.61269 52.15025 - dz( 6) = 1166.145 ! 57.98098 63.81171 - dz( 7) = 1241.928 ! 70.02135 76.23099 - dz( 8) = 1338.612 ! 82.92405 89.61711 - dz( 9) = 1461.401 ! 96.92412 104.2311 - dz(10) = 1617.561 ! 112.3189 120.4067 - dz(11) = 1817.368 ! 129.4936 138.5804 - dz(12) = 2075.558 ! 
148.9582 159.3360 - dz(13) = 2413.680 ! 171.4044 183.4728 - dz(14) = 2863.821 ! 197.7919 212.1110 - dz(15) = 3474.644 ! 229.4842 246.8575 - dz(16) = 4320.857 ! 268.4617 290.0660 - dz(17) = 5516.812 ! 317.6501 345.2342 - dz(18) = 7230.458 ! 381.3865 417.5388 - dz(19) = 9674.901 ! 465.9133 514.2878 - dz(20) = 13003.92 ! 579.3074 644.3270 - dz(21) = 17004.89 ! 729.3514 814.3759 - dz(22) = 20799.33 ! 918.3725 1022.369 - dz(23) = 23356.94 ! 1139.154 1255.939 - dz(24) = 24527.19 ! 1378.574 1501.210 - dz(25) = 24898.04 ! 1625.701 1750.191 - dz(26) = 24983.22 ! 1875.107 2000.023 - dz(27) = 24997.87 ! 2125.012 2250.002 - dz(28) = 24999.79 ! 2375.000 2500.000 - dz(29) = 24999.98 ! 2625.000 2749.999 - dz(30) = 25000.00 ! 2874.999 2999.999 - dz(31) = 25000.00 ! 3124.999 3249.999 - dz(32) = 25000.00 ! 3374.999 3499.999 - dz(33) = 25000.00 ! 3624.999 3749.999 - dz(34) = 25000.00 ! 3874.999 3999.999 - dz(35) = 25000.00 ! 4124.999 4249.999 - dz(36) = 25000.00 ! 4374.999 4499.999 - dz(37) = 25000.00 ! 4624.999 4749.999 - dz(38) = 25000.00 ! 4874.999 4999.999 - dz(39) = 25000.00 ! 5124.999 5249.999 - dz(40) = 25000.00 ! 
5374.999 5499.999 - - dz = dz / 100.0 - - hZLevel = dz - -elseif(zLevel_thickness.eq.'scaled_100Level') then - - allocate(dz(100)) - dz( 1) = 0.75707E-03 - dz( 2) = 0.76186E-03 - dz( 3) = 0.76675E-03 - dz( 4) = 0.77174E-03 - dz( 5) = 0.77685E-03 - dz( 6) = 0.78207E-03 - dz( 7) = 0.78740E-03 - dz( 8) = 0.79286E-03 - dz( 9) = 0.79844E-03 - dz( 10) = 0.80416E-03 - dz( 11) = 0.81001E-03 - dz( 12) = 0.81600E-03 - dz( 13) = 0.82215E-03 - dz( 14) = 0.82844E-03 - dz( 15) = 0.83490E-03 - dz( 16) = 0.84152E-03 - dz( 17) = 0.84832E-03 - dz( 18) = 0.85529E-03 - dz( 19) = 0.86246E-03 - dz( 20) = 0.86983E-03 - dz( 21) = 0.87740E-03 - dz( 22) = 0.88519E-03 - dz( 23) = 0.89320E-03 - dz( 24) = 0.90145E-03 - dz( 25) = 0.90996E-03 - dz( 26) = 0.91872E-03 - dz( 27) = 0.92776E-03 - dz( 28) = 0.93709E-03 - dz( 29) = 0.94672E-03 - dz( 30) = 0.95667E-03 - dz( 31) = 0.96696E-03 - dz( 32) = 0.97761E-03 - dz( 33) = 0.98864E-03 - dz( 34) = 0.10001E-02 - dz( 35) = 0.10119E-02 - dz( 36) = 0.10242E-02 - dz( 37) = 0.10370E-02 - dz( 38) = 0.10503E-02 - dz( 39) = 0.10642E-02 - dz( 40) = 0.10786E-02 - dz( 41) = 0.10936E-02 - dz( 42) = 0.11093E-02 - dz( 43) = 0.11258E-02 - dz( 44) = 0.11430E-02 - dz( 45) = 0.11610E-02 - dz( 46) = 0.11799E-02 - dz( 47) = 0.11999E-02 - dz( 48) = 0.12208E-02 - dz( 49) = 0.12430E-02 - dz( 50) = 0.12663E-02 - dz( 51) = 0.12911E-02 - dz( 52) = 0.13174E-02 - dz( 53) = 0.13454E-02 - dz( 54) = 0.13752E-02 - dz( 55) = 0.14071E-02 - dz( 56) = 0.14413E-02 - dz( 57) = 0.14781E-02 - dz( 58) = 0.15178E-02 - dz( 59) = 0.15609E-02 - dz( 60) = 0.16077E-02 - dz( 61) = 0.16590E-02 - dz( 62) = 0.17154E-02 - dz( 63) = 0.17778E-02 - dz( 64) = 0.18472E-02 - dz( 65) = 0.19252E-02 - dz( 66) = 0.20136E-02 - dz( 67) = 0.21149E-02 - dz( 68) = 0.22323E-02 - dz( 69) = 0.23706E-02 - dz( 70) = 0.25367E-02 - dz( 71) = 0.27410E-02 - dz( 72) = 0.30005E-02 - dz( 73) = 0.33448E-02 - dz( 74) = 0.38321E-02 - dz( 75) = 0.45991E-02 - dz( 76) = 0.60952E-02 - dz( 77) = 0.14875E-01 - dz( 78) = 0.23276E-01 - dz( 
79) = 0.24181E-01 - dz( 80) = 0.24731E-01 - dz( 81) = 0.25319E-01 - dz( 82) = 0.25952E-01 - dz( 83) = 0.26635E-01 - dz( 84) = 0.27375E-01 - dz( 85) = 0.28180E-01 - dz( 86) = 0.29061E-01 - dz( 87) = 0.30030E-01 - dz( 88) = 0.31103E-01 - dz( 89) = 0.32300E-01 - dz( 90) = 0.33647E-01 - dz( 91) = 0.35178E-01 - dz( 92) = 0.36940E-01 - dz( 93) = 0.38998E-01 - dz( 94) = 0.41445E-01 - dz( 95) = 0.44422E-01 - dz( 96) = 0.48154E-01 - dz( 97) = 0.53032E-01 - dz( 98) = 0.59808E-01 - dz( 99) = 0.70207E-01 - dz(100) = 0.89718E-01 - - hZLevel = dz * layer_thickness_total_max - -elseif(zLevel_thickness.eq.'POP_60_zLevel') then - - allocate(dz(60)) - dz( 1) = 5.00 - dz( 2) = 10.00 - dz( 3) = 10.00 - dz( 4) = 10.00 - dz( 5) = 10.00 - dz( 6) = 10.00 - dz( 7) = 10.00 - dz( 8) = 10.00 - dz( 9) = 10.00 - dz(10) = 10.00 - dz(11) = 10.00 - dz(12) = 10.00 - dz(13) = 10.00 - dz(14) = 10.00 - dz(15) = 10.00 - dz(16) = 10.00 - dz(17) = 10.10 - dz(18) = 10.38 - dz(19) = 10.81 - dz(20) = 11.37 - dz(21) = 12.05 - dz(22) = 12.87 - dz(23) = 13.83 - dz(24) = 14.96 - dz(25) = 16.28 - dz(26) = 17.83 - dz(27) = 19.64 - dz(28) = 21.75 - dz(29) = 24.23 - dz(30) = 27.13 - dz(31) = 30.56 - dz(32) = 34.59 - dz(33) = 39.36 - dz(34) = 44.99 - dz(35) = 51.64 - dz(36) = 59.49 - dz(37) = 68.70 - dz(38) = 79.44 - dz(39) = 91.82 - dz(40) = 105.88 - dz(41) = 121.50 - dz(42) = 138.36 - dz(43) = 155.93 - dz(44) = 173.45 - dz(45) = 190.06 - dz(46) = 204.94 - dz(47) = 217.51 - dz(48) = 227.51 - dz(49) = 235.03 - dz(50) = 240.38 - dz(51) = 244.02 - dz(52) = 246.39 - dz(53) = 247.88 - dz(54) = 248.78 - dz(55) = 249.32 - dz(56) = 249.62 - dz(57) = 249.80 - dz(58) = 249.89 - dz(59) = 249.94 - dz(60) = 249.97 - - hZLevel = dz - -elseif(zLevel_thickness.eq.'isopycnal_3layer') then - - allocate(dz(3)) - dz( 1) = 500.0 - dz( 2) = 1250.0 - dz( 3) = 3250.0 - - hZLevel = dz - -else - - print *, ' Incorrect choice of zLevel_thickness: ',zLevel_thickness - stop - -endif - - refBottomDepth(1) = hZLevel(1) - do k = 2,nVertLevelsMod 
- refBottomDepth(k) = refBottomDepth(k-1) + hZLevel(k) - end do - - write(6,*) ' k hZLevel refBottomDepth' - do k=1,nVertLevelsMod - write(6,'(i5,2f10.2)') k,hZLevel(k), refBottomDepth(k) - enddo - write(6,*) - -end subroutine get_dz - -end program map_to_basin diff --git a/grid_gen/basin/src/module_cullLoops.F b/grid_gen/basin/src/module_cullLoops.F deleted file mode 100644 index e070a1ce2..000000000 --- a/grid_gen/basin/src/module_cullLoops.F +++ /dev/null @@ -1,84 +0,0 @@ -module cullLoops - - public :: eliminateLoops - - contains - - subroutine eliminateLoops(nCells,nEdges,nVertices,maxEdges,vertexDegree, & - nEdgesOnCell, cellsOnCell, verticesOnEdge, cellsOnVertex, edgesOnCell, lonCell, latCell, & - xCell, yCell, zCell, xEdge, yEdge, zEdge, xVertex, yVertex, zVertex, & - KMT) - - implicit none - - ! intent (in) - integer :: nCells, nEdges, nVertices, maxEdges, vertexDegree - integer :: nEdgesOnCell(nCells), cellsOnCell(maxEdges,nCells), verticesOnEdge(2,nEdges) - integer :: cellsOnVertex(vertexDegree,nVertices), edgesOnCell(maxEdges,nCells) - real :: lonCell(nCells), latCell(nCells) - real :: xCell(nCells), yCell(nCells), zCell(nCells) - real :: xEdge(nEdges), yEdge(nEdges), zEdge(nEdges) - real :: xVertex(nVertices), yVertex(nVertices), zVertex(nVertices) - integer :: edgeList(nEdges), iCellMask(nCells) - - ! intent(inout) - integer, intent(inout) :: KMT(ncells) - - ! local workspace - integer :: iCell, jCell, oCell, lCell, iEdge, i, kCell, iSharedEdge, iStartEdge, iSave, iSweep - integer :: iEdgeCounter, nEdgesInLoop(nCells), iCellAhead, LeftTurns, RightTurns - logical :: connected, atBoundary, moveSouth, moveEast, atGrenwich - real :: lat, rlat, rlon, rCenter(3), s(3), t(3), q(3), rCross, mylon, mylat, pi - - integer, dimension(:), pointer :: cellStack - integer, dimension(:), pointer :: oceanMask - integer :: iCellStart, nStack, addedCells - real :: latStart, lonStart - - write(6,*) 'Culling inland seas.....' 
- - allocate(cellStack(nCells/2)) - allocate(oceanMask(nCells)) - - oceanMask = 0 - addedCells = 0 - - iCellStart = maxloc(kmt, dim=1) - - write(6,*) 'Starting index. ', iCellStart - write(6,*) 'lat, lon: ', latCell(iCellStart), lonCell(iCellStart) - write(6,*) 'Starting kmt: ', kmt(iCellStart) - - nStack = 1 - cellStack(nStack) = iCellStart - oceanMask(iCellStart) = 1 - addedCells = 1 - - do while(nStack > 0) - oCell = cellStack(nStack) - nStack = nStack - 1 - !write(6,*) ' Working on cell ', oCell, addedCells, nStack - - do i = 1, nEdgesOnCell(oCell) - iCell = cellsOnCell(i, oCell) - - if(kmt(iCell) > 0 .and. oceanMask(iCell) == 0) then - nStack = nStack + 1 - cellStack(nStack) = iCell - oceanMask(iCell) = 1 - addedCells = addedCells + 1 - end if - end do - end do - - where(oceanMask == 0) kmt(:) = 0 - - write(6,*) addedCells, ' total cells have been in the stack.' - write(6,*) 'Done culling inland seas.....' - - deallocate(cellStack) - deallocate(oceanMask) - - end subroutine eliminateLoops - -end module cullLoops diff --git a/grid_gen/basin/src/module_read_TS.F b/grid_gen/basin/src/module_read_TS.F deleted file mode 100644 index 10bc4350f..000000000 --- a/grid_gen/basin/src/module_read_TS.F +++ /dev/null @@ -1,143 +0,0 @@ -module read_TS - - integer :: rd_ncid, rd_ncids - integer :: rdDimIDt_lon - integer :: rdDimIDt_lat - integer :: rdDimIDdepth_t - integer :: rdVarIDt_lon - integer :: rdVarIDt_lat - integer :: rdVarIDdepth_t - integer :: rdVarIDTEMP - integer :: rdVarIDSALT - - integer :: rdLocalt_lon - integer :: rdLocalt_lat - integer :: rdLocaldepth_t - - contains - - subroutine read_TS_init(nx, ny, nz, fileNameT, fileNameS) - - implicit none - - include 'netcdf.inc' - - character(len=80), intent(in) :: fileNameT, fileNameS - integer, intent(out) :: nx, ny, nz - - integer :: nferr, nferrs, nferru - - nferr = nf_open(fileNameT, NF_SHARE, rd_ncid) - write(6,*) ' nferr ', nferr, rd_ncid - - ! - ! Get IDs for variable dimensions - ! 
- nferr = nf_inq_dimid(rd_ncid, 't_lon', rdDimIDt_lon) - write(6,*) ' nferr ', nferr, rdDimIDt_lon - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lon, rdLocalt_lon) - write(6,*) ' nferr ', nferr, rdLocalt_lon - nferr = nf_inq_dimid(rd_ncid, 't_lat', rdDimIDt_lat) - write(6,*) ' nferr ', nferr, rdDimIDt_lat - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lat, rdLocalt_lat) - write(6,*) ' nferr ', nferr, rdLocalt_lat - nferr = nf_inq_dimid(rd_ncid, 'depth_t', rdDimIDdepth_t) - write(6,*) ' nferr ', nferr, rdDimIDdepth_t - nferr = nf_inq_dimlen(rd_ncid, rdDimIDdepth_t, rdLocaldepth_t) - write(6,*) ' nferr ', nferr, rdLocaldepth_t - - nx = rdLocalt_lon - ny = rdLocalt_lat - nz = rdLocaldepth_t - - write(6,*) nx, ny, nz - - ! - ! Get IDs for variables - ! - nferr = nf_inq_varid(rd_ncid, 't_lon', rdVarIDt_lon) - write(6,*) ' nferr ', nferr, rdVarIDt_lon - nferr = nf_inq_varid(rd_ncid, 't_lat', rdVarIDt_lat) - write(6,*) ' nferr ', nferr, rdVarIDt_lat - nferr = nf_inq_varid(rd_ncid, 'depth_t', rdVarIDdepth_t) - write(6,*) ' nferr ', nferr, rdVarIDdepth_t - nferr = nf_inq_varid(rd_ncid, 'TEMP', rdVarIDTEMP) - write(6,*) ' nferr ', nferr, rdVarIDTEMP - - nferrs = nf_open(fileNameS, NF_SHARE, rd_ncids) - nferrs = nf_inq_varid(rd_ncids, 'SALT', rdVarIDSALT) - write(6,*) ' nferrs ', nferrs, rdVarIDSALT - - end subroutine read_TS_init - - subroutine read_TS_fields(t_lon, t_lat, depth_t, TEMP, SALT) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:), intent(out) :: t_lon, t_lat, depth_t - real (kind=4), dimension(:,:,:), intent(out) :: TEMP, SALT - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferr, nferrs, nferru - - start1(1) = 1 - count1(1) = rdLocalt_lon - nferr = nf_get_vara_real(rd_ncid, rdVarIDt_lon, start1, count1, t_lon) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDt_lon - - start1(1) = 1 - count1(1) = rdLocalt_lat - 
nferr = nf_get_vara_real(rd_ncid, rdVarIDt_lat, start1, count1, t_lat) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDt_lat - - start1(1) = 1 - count1(1) = rdLocaldepth_t - nferr = nf_get_vara_real(rd_ncid, rdVarIDdepth_t, start1, count1, depth_t) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDdepth_t - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - count3(1) = rdLocalt_lon - count3(2) = rdLocalt_lat - count3(3) = rdLocaldepth_t - nferr = nf_get_vara_real(rd_ncid, rdVarIDTEMP, start3, count3, TEMP) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDTEMP - write(6,*) ' temperature' , minval(TEMP), maxval(TEMP) - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - count3(1) = rdLocalt_lon - count3(2) = rdLocalt_lat - count3(3) = rdLocaldepth_t - nferrs = nf_get_vara_real(rd_ncids, rdVarIDSALT, start3, count3, SALT) - write(6,*) ' nferrs ', nferrs, rd_ncids, rdVarIDSALT - write(6,*) ' salinity' , minval(SALT), maxval(SALT) - - end subroutine read_TS_fields - - - subroutine read_TS_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr, nferrs - - nferr = nf_close(rd_ncid) - write(6,*) ' nferr ', nferr - - nferrs = nf_close(rd_ncids) - write(6,*) ' nferrs ', nferrs - - end subroutine read_TS_finalize - -end module read_TS diff --git a/grid_gen/basin/src/module_read_U.F b/grid_gen/basin/src/module_read_U.F deleted file mode 100644 index b794e5ae2..000000000 --- a/grid_gen/basin/src/module_read_U.F +++ /dev/null @@ -1,130 +0,0 @@ -module read_U - - integer :: rd_ncidu - integer :: rdDimIDu_lon - integer :: rdDimIDu_lat - integer :: rdDimIDdepth_t - integer :: rdVarIDu_lon - integer :: rdVarIDu_lat - integer :: rdVarIDdepth_t - integer :: rdVarIDTAUX - integer :: rdVarIDTAUY - - integer :: rdLocalu_lon - integer :: rdLocalu_lat - integer :: rdLocaldepth_t - - contains - - subroutine read_U_init(nx, ny, nz, fileNameU) - - implicit none - - include 'netcdf.inc' - - character(len=80), intent(in) :: fileNameU - integer, intent(out) :: nx, ny, nz - - integer 
:: nferru - - nferru = nf_open(fileNameU, NF_SHARE, rd_ncidu) - write(6,*) ' nferru ', nferru, rd_ncidu - - ! - ! Get IDs for variable dimensions - ! - nferru = nf_inq_dimid(rd_ncidu, 'u_lon', rdDimIDu_lon) - write(6,*) ' nferru ', nferru, rdDimIDu_lon - nferru = nf_inq_dimlen(rd_ncidu, rdDimIDu_lon, rdLocalu_lon) - write(6,*) ' nferru ', nferru, rdLocalu_lon - nferru = nf_inq_dimid(rd_ncidu, 'u_lat', rdDimIDu_lat) - write(6,*) ' nferru ', nferru, rdDimIDu_lat - nferru = nf_inq_dimlen(rd_ncidu, rdDimIDu_lat, rdLocalu_lat) - write(6,*) ' nferru ', nferru, rdLocalu_lat - nferru = nf_inq_dimid(rd_ncidu, 'depth_t', rdDimIDdepth_t) - write(6,*) ' nferru ', nferru, rdDimIDdepth_t - nferru = nf_inq_dimlen(rd_ncidu, rdDimIDdepth_t, rdLocaldepth_t) - write(6,*) ' nferru ', nferru, rdLocaldepth_t - - nx = rdLocalu_lon - ny = rdLocalu_lat - nz = rdLocaldepth_t - - write(6,*) nx, ny, nz - - ! - ! Get IDs for variables - ! - nferru = nf_inq_varid(rd_ncidu, 'u_lon', rdVarIDu_lon) - write(6,*) ' nferru ', nferru, rdVarIDu_lon - nferru = nf_inq_varid(rd_ncidu, 'u_lat', rdVarIDu_lat) - write(6,*) ' nferru ', nferru, rdVarIDu_lat - nferru = nf_inq_varid(rd_ncidu, 'depth_t', rdVarIDdepth_t) - write(6,*) ' nferru ', nferru, rdVarIDdepth_t - - nferru = nf_inq_varid(rd_ncidu, 'TAUX', rdVarIDTAUX) - nferru = nf_inq_varid(rd_ncidu, 'TAUY', rdVarIDTAUY) - write(6,*) ' nferru ', nferru, rdVarIDTAUX, rdVarIDTAUY - - end subroutine read_U_init - - subroutine read_U_fields(u_lon, u_lat, depth_t, TAUX, TAUY) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:), intent(out) :: u_lon, u_lat, depth_t - real (kind=4), dimension(:,:), intent(out) :: TAUX, TAUY - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferru - - start1(1) = 1 - count1(1) = rdLocalu_lon - nferru = nf_get_vara_real(rd_ncidu, rdVarIDu_lon, start1, count1, u_lon) - 
write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDu_lon - - start1(1) = 1 - count1(1) = rdLocalu_lat - nferru = nf_get_vara_real(rd_ncidu, rdVarIDu_lat, start1, count1, u_lat) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDu_lat - - start1(1) = 1 - count1(1) = rdLocaldepth_t - nferru = nf_get_vara_real(rd_ncidu, rdVarIDdepth_t, start1, count1, depth_t) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDdepth_t - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalu_lon - count2(2) = rdLocalu_lat - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUX, start2, count2, TAUX) - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUY, start2, count2, TAUY) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDTAUX, rdVarIDTAUY - write(6,*) ' TAUX' , minval(TAUX), maxval(TAUX) - write(6,*) ' TAUY' , minval(TAUY), maxval(TAUY) - - - end subroutine read_U_fields - - - subroutine read_U_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferru - - nferru = nf_close(rd_ncidu) - write(6,*) ' nferru ', nferru - - - end subroutine read_U_finalize - -end module read_U diff --git a/grid_gen/basin/src/module_read_monthly.F b/grid_gen/basin/src/module_read_monthly.F deleted file mode 100644 index dbb962125..000000000 --- a/grid_gen/basin/src/module_read_monthly.F +++ /dev/null @@ -1,154 +0,0 @@ -module read_MONTHLY - - private - - public :: read_MONTHLY_init, read_MONTHLY_fields, read_MONTHLY_finalize - - integer :: rd_ncid, rd_ncids, rd_ncidu - integer :: rdDimIDt_lon - integer :: rdDimIDt_lat - integer :: rdDimIDdepth_t - integer :: rdVarIDt_lon - integer :: rdVarIDt_lat - integer :: rdVarIDdepth_t - integer :: rdVarIDTEMP - integer :: rdVarIDSALT - integer :: rdVarIDTAUX - integer :: rdVarIDTAUY - - integer :: rdLocalt_lon - integer :: rdLocalt_lat - integer :: rdLocaldepth_t - - contains - - subroutine read_MONTHLY_init(nx, ny, nz, fileNameT, fileNameS, fileNameU) - - implicit none - - include 'netcdf.inc' - - character(len=80), intent(in) :: fileNameT, fileNameS, fileNameU - 
integer, intent(out) :: nx, ny, nz - - integer :: nferr, nferrs, nferru - - nferr = nf_open(fileNameT, NF_SHARE, rd_ncid) - write(6,*) ' nferr ', nferr, rd_ncid - - write(6,*) fileNameT - write(6,*) fileNameS - write(6,*) fileNameU - - ! - ! Get IDs for variable dimensions - ! - nferr = nf_inq_dimid(rd_ncid, 't_lon', rdDimIDt_lon) - write(6,*) ' nferr ', nferr, rdDimIDt_lon - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lon, rdLocalt_lon) - write(6,*) ' nferr ', nferr, rdLocalt_lon - nferr = nf_inq_dimid(rd_ncid, 't_lat', rdDimIDt_lat) - write(6,*) ' nferr ', nferr, rdDimIDt_lat - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lat, rdLocalt_lat) - write(6,*) ' nferr ', nferr, rdLocalt_lat - nferr = nf_inq_dimid(rd_ncid, 'depth_t', rdDimIDdepth_t) - write(6,*) ' nferr ', nferr, rdDimIDdepth_t - nferr = nf_inq_dimlen(rd_ncid, rdDimIDdepth_t, rdLocaldepth_t) - write(6,*) ' nferr ', nferr, rdLocaldepth_t - - nx = rdLocalt_lon - ny = rdLocalt_lat - nz = rdLocaldepth_t - - write(6,*) nx, ny, nz - - ! - ! Get IDs for variables - ! 
- nferr = nf_inq_varid(rd_ncid, 't_lon', rdVarIDt_lon) - write(6,*) ' nferr long ', nferr, rdVarIDt_lon - nferr = nf_inq_varid(rd_ncid, 't_lat', rdVarIDt_lat) - write(6,*) ' nferr lat ', nferr, rdVarIDt_lat - nferr = nf_inq_varid(rd_ncid, 'depth_t', rdVarIDdepth_t) - write(6,*) ' nferr depth ', nferr, rdVarIDdepth_t - nferr = nf_inq_varid(rd_ncid, 'TEMP', rdVarIDTEMP) - write(6,*) ' nferr TEMP ', nferr, rdVarIDTEMP - - nferrs = nf_open(fileNameS, NF_SHARE, rd_ncids) - nferrs = nf_inq_varid(rd_ncids, 'SALT', rdVarIDSALT) - write(6,*) ' nferrs SALT ', nferrs, rdVarIDSALT - - nferru = nf_open(fileNameU, NF_SHARE, rd_ncidu) - nferru = nf_inq_varid(rd_ncidu, 'TAUX', rdVarIDTAUX) - nferru = nf_inq_varid(rd_ncidu, 'TAUY', rdVarIDTAUY) - write(6,*) ' nferru ', nferru, rdVarIDTAUX, rdVarIDTAUY - - end subroutine read_MONTHLY_init - - subroutine read_MONTHLY_fields(TEMP, SALT, TAUX, TAUY) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:,:), intent(out) :: TEMP, SALT - real (kind=4), dimension(:,:), intent(out) :: TAUX, TAUY - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferr, nferrs, nferru - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalt_lon - count2(2) = rdLocalt_lat - nferr = nf_get_vara_real(rd_ncid, rdVarIDTEMP, start2, count2, TEMP) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDTEMP - write(6,*) ' temperature' , minval(TEMP), maxval(TEMP) - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalt_lon - count2(2) = rdLocalt_lat - nferrs = nf_get_vara_real(rd_ncids, rdVarIDSALT, start2, count2, SALT) - write(6,*) ' nferrs ', nferrs, rd_ncids, rdVarIDSALT - write(6,*) ' salinity' , minval(SALT), maxval(SALT) - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalt_lon - count2(2) = rdLocalt_lat - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUX, start2, count2, TAUX) - nferru = nf_get_vara_real(rd_ncidu, 
rdVarIDTAUY, start2, count2, TAUY) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDTAUX, rdVarIDTAUY - write(6,*) ' TAUX' , minval(TAUX), maxval(TAUX) - write(6,*) ' TAUY' , minval(TAUY), maxval(TAUY) - - - end subroutine read_MONTHLY_fields - - - subroutine read_MONTHLY_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr, nferrs, nferru - - nferr = nf_close(rd_ncid) - write(6,*) ' nferr ', nferr - - nferrs = nf_close(rd_ncids) - write(6,*) ' nferrs ', nferrs - - nferru = nf_close(rd_ncidu) - write(6,*) ' nferru ', nferru - - - end subroutine read_MONTHLY_finalize - -end module read_MONTHLY diff --git a/grid_gen/basin/src/module_read_netcdf.F b/grid_gen/basin/src/module_read_netcdf.F deleted file mode 100644 index b4072c3e5..000000000 --- a/grid_gen/basin/src/module_read_netcdf.F +++ /dev/null @@ -1,523 +0,0 @@ -module read_netcdf - - integer :: rd_ncid - integer :: rdDimIDTime - integer :: rdDimIDnCells - integer :: rdDimIDnEdges - integer :: rdDimIDnVertices - integer :: rdDimIDmaxEdges - integer :: rdDimIDmaxEdges2 - integer :: rdDimIDnVertLevels - integer :: rdDimIDTWO - integer :: rdDimIDvertexDegree - integer :: rdVarIDlatCell - integer :: rdVarIDlonCell - integer :: rdVarIDmeshDensity - integer :: rdVarIDxCell - integer :: rdVarIDyCell - integer :: rdVarIDzCell - integer :: rdVarIDindexToCellID - integer :: rdVarIDlatEdge - integer :: rdVarIDlonEdge - integer :: rdVarIDxEdge - integer :: rdVarIDyEdge - integer :: rdVarIDzEdge - integer :: rdVarIDindexToEdgeID - integer :: rdVarIDlatVertex - integer :: rdVarIDlonVertex - integer :: rdVarIDxVertex - integer :: rdVarIDyVertex - integer :: rdVarIDzVertex - integer :: rdVarIDindexToVertexID - integer :: rdVarIDcellsOnEdge - integer :: rdVarIDnEdgesOnCell - integer :: rdVarIDnEdgesOnEdge - integer :: rdVarIDedgesOnCell - integer :: rdVarIDedgesOnEdge - integer :: rdVarIDweightsOnEdge - integer :: rdVarIDdvEdge - integer :: rdVarIDdcEdge - integer :: rdVarIDangleEdge - integer :: 
rdVarIDareaCell - integer :: rdVarIDareaTriangle - integer :: rdVarIDcellsOnCell - integer :: rdVarIDverticesOnCell - integer :: rdVarIDverticesOnEdge - integer :: rdVarIDedgesOnVertex - integer :: rdVarIDcellsOnVertex - integer :: rdVarIDkiteAreasOnVertex - integer :: rdVarIDfEdge - integer :: rdVarIDfVertex - integer :: rdVarIDbottomDepth - integer :: rdVarIDnormalVelocity - integer :: rdVarIDtangentialVelocity - integer :: rdVarIDlayerThickness - - integer :: rdLocalnCells - integer :: rdLocalnEdges - integer :: rdLocalnVertices - integer :: rdLocalmaxEdges - integer :: rdLocalmaxEdges2 - integer :: rdLocalnVertLevels - integer :: rdLocalTWO - integer :: rdLocalvertexDegree - - contains - - subroutine read_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - maxEdges2, & - nVertLevels, & - TWO, & - vertexDegree & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(out) :: nCells - integer, intent(out) :: nEdges - integer, intent(out) :: nVertices - integer, intent(out) :: maxEdges - integer, intent(out) :: maxEdges2 - integer, intent(out) :: nVertLevels - integer, intent(out) :: TWO - integer, intent(out) :: vertexDegree - - integer :: nferr - - - nferr = nf_open('grid.nc', NF_SHARE, rd_ncid) - - ! - ! Get IDs for variable dimensions - ! 
- nferr = nf_inq_unlimdim(rd_ncid, rdDimIDTime) - nferr = nf_inq_dimid(rd_ncid, 'nCells', rdDimIDnCells) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnCells, rdLocalnCells) - nferr = nf_inq_dimid(rd_ncid, 'nEdges', rdDimIDnEdges) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnEdges, rdLocalnEdges) - nferr = nf_inq_dimid(rd_ncid, 'nVertices', rdDimIDnVertices) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnVertices, rdLocalnVertices) - nferr = nf_inq_dimid(rd_ncid, 'maxEdges', rdDimIDmaxEdges) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDmaxEdges, rdLocalmaxEdges) - nferr = nf_inq_dimid(rd_ncid, 'maxEdges2', rdDimIDmaxEdges2) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDmaxEdges2, rdLocalmaxEdges2) - nferr = nf_inq_dimid(rd_ncid, 'nVertLevels', rdDimIDnVertLevels) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnVertLevels, rdLocalnVertLevels) - nferr = nf_inq_dimid(rd_ncid, 'vertexDegree', rdDimIDvertexDegree) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDvertexDegree, rdLocalvertexDegree) - nferr = nf_inq_dimid(rd_ncid, 'TWO', rdDimIDTWO) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDTWO, rdLocalTWO) - - - nCells = rdLocalnCells - nEdges = rdLocalnEdges - nVertices = rdLocalnVertices - maxEdges = rdLocalmaxEdges - maxEdges2 = rdLocalmaxEdges2 - nVertLevels = rdLocalnVertLevels - vertexDegree = rdLocalvertexDegree - TWO = rdLocalTWO - - ! - ! Get IDs for variables - ! 
- nferr = nf_inq_varid(rd_ncid, 'latCell', rdVarIDlatCell) - nferr = nf_inq_varid(rd_ncid, 'lonCell', rdVarIDlonCell) - nferr = nf_inq_varid(rd_ncid, 'meshDensity', rdVarIDmeshDensity) - nferr = nf_inq_varid(rd_ncid, 'xCell', rdVarIDxCell) - nferr = nf_inq_varid(rd_ncid, 'yCell', rdVarIDyCell) - nferr = nf_inq_varid(rd_ncid, 'zCell', rdVarIDzCell) - nferr = nf_inq_varid(rd_ncid, 'indexToCellID', rdVarIDindexToCellID) - nferr = nf_inq_varid(rd_ncid, 'latEdge', rdVarIDlatEdge) - nferr = nf_inq_varid(rd_ncid, 'lonEdge', rdVarIDlonEdge) - nferr = nf_inq_varid(rd_ncid, 'xEdge', rdVarIDxEdge) - nferr = nf_inq_varid(rd_ncid, 'yEdge', rdVarIDyEdge) - nferr = nf_inq_varid(rd_ncid, 'zEdge', rdVarIDzEdge) - nferr = nf_inq_varid(rd_ncid, 'indexToEdgeID', rdVarIDindexToEdgeID) - nferr = nf_inq_varid(rd_ncid, 'latVertex', rdVarIDlatVertex) - nferr = nf_inq_varid(rd_ncid, 'lonVertex', rdVarIDlonVertex) - nferr = nf_inq_varid(rd_ncid, 'xVertex', rdVarIDxVertex) - nferr = nf_inq_varid(rd_ncid, 'yVertex', rdVarIDyVertex) - nferr = nf_inq_varid(rd_ncid, 'zVertex', rdVarIDzVertex) - nferr = nf_inq_varid(rd_ncid, 'indexToVertexID', rdVarIDindexToVertexID) - nferr = nf_inq_varid(rd_ncid, 'cellsOnEdge', rdVarIDcellsOnEdge) - nferr = nf_inq_varid(rd_ncid, 'nEdgesOnCell', rdVarIDnEdgesOnCell) - nferr = nf_inq_varid(rd_ncid, 'nEdgesOnEdge', rdVarIDnEdgesOnEdge) - nferr = nf_inq_varid(rd_ncid, 'edgesOnCell', rdVarIDedgesOnCell) - nferr = nf_inq_varid(rd_ncid, 'edgesOnEdge', rdVarIDedgesOnEdge) - nferr = nf_inq_varid(rd_ncid, 'weightsOnEdge', rdVarIDweightsOnEdge) - nferr = nf_inq_varid(rd_ncid, 'dvEdge', rdVarIDdvEdge) - nferr = nf_inq_varid(rd_ncid, 'dcEdge', rdVarIDdcEdge) - nferr = nf_inq_varid(rd_ncid, 'angleEdge', rdVarIDangleEdge) - nferr = nf_inq_varid(rd_ncid, 'areaCell', rdVarIDareaCell) - nferr = nf_inq_varid(rd_ncid, 'areaTriangle', rdVarIDareaTriangle) - nferr = nf_inq_varid(rd_ncid, 'cellsOnCell', rdVarIDcellsOnCell) - nferr = nf_inq_varid(rd_ncid, 'verticesOnCell', 
rdVarIDverticesOnCell) - nferr = nf_inq_varid(rd_ncid, 'verticesOnEdge', rdVarIDverticesOnEdge) - nferr = nf_inq_varid(rd_ncid, 'edgesOnVertex', rdVarIDedgesOnVertex) - nferr = nf_inq_varid(rd_ncid, 'cellsOnVertex', rdVarIDcellsOnVertex) - nferr = nf_inq_varid(rd_ncid, 'kiteAreasOnVertex', rdVarIDkiteAreasOnVertex) - nferr = nf_inq_varid(rd_ncid, 'fEdge', rdVarIDfEdge) - nferr = nf_inq_varid(rd_ncid, 'fVertex', rdVarIDfVertex) - nferr = nf_inq_varid(rd_ncid, 'bottomDepth', rdVarIDbottomDepth) - nferr = nf_inq_varid(rd_ncid, 'normalVelocity', rdVarIDnormalVelocity) - nferr = nf_inq_varid(rd_ncid, 'tangentialVelocity', rdVarIDtangentialVelocity) - nferr = nf_inq_varid(rd_ncid, 'layerThickness', rdVarIDlayerThickness) - - end subroutine read_netcdf_init - - - subroutine read_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - bottomDepth, & - normalVelocity, & - tangentialVelocity, & - layerThickness & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: time - real (kind=8), dimension(:), intent(out) :: latCell - real (kind=8), dimension(:), intent(out) :: lonCell - real (kind=8), dimension(:), intent(out) :: meshDensity - real (kind=8), dimension(:), intent(out) :: xCell - real (kind=8), dimension(:), intent(out) :: yCell - real (kind=8), dimension(:), intent(out) :: zCell - integer, dimension(:), intent(out) :: indexToCellID - real (kind=8), dimension(:), intent(out) :: latEdge - real (kind=8), 
dimension(:), intent(out) :: lonEdge - real (kind=8), dimension(:), intent(out) :: xEdge - real (kind=8), dimension(:), intent(out) :: yEdge - real (kind=8), dimension(:), intent(out) :: zEdge - integer, dimension(:), intent(out) :: indexToEdgeID - real (kind=8), dimension(:), intent(out) :: latVertex - real (kind=8), dimension(:), intent(out) :: lonVertex - real (kind=8), dimension(:), intent(out) :: xVertex - real (kind=8), dimension(:), intent(out) :: yVertex - real (kind=8), dimension(:), intent(out) :: zVertex - integer, dimension(:), intent(out) :: indexToVertexID - integer, dimension(:,:), intent(out) :: cellsOnEdge - integer, dimension(:), intent(out) :: nEdgesOnCell - integer, dimension(:), intent(out) :: nEdgesOnEdge - integer, dimension(:,:), intent(out) :: edgesOnCell - integer, dimension(:,:), intent(out) :: edgesOnEdge - real (kind=8), dimension(:,:), intent(out) :: weightsOnEdge - real (kind=8), dimension(:), intent(out) :: dvEdge - real (kind=8), dimension(:), intent(out) :: dcEdge - real (kind=8), dimension(:), intent(out) :: angleEdge - real (kind=8), dimension(:), intent(out) :: areaCell - real (kind=8), dimension(:), intent(out) :: areaTriangle - integer, dimension(:,:), intent(out) :: cellsOnCell - integer, dimension(:,:), intent(out) :: verticesOnCell - integer, dimension(:,:), intent(out) :: verticesOnEdge - integer, dimension(:,:), intent(out) :: edgesOnVertex - integer, dimension(:,:), intent(out) :: cellsOnVertex - real (kind=8), dimension(:,:), intent(out) :: kiteAreasOnVertex - real (kind=8), dimension(:), intent(out) :: fEdge - real (kind=8), dimension(:), intent(out) :: fVertex - real (kind=8), dimension(:), intent(out) :: bottomDepth - real (kind=8), dimension(:,:,:), intent(out) :: normalVelocity - real (kind=8), dimension(:,:,:), intent(out) :: tangentialVelocity - real (kind=8), dimension(:,:,:), intent(out) :: layerThickness - - logical :: meshDensityPresent - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, 
dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - meshDensityPresent = .false. - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDlatCell, start1, count1, latCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDlonCell, start1, count1, lonCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_inq_varid(rd_ncid, 'meshDensity', rdVarIDmeshDensity) - if(nferr.eq.0) then - nferr = nf_get_vara_double(rd_ncid, rdVarIDmeshDensity, start1, count1, meshDensity) - else - meshDensity=1.0 - write(6,*) ' mesh density not present ', nferr, rdVarIDmeshDensity - endif - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDxCell, start1, count1, xCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDyCell, start1, count1, yCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDzCell, start1, count1, zCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDindexToCellID, start1, count1, indexToCellID) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDlatEdge, start1, count1, latEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDlonEdge, start1, count1, lonEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDxEdge, start1, count1, xEdge) - - 
start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDyEdge, start1, count1, yEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDzEdge, start1, count1, zEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDindexToEdgeID, start1, count1, indexToEdgeID) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDlatVertex, start1, count1, latVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDlonVertex, start1, count1, lonVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDxVertex, start1, count1, xVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDyVertex, start1, count1, yVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDzVertex, start1, count1, zVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_int(rd_ncid, rdVarIDindexToVertexID, start1, count1, indexToVertexID) - - start2(2) = 1 - count2( 1) = rdLocalTWO - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDcellsOnEdge, start2, count2, cellsOnEdge) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDnEdgesOnCell, start1, count1, nEdgesOnCell) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDnEdgesOnEdge, start1, count1, nEdgesOnEdge) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges - count2( 2) = 
rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDedgesOnCell, start2, count2, edgesOnCell) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges2 - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDedgesOnEdge, start2, count2, edgesOnEdge) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges2 - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDweightsOnEdge, start2, count2, weightsOnEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDdvEdge, start1, count1, dvEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDdcEdge, start1, count1, dcEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDangleEdge, start1, count1, angleEdge) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDareaCell, start1, count1, areaCell) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDareaTriangle, start1, count1, areaTriangle) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges - count2( 2) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDcellsOnCell, start2, count2, cellsOnCell) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges - count2( 2) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDverticesOnCell, start2, count2, verticesOnCell) - - start2(2) = 1 - count2( 1) = rdLocalTWO - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDverticesOnEdge, start2, count2, verticesOnEdge) - - start2(2) = 1 - count2( 1) = rdLocalvertexDegree - count2( 2) = rdLocalnVertices - nferr = nf_get_vara_int(rd_ncid, rdVarIDedgesOnVertex, start2, count2, edgesOnVertex) - - start2(2) = 1 - count2( 1) = rdLocalvertexDegree - count2( 2) = rdLocalnVertices - nferr = 
nf_get_vara_int(rd_ncid, rdVarIDcellsOnVertex, start2, count2, cellsOnVertex) - - start2(2) = 1 - count2( 1) = rdLocalvertexDegree - count2( 2) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDbottomDepth, start1, count1, bottomDepth) - - start3(3) = time - count3( 1) = rdLocalnVertLevels - count3( 2) = rdLocalnEdges - count3( 3) = 1 - nferr = nf_get_vara_double(rd_ncid, rdVarIDnormalVelocity, start3, count3, normalVelocity) - - start3(3) = time - count3( 1) = rdLocalnVertLevels - count3( 2) = rdLocalnEdges - count3( 3) = 1 - nferr = nf_get_vara_double(rd_ncid, rdVarIDtangentialVelocity, start3, count3, tangentialVelocity) - - start3(3) = time - count3( 1) = rdLocalnVertLevels - count3( 2) = rdLocalnCells - count3( 3) = 1 - nferr = nf_get_vara_double(rd_ncid, rdVarIDlayerThickness, start3, count3, layerThickness) - - end subroutine read_netcdf_fields - - - subroutine read_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(rd_ncid) - - end subroutine read_netcdf_finalize - -end module read_netcdf diff --git a/grid_gen/basin/src/module_read_topo.F b/grid_gen/basin/src/module_read_topo.F deleted file mode 100644 index 183aa7576..000000000 --- a/grid_gen/basin/src/module_read_topo.F +++ /dev/null @@ -1,109 +0,0 @@ -module read_topo - - integer :: rd_ncid - integer :: rdDimIDnx - integer :: rdDimIDny - integer :: rdVarIDz - integer :: rdVarIDx - integer :: rdVarIDy - - integer :: rdLocalnx - integer :: rdLocalny - - contains - 
- subroutine read_topo_init( nx, ny) - - implicit none - - include 'netcdf.inc' - - integer, intent(out) :: nx, ny - - integer :: nferr - - - nferr = nf_open('topo/ETOPO2v2c_f4.nc', NF_SHARE, rd_ncid) - write(6,*) ' nferr ', nferr, rd_ncid - - ! - ! Get IDs for variable dimensions - ! - nferr = nf_inq_dimid(rd_ncid, 'x', rdDimIDnx) - write(6,*) ' nferr ', nferr, rdDimIDnx - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnx, rdLocalnx) - write(6,*) ' nferr ', nferr, rdLocalnx - nferr = nf_inq_dimid(rd_ncid, 'y', rdDimIDny) - write(6,*) ' nferr ', nferr, rdDimIDny - nferr = nf_inq_dimlen(rd_ncid, rdDimIDny, rdLocalny) - write(6,*) ' nferr ', nferr, rdLocalny - - nx = rdLocalnx - ny = rdLocalny - - write(6,*) nx, ny - - ! - ! Get IDs for variables - ! - nferr = nf_inq_varid(rd_ncid, 'x', rdVarIDx) - write(6,*) ' nferr ', nferr, rdVarIDx - nferr = nf_inq_varid(rd_ncid, 'y', rdVarIDy) - write(6,*) ' nferr ', nferr, rdVarIDy - nferr = nf_inq_varid(rd_ncid, 'z', rdVarIDz) - write(6,*) ' nferr ', nferr, rdVarIDz - - end subroutine read_topo_init - - - subroutine read_topo_fields(x,y,z) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:), intent(out) :: x,y - real (kind=4), dimension(:,:), intent(out) :: z - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferr - - start1(1) = 1 - count1(1) = rdLocalnx - nferr = nf_get_vara_real(rd_ncid, rdVarIDx, start1, count1, x) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDx - - start1(1) = 1 - count1(1) = rdLocalny - nferr = nf_get_vara_real(rd_ncid, rdVarIDy, start1, count1, y) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDy - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalnx - count2(2) = rdLocalny - nferr = nf_get_vara_real(rd_ncid, rdVarIDz, start2, count2, z) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDz, rdLocalnx - - end subroutine read_topo_fields - - - subroutine 
read_topo_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(rd_ncid) - write(6,*) ' nferr ', nferr - - - end subroutine read_topo_finalize - -end module read_topo diff --git a/grid_gen/basin/src/module_write_netcdf.F b/grid_gen/basin/src/module_write_netcdf.F deleted file mode 100644 index 8e6fdb5ad..000000000 --- a/grid_gen/basin/src/module_write_netcdf.F +++ /dev/null @@ -1,694 +0,0 @@ -module write_netcdf - - integer :: wr_ncid - integer :: wrDimIDTime - integer :: wrDimIDnCells - integer :: wrDimIDnEdges - integer :: wrDimIDnVertices - integer :: wrDimIDmaxEdges - integer :: wrDimIDmaxEdges2 - integer :: wrDimIDTWO - integer :: wrDimIDvertexDegree - integer :: wrDimIDnVertLevels - integer :: wrVarIDlatCell - integer :: wrVarIDlonCell - integer :: wrVarIDmeshDensity - integer :: wrVarIDxCell - integer :: wrVarIDyCell - integer :: wrVarIDzCell - integer :: wrVarIDindexToCellID - integer :: wrVarIDlatEdge - integer :: wrVarIDlonEdge - integer :: wrVarIDxEdge - integer :: wrVarIDyEdge - integer :: wrVarIDzEdge - integer :: wrVarIDindexToEdgeID - integer :: wrVarIDlatVertex - integer :: wrVarIDlonVertex - integer :: wrVarIDxVertex - integer :: wrVarIDyVertex - integer :: wrVarIDzVertex - integer :: wrVarIDindexToVertexID - integer :: wrVarIDmaxLevelCell - integer :: wrVarIDcellsOnEdge - integer :: wrVarIDnEdgesOnCell - integer :: wrVarIDnEdgesOnEdge - integer :: wrVarIDedgesOnCell - integer :: wrVarIDedgesOnEdge - integer :: wrVarIDweightsOnEdge - integer :: wrVarIDdvEdge - integer :: wrVarIDdcEdge - integer :: wrVarIDangleEdge - integer :: wrVarIDareaCell - integer :: wrVarIDareaTriangle - integer :: wrVarIDcellsOnCell - integer :: wrVarIDverticesOnCell - integer :: wrVarIDverticesOnEdge - integer :: wrVarIDedgesOnVertex - integer :: wrVarIDcellsOnVertex - integer :: wrVarIDkiteAreasOnVertex - integer :: wrVarIDfEdge - integer :: wrVarIDfVertex - integer :: wrVarIDfCell - integer :: wrVarIDbottomDepth - integer :: 
wrVarIDnormalVelocity - integer :: wrVarIDboundaryEdge - integer :: wrVarIDboundaryVertex - integer :: wrVarIDsurfaceWindStress - integer :: wrVarIDsurfaceWindStressZonal - integer :: wrVarIDsurfaceWindStressMeridional - integer :: wrVarIDlayerThickness - integer :: wrVarIDdensity - integer :: wrVarIDtemperature - integer :: wrVarIDsalinity - integer :: wrVarIDtracer1 - integer :: wrVarIDtemperatureRestore - integer :: wrVarIDsalinityRestore - integer :: wrVarIDboundaryLayerDepth - integer :: wrVarIDrefBottomDepth - - integer :: wrLocalnCells - integer :: wrLocalnEdges - integer :: wrLocalnVertices - integer :: wrLocalmaxEdges - integer :: wrLocalnVertLevels - integer :: wrLocalvertexDegree - - contains - - subroutine write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - nVertLevels, & - vertexDegree, & - sphere_radius, & - on_a_sphere & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: nCells - integer, intent(in) :: nEdges - integer, intent(in) :: nVertices - integer, intent(in) :: maxEdges - integer, intent(in) :: nVertLevels - integer, intent(in) :: vertexDegree - character (len=16) :: on_a_sphere - real*8 :: sphere_radius - - - integer :: nferr - integer, dimension(10) :: dimlist - - - wrLocalnCells = nCells - wrLocalnEdges = nEdges - wrLocalnVertices = nVertices - wrLocalmaxEdges = maxEdges - wrLocalnVertLevels = nVertLevels - wrLocalvertexDegree = vertexDegree - - nferr = nf_create('ocean.nc', IOR(NF_CLOBBER,NF_64BIT_OFFSET), wr_ncid) - - ! - ! Define dimensions - ! 
- nferr = nf_def_dim(wr_ncid, 'nCells', nCells, wrDimIDnCells) - nferr = nf_def_dim(wr_ncid, 'nEdges', nEdges, wrDimIDnEdges) - nferr = nf_def_dim(wr_ncid, 'nVertices', nVertices, wrDimIDnVertices) - nferr = nf_def_dim(wr_ncid, 'maxEdges', maxEdges, wrDimIDmaxEdges) - nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) - nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) - nferr = nf_def_dim(wr_ncid, 'vertexDegree', vertexDegree, wrDimIDvertexDegree) - nferr = nf_def_dim(wr_ncid, 'nVertLevels', nVertLevels, wrDimIDnVertLevels) - nferr = nf_def_dim(wr_ncid, 'Time', NF_UNLIMITED, wrDimIDTime) - - ! - ! Define variables - ! - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'latCell', NF_DOUBLE, 1, dimlist, wrVarIDlatCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'lonCell', NF_DOUBLE, 1, dimlist, wrVarIDlonCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'xCell', NF_DOUBLE, 1, dimlist, wrVarIDxCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'yCell', NF_DOUBLE, 1, dimlist, wrVarIDyCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'zCell', NF_DOUBLE, 1, dimlist, wrVarIDzCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'indexToCellID', NF_INT, 1, dimlist, wrVarIDindexToCellID) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'latEdge', NF_DOUBLE, 1, dimlist, wrVarIDlatEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'lonEdge', NF_DOUBLE, 1, dimlist, wrVarIDlonEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'xEdge', NF_DOUBLE, 1, dimlist, wrVarIDxEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'yEdge', NF_DOUBLE, 1, dimlist, wrVarIDyEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'zEdge', NF_DOUBLE, 1, dimlist, wrVarIDzEdge) - dimlist( 1) = wrDimIDnEdges - nferr = 
nf_def_var(wr_ncid, 'indexToEdgeID', NF_INT, 1, dimlist, wrVarIDindexToEdgeID) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'latVertex', NF_DOUBLE, 1, dimlist, wrVarIDlatVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'lonVertex', NF_DOUBLE, 1, dimlist, wrVarIDlonVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'xVertex', NF_DOUBLE, 1, dimlist, wrVarIDxVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'yVertex', NF_DOUBLE, 1, dimlist, wrVarIDyVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'zVertex', NF_DOUBLE, 1, dimlist, wrVarIDzVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'indexToVertexID', NF_INT, 1, dimlist, wrVarIDindexToVertexID) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'maxLevelCell', NF_INT, 1, dimlist, wrVarIDmaxLevelCell) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'cellsOnEdge', NF_INT, 2, dimlist, wrVarIDcellsOnEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'nEdgesOnCell', NF_INT, 1, dimlist, wrVarIDnEdgesOnCell) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'nEdgesOnEdge', NF_INT, 1, dimlist, wrVarIDnEdgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'edgesOnCell', NF_INT, 2, dimlist, wrVarIDedgesOnCell) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'edgesOnEdge', NF_INT, 2, dimlist, wrVarIDedgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'weightsOnEdge', NF_DOUBLE, 2, dimlist, wrVarIDweightsOnEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dvEdge', NF_DOUBLE, 1, dimlist, wrVarIDdvEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dcEdge', NF_DOUBLE, 1, dimlist, wrVarIDdcEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 
'angleEdge', NF_DOUBLE, 1, dimlist, wrVarIDangleEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'areaCell', NF_DOUBLE, 1, dimlist, wrVarIDareaCell) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'areaTriangle', NF_DOUBLE, 1, dimlist, wrVarIDareaTriangle) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'cellsOnCell', NF_INT, 2, dimlist, wrVarIDcellsOnCell) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'verticesOnCell', NF_INT, 2, dimlist, wrVarIDverticesOnCell) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'verticesOnEdge', NF_INT, 2, dimlist, wrVarIDverticesOnEdge) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'edgesOnVertex', NF_INT, 2, dimlist, wrVarIDedgesOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'cellsOnVertex', NF_INT, 2, dimlist, wrVarIDcellsOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'fVertex', NF_DOUBLE, 1, dimlist, wrVarIDfVertex) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'fCell', NF_DOUBLE, 1, dimlist, wrVarIDfCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'bottomDepth', NF_DOUBLE, 1, dimlist, wrVarIDbottomDepth) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'temperatureRestore', NF_DOUBLE, 1, dimlist, wrVarIDtemperatureRestore) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'salinityRestore', NF_DOUBLE, 1, dimlist, wrVarIDsalinityRestore) - - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 
'boundaryLayerDepth', NF_DOUBLE, 1, dimlist, wrVarIDboundaryLayerDepth) - - dimlist( 1) = wrDimIDnVertLevels - nferr = nf_def_var(wr_ncid, 'refBottomDepth', NF_DOUBLE, 1, dimlist, wrVarIDrefBottomDepth) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'normalVelocity', NF_DOUBLE, 3, dimlist, wrVarIDnormalVelocity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'boundaryEdge', NF_INT, 2, dimlist, wrVarIDboundaryEdge) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'boundaryVertex', NF_INT, 2, dimlist, wrVarIDboundaryVertex) - - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'surfaceWindStress', NF_DOUBLE, 1, dimlist, wrVarIDsurfaceWindStress) - - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'surfaceWindStressZonal', NF_DOUBLE, 1, dimlist, wrVarIDsurfaceWindStressZonal) - - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'surfaceWindStressMeridional', NF_DOUBLE, 1, dimlist, wrVarIDsurfaceWindStressMeridional) - - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'layerThickness', NF_DOUBLE, 3, dimlist, & - wrVarIDlayerThickness) - ! dimlist( 1) = wrDimIDnVertLevels - ! dimlist( 2) = wrDimIDnCells - ! dimlist( 3) = wrDimIDTime - ! nferr = nf_def_var(wr_ncid, 'density', NF_DOUBLE, 3, dimlist, wrVarIDdensity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'temperature', NF_DOUBLE, 3, dimlist, wrVarIDtemperature) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'salinity', NF_DOUBLE, 3, dimlist, wrVarIDsalinity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - ! 
If you do not want tracer1 in your input file, simply comment out these two lines (one of two) - nferr = nf_def_var(wr_ncid, 'tracer1', NF_DOUBLE, 3, dimlist, wrVarIDtracer1) - - - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'on_a_sphere', 16, on_a_sphere) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'sphere_radius', NF_DOUBLE, 1, sphere_radius) - - nferr = nf_enddef(wr_ncid) - - end subroutine write_netcdf_init - - - subroutine write_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - maxLevelCell, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - fCell, & - bottomDepth, & - boundaryEdge, & - boundaryVertex, & - surfaceWindStress, & - surfaceWindStressZonal, & - surfaceWindStressMeridional, & - normalVelocity, & - layerThickness, & - density, & - temperature, & - salinity, & - tracer1, & - temperatureRestore, & - salinityRestore, & - boundaryLayerDepth, & - refBottomDepth & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: time - real (kind=8), dimension(:), intent(in) :: latCell - real (kind=8), dimension(:), intent(in) :: lonCell - real (kind=8), dimension(:), intent(in) :: meshDensity - real (kind=8), dimension(:), intent(in) :: xCell - real (kind=8), dimension(:), intent(in) :: yCell - real (kind=8), dimension(:), intent(in) :: zCell - integer, dimension(:), intent(in) :: indexToCellID - real (kind=8), dimension(:), intent(in) :: latEdge - real (kind=8), dimension(:), intent(in) :: lonEdge - real (kind=8), dimension(:), intent(in) 
:: xEdge - real (kind=8), dimension(:), intent(in) :: yEdge - real (kind=8), dimension(:), intent(in) :: zEdge - integer, dimension(:), intent(in) :: indexToEdgeID - real (kind=8), dimension(:), intent(in) :: latVertex - real (kind=8), dimension(:), intent(in) :: lonVertex - real (kind=8), dimension(:), intent(in) :: xVertex - real (kind=8), dimension(:), intent(in) :: yVertex - real (kind=8), dimension(:), intent(in) :: zVertex - integer, dimension(:), intent(in) :: indexToVertexID - integer, dimension(:), intent(in) :: maxLevelCell - integer, dimension(:,:), intent(in) :: cellsOnEdge - integer, dimension(:), intent(in) :: nEdgesOnCell - integer, dimension(:), intent(in) :: nEdgesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnCell - integer, dimension(:,:), intent(in) :: edgesOnEdge - real (kind=8), dimension(:,:), intent(in) :: weightsOnEdge - real (kind=8), dimension(:), intent(in) :: dvEdge - real (kind=8), dimension(:), intent(in) :: dcEdge - real (kind=8), dimension(:), intent(in) :: angleEdge - real (kind=8), dimension(:), intent(in) :: areaCell - real (kind=8), dimension(:), intent(in) :: areaTriangle - integer, dimension(:,:), intent(in) :: cellsOnCell - integer, dimension(:,:), intent(in) :: verticesOnCell - integer, dimension(:,:), intent(in) :: verticesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnVertex - integer, dimension(:,:), intent(in) :: cellsOnVertex - real (kind=8), dimension(:,:), intent(in) :: kiteAreasOnVertex - real (kind=8), dimension(:), intent(in) :: fEdge - real (kind=8), dimension(:), intent(in) :: fVertex - real (kind=8), dimension(:), intent(in) :: fCell - real (kind=8), dimension(:), intent(in) :: bottomDepth - integer, dimension(:,:), intent(in) :: boundaryEdge - integer, dimension(:,:), intent(in) :: boundaryVertex - real (kind=8), dimension(:), intent(in) :: surfaceWindStress - real (kind=8), dimension(:), intent(in) :: surfaceWindStressZonal - real (kind=8), dimension(:), intent(in) :: 
surfaceWindStressMeridional - real (kind=8), dimension(:,:,:), intent(in) :: normalVelocity - real (kind=8), dimension(:,:,:), intent(in) :: layerThickness - real (kind=8), dimension(:,:,:), intent(in) :: density - real (kind=8), dimension(:,:,:), intent(in) :: temperature - real (kind=8), dimension(:,:,:), intent(in) :: salinity - real (kind=8), dimension(:,:,:), intent(in) :: tracer1 - real (kind=8), dimension(:), intent(in) :: temperatureRestore - real (kind=8), dimension(:), intent(in) :: salinityRestore - real (kind=8), dimension(:), intent(in) :: boundaryLayerDepth - real (kind=8), dimension(:), intent(in) :: refBottomDepth - - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatCell, start1, count1, latCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonCell, start1, count1, lonCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, meshDensity) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDxCell, start1, count1, xCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDyCell, start1, count1, yCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDzCell, start1, count1, zCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToCellID, start1, count1, indexToCellID) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatEdge, start1, 
count1, latEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonEdge, start1, count1, lonEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDxEdge, start1, count1, xEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDyEdge, start1, count1, yEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDzEdge, start1, count1, zEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToEdgeID, start1, count1, indexToEdgeID) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatVertex, start1, count1, latVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonVertex, start1, count1, lonVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDxVertex, start1, count1, xVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDyVertex, start1, count1, yVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDzVertex, start1, count1, zVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToVertexID, start1, count1, indexToVertexID) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDmaxLevelCell, start1, count1, maxLevelCell) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnEdge, start2, count2, cellsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnCell, start1, count1, nEdgesOnCell) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnEdge, start1, count1, nEdgesOnEdge) - - start2(2) 
= 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnCell, start2, count2, edgesOnCell) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnEdge, start2, count2, edgesOnEdge) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDweightsOnEdge, start2, count2, weightsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdvEdge, start1, count1, dvEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdcEdge, start1, count1, dcEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDangleEdge, start1, count1, angleEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaCell, start1, count1, areaCell) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaTriangle, start1, count1, areaTriangle) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnCell, start2, count2, cellsOnCell) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnCell, start2, count2, verticesOnCell) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnEdge, start2, count2, verticesOnEdge) - - start2(2) = 1 - count2( 1) = wrLocalvertexDegree - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnVertex, start2, count2, edgesOnVertex) - - start2(2) = 1 - count2( 1) = wrLocalvertexDegree - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnVertex, start2, count2, cellsOnVertex) - - start2(2) = 1 - count2( 1) = 
wrLocalvertexDegree - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDfCell, start1, count1, fCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDbottomDepth, start1, count1, bottomDepth) - - start2(2) = 1 - count2( 1) = wrLocalnVertLevels - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDboundaryEdge, start2, count2, boundaryEdge) - - start2(2) = 1 - count2( 1) = wrLocalnVertLevels - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDboundaryVertex, start2, count2, boundaryVertex) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDsurfaceWindStress, start1, count1, surfaceWindStress) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDsurfaceWindStressZonal, start1, count1, surfaceWindStressZonal) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDsurfaceWindStressMeridional, start1, count1, surfaceWindStressMeridional) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDtemperatureRestore, start1, count1, temperatureRestore) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDsalinityRestore, start1, count1, salinityRestore) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDboundaryLayerDepth, start1, count1, boundaryLayerDepth) - - start1(1) = 1 - count1( 1) = wrLocalnVertLevels - nferr = nf_put_vara_double(wr_ncid, 
wrVarIDrefBottomDepth, start1, count1, refBottomDepth) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDnormalVelocity, start3, count3, normalVelocity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDlayerThickness, start3, count3, layerThickness) - - ! start3(3) = time - ! count3( 1) = wrLocalnVertLevels - ! count3( 2) = wrLocalnCells - ! count3( 3) = 1 - ! nferr = nf_put_vara_double(wr_ncid, wrVarIDdensity, start3, count3, density) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDtemperature, start3, count3, temperature) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDsalinity, start3, count3, salinity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - ! 
If you do not want tracer1 in your input file, simply comment out these two lines (two of two) - nferr = nf_put_vara_double(wr_ncid, wrVarIDtracer1, start3, count3, tracer1) - - end subroutine write_netcdf_fields - - - subroutine write_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(wr_ncid) - - end subroutine write_netcdf_finalize - -end module write_netcdf diff --git a/grid_gen/basin/src/utilities.F b/grid_gen/basin/src/utilities.F deleted file mode 100644 index a5a97a351..000000000 --- a/grid_gen/basin/src/utilities.F +++ /dev/null @@ -1,781 +0,0 @@ -module utilities - -contains - -subroutine write_OpenDX( on_a_sphere, & - nCells, & - nVertices, & - nEdges, & - vertexDegree, & - maxEdges, & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - xEdge, & - yEdge, & - zEdge, & - nEdgesOnCell, & - verticesOnCell, & - verticesOnEdge, & - cellsOnVertex, & - edgesOnCell, & - areaCell, & - maxLevelCell, & - meshSpacing, & - depthCell, & - SST, & - kiteAreasOnVertex ) - - implicit none - - character (len=16), intent(in) :: on_a_sphere - integer, intent(in) :: nCells, nVertices, vertexDegree, nEdges, maxEdges - real (kind=8), dimension(nCells), intent(inout) :: xCell - real (kind=8), dimension(nCells), intent(inout) :: yCell - real (kind=8), dimension(nCells), intent(inout) :: zCell - real (kind=8), dimension(nVertices), intent(inout) :: xVertex - real (kind=8), dimension(nVertices), intent(inout) :: yVertex - real (kind=8), dimension(nVertices), intent(inout) :: zVertex - real (kind=8), dimension(nEdges), intent(inout) :: xEdge - real (kind=8), dimension(nEdges), intent(inout) :: yEdge - real (kind=8), dimension(nEdges), intent(inout) :: zEdge - integer, dimension(nCells), intent(in) :: nEdgesOnCell - integer, dimension(maxEdges,nCells), intent(in) :: verticesOnCell - integer, dimension(maxEdges,nCells), intent(in) :: edgesOnCell - integer, dimension(2,nEdges), intent(in) :: verticesOnEdge - 
integer, dimension(vertexDegree, nVertices), intent(in) :: cellsOnVertex - integer, dimension(nCells), intent(in) :: maxLevelCell - real (kind=8), dimension(nCells), intent(in) :: areaCell - real (kind=8), dimension(nCells), intent(in) :: depthCell, SST, meshSpacing - real (kind=8), dimension(vertexDegree,nVertices), intent(in) :: kiteAreasOnVertex - - character(len=80) :: a, b, c, d, e, f - integer :: i, j, k, nVerticesTotal, iEdge, iLoop, iFace, Vert(4), Edge(4), iVertex, i1, i2, jp1 - integer :: nKitesTotal, iCell, iEdge1, iEdge2, iVertex11, iVertex12, iVertex21, iVertex22, ksave - real (kind=8) :: x1, x2, x3, x4, y1, y2, y3, y4, z1, z2, z3, z4, xscale, work(nCells), work1(nCells), work2(nCells) - real (kind=8) :: xv, yv, zv, xc, yc, zc, dist - logical (kind=8) :: eflag - - if(on_a_sphere.eq.'NO ') then - write(6,*) ' write_dx, not on a sphere ' - endif - - xscale = 1.00 - xCell = xCell*xscale - yCell = yCell*xscale - zCell = zCell*xscale - xVertex = xVertex*xscale - yVertex = yVertex*xscale - zVertex = zVertex*xscale - xEdge = xEdge*xscale - yEdge = yEdge*xscale - zEdge = zEdge*xscale - - write(6,*) 'xCell', minval(xCell), maxval(xCell) - write(6,*) ' nCells', nCells - write(6,*) ' nEdges', nEdges - write(6,*) ' nVertices', nVertices - write(6,*) ' nEdgesOnCell',minval(nEdgesOnCell), maxval(nEdgesOnCell) - - open(unit=1,file='dx/vector.dx',form='formatted',status='unknown') - - a = trim('object "positions list" class array type float rank 1 shape 3 items') - b = trim('ascii data file vector.position.data') - write(1,10) a, nCells - write(1,10) b - write(1,*) - - a = trim('object 0 class array type float rank 1 shape 3 items') - b = trim('ascii data file vector.data') - c = trim('attribute "dep" string "positions"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "vector" class field') - b = trim('component "positions" "positions list"') - c = trim('component "data" 0') - write(1,10) a - write(1,10) b - write(1,10) c - - 
close(1) - - open(unit=14,file='dx/vector.position.data',form='formatted',status='unknown') - do i=1,nCells - write(14,22) xCell(i), yCell(i), zCell(i) - enddo - close(14) - - - - nVerticesTotal = 0 - do i=1,nCells - nVerticesTotal = nVerticesTotal + nEdgesOnCell(i) - enddo - write(6,*) 'total number of vertices', nVerticesTotal - - open(unit=1,file='dx/ocean.dx',form='formatted',status='unknown') - - a = trim('object "positions list" class array type float rank 1 shape 3 items') - b = trim('ascii data file ocean.position.data') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,*) - 10 format(a70,i10) - - a = trim('object "edge list" class array type int rank 0 items') - b = trim('ascii data file ocean.edge.data') - c = trim('attribute "ref" string "positions"') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "loops list" class array type int rank 0 items') - b = trim('ascii data file ocean.loop.data') - c = trim('attribute "ref" string "edges"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "face list" class array type int rank 0 items') - b = trim('ascii data file ocean.face.data') - c = trim('attribute "ref" string "loops"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object 0 class array type float rank 0 items') - b = trim('data file ocean.meshSpacing.data') - c = trim('attribute "dep" string "faces"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "area" class field') - b = trim('component "positions" "positions list"') - c = trim('component "edges" "edge list"') - d = trim('component "loops" "loops list"') - e = trim('component "faces" "face list"') - f = trim('component "data" 0') - write(1,10) a - write(1,10) b - write(1,10) c - write(1,10) d - write(1,10) e - write(1,10) f - - close(1) - - - work2 = meshSpacing - work1 = depthCell - work = SST - - open(unit= 
8,file='dx/ocean.meshSpacing.data',form='formatted',status='unknown') - open(unit= 9,file='dx/ocean.depth.data',form='formatted',status='unknown') - open(unit=10,file='dx/ocean.area.data',form='formatted',status='unknown') - open(unit=11,file='dx/ocean.face.data',form='formatted',status='unknown') - open(unit=12,file='dx/ocean.loop.data',form='formatted',status='unknown') - open(unit=13,file='dx/ocean.edge.data',form='formatted',status='unknown') - open(unit=14,file='dx/ocean.position.data',form='formatted',status='unknown') - - iLoop = 0 - iEdge = 0 - do i=1,nCells - write(8,20) work2(i) - write(9,20) work1(i) - write(10,20) work(i) - write(11,21) i-1 - write(12,21) iLoop - iLoop = iLoop + nEdgesOnCell(i) - - eflag = .false. - do j=1,nEdgesOnCell(i) - k = verticesOnCell(j,i) - xv = xVertex(k); yv = yVertex(k); zv = zVertex(k) - xc = xCell(i); yc = yCell(i); zc = zCell(i) - dist = sqrt( (xc-xv)**2 + (yc-yv)**2 + (zc-zv)**2 ) - if(dist.gt.5.0e5.and.on_a_sphere.eq.'NO ') then - eflag = .true. - endif - enddo - - if(eflag) then - - do j=1,nEdgesOnCell(i) - write(13,21) iEdge - iEdge = iEdge + 1 - k = verticesOnCell(j,i) - xv = xVertex(k); yv = yVertex(k); zv = zVertex(k) - xc = xCell(i); yc = yCell(i); zc = zCell(i) - dist = sqrt( (xc-xv)**2 + (yc-yv)**2 + (zc-zv)**2 ) - if(dist.gt.5.0e5) then - write(14,22) xc, yc, zc - else - write(14,22) xv, yv, zv - endif - enddo - - else - - do j=1,nEdgesOnCell(i) - write(13,21) iEdge - iEdge = iEdge + 1 - k = verticesOnCell(j,i) - if(k.le.0) write(6,*) ' vert1 ',k, verticesOnCell(:,i) - write(14,22) xVertex(k), yVertex(k), zVertex(k) - write(15,23) j,i,k,xVertex(k), yVertex(k), zVertex(k) - enddo - endif - enddo - - 20 format(e20.10) - 21 format(i20) - 22 format(3e20.10) - 23 format(3i8, 3e20.10) - - close(9) - close(10) - close(11) - close(12) - close(13) - close(14) - - ! nVerticesTotal = 0 - ! nKitesTotal = 0 - ! do i=1,nCells - ! nKitesTotal = nKitesTotal + nEdgesOnCell(i) - ! enddo - ! nVerticesTotal = nKitesTotal*4 - ! 
write(6,*) nKitesTotal, nVerticesTotal - - ! open(unit=1,file='dx/kite.dx',form='formatted',status='unknown') - - ! a = trim('object "positions list" class array type float rank 1 shape 3 items') - ! b = trim('ascii data file kite.position.data') - ! write(1,10) a, nVerticesTotal - ! write(1,10) b - ! write(1,*) - - ! a = trim('object "edge list" class array type int rank 0 items') - ! b = trim('ascii data file kite.edge.data') - ! c = trim('attribute "ref" string "positions"') - ! write(1,10) a, nVerticesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object "loops list" class array type int rank 0 items') - ! b = trim('ascii data file kite.loop.data') - ! c = trim('attribute "ref" string "edges"') - ! write(1,10) a, nKitesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object "face list" class array type int rank 0 items') - ! b = trim('ascii data file kite.face.data') - ! c = trim('attribute "ref" string "loops"') - ! write(1,10) a, nKitesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object 0 class array type float rank 0 items') - ! b = trim('data file kite.area.data') - ! c = trim('attribute "dep" string "faces"') - ! write(1,10) a, nKitesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object "area" class field') - ! b = trim('component "positions" "positions list"') - ! c = trim('component "edges" "edge list"') - ! d = trim('component "loops" "loops list"') - ! e = trim('component "faces" "face list"') - ! f = trim('component "data" 0') - ! write(1,10) a - ! write(1,10) b - ! write(1,10) c - ! write(1,10) d - ! write(1,10) e - ! write(1,10) f - - ! close(1) - - ! open(unit=10,file='dx/kite.area.data',form='formatted',status='unknown') - ! open(unit=11,file='dx/kite.face.data',form='formatted',status='unknown') - ! open(unit=12,file='dx/kite.loop.data',form='formatted',status='unknown') - ! 
open(unit=13,file='dx/kite.edge.data',form='formatted',status='unknown') - ! open(unit=14,file='dx/kite.position.data',form='formatted',status='unknown') - - ! iLoop = 0 - ! iEdge = 0 - ! iFace = 0 - - ! do iCell=1,nCells - ! do j=1,nEdgesOnCell(iCell) - ! iEdge1 = edgesOnCell(j,iCell) - ! jp1 = j+1 - ! if(j.eq.nEdgesOnCell(iCell)) jp1=1 - ! iEdge2 = edgesOnCell(jp1,iCell) - - ! iVertex11 = verticesOnEdge(1,iEdge1) - ! iVertex21 = verticesOnEdge(2,iEdge1) - ! iVertex12 = verticesOnEdge(1,iEdge2) - ! ivertex22 = verticesOnEdge(2,iEdge2) - - ! if(iVertex11.eq.iVertex12.or.iVertex11.eq.iVertex22) then - ! iVertex = iVertex11 - ! elseif(iVertex21.eq.iVertex12.or.iVertex21.eq.iVertex22) then - ! iVertex = iVertex21 - ! else - ! write(6,*) iVertex11, iVertex21, iVertex12, iVertex22 - ! stop - ! endif - - ! ksave = 0 - ! do k=1,vertexDegree - ! if(cellsOnVertex(k,iVertex).eq.iCell) ksave=k - ! enddo - ! if(ksave.eq.0) then - ! write(6,*) ' can not find iCell' - ! write(6,*) cellsOnVertex(:,iVertex) - ! write(6,*) iCell - ! write(6,*) iEdge1, iEdge2 - ! write(6,*) iVertex11, iVertex21, iVertex21, iVertex22 - ! write(6,*) iVertex - ! stop - ! endif - - ! write(11,21) iFace - ! write(12,21) iLoop - ! iFace = iFace + 1 - ! iLoop = iLoop + 4 - ! do k=1,4 - ! write(13,21) iEdge - ! iEdge = iEdge + 1 - ! enddo - ! - ! x1 = xCell(iCell) ; y1 = yCell(iCell) ; z1 = zCell(iCell) - ! x2 = xEdge(iEdge1) ; y2 = yEdge(iEdge1) ; z2 = zEdge(iEdge1) - ! x3 = xVertex(iVertex); y3 = yVertex(iVertex); z3 = zVertex(iVertex) - ! x4 = xEdge(iEdge2) ; y4 = yEdge(iEdge2) ; z4 = zEdge(iEdge2) - ! - ! write(14,22) x1, y1, z1 - ! write(14,22) x2, y2, z2 - ! write(14,22) x3, y3, z3 - ! write(14,22) x4, y4, z4 - ! write(10,22) kiteAreasOnVertex(ksave,iVertex) - - ! enddo - ! enddo - -end subroutine write_OpenDX - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_LX -! -! Convert (lat,lon) to an (x, y, z) location on a sphere with specified radius. 
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_lx(x, y, z, radius, lat, lon) - - implicit none - - real, intent(in) :: radius - real, intent(in) :: lat, lon - real, intent(out) :: x, y, z - - z = radius * sin(lat) - x = radius * cos(lon) * cos(lat) - y = radius * sin(lon) * cos(lat) - -end subroutine convert_lx - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_XL -! -! Convert (x, y, z) to a (lat, lon) location on a sphere with -! radius sqrt(x^2 + y^2 + z^2). -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_xl(x, y, z, lat,lon) - - implicit none - - real, intent(in) :: x, y, z - real, intent(out) :: lat, lon - - real :: dl, clat, pii, rtod - real :: eps - parameter (eps=1.e-10) - - pii = 2.*asin(1.0) - rtod=180./pii - dl = sqrt(x*x + y*y + z*z) - - lat = asin(z/dl) - -! check for being close to either pole - - if (abs(x) > eps) then - - if (abs(y) > eps) then - - lon = atan(abs(y/x)) - - if ((x <= 0.) .and. (y >= 0.)) then - lon = pii-lon - else if ((x <= 0.) .and. (y < 0.)) then - lon = lon+pii - else if ((x >= 0.) .and. (y <= 0.)) then - lon = 2*pii-lon - end if - - else ! we're either on longitude 0 or 180 - - if (x > 0) then - lon = 0. - else - lon = pii - end if - - end if - - else if (abs(y) > eps) then - - if (y > 0) then - lon = pii/2. - else - lon = 3.*pii/2. - end if - - else ! we are at a pole - - lon = 0. - - end if - -end subroutine convert_xl - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine transform_from_lonlat_to_xyz(xin, yin, zin, ulon, ulat, ux, uy, uz) -! -! transform vector measured in latitude/longitude space to a vector measured in x,y,z -! -! INTENT(IN) -! xin = x position -! yin = y position -! zin = z position -! ulon = east component of vector -! ulat = north component of vector -! -! INTENT(OUT) -! 
ux = x component of vector -! uy = y component of vector -! uz = z component of vector -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -implicit none -real, intent(in) :: xin, yin, zin, ulon, ulat -real, intent(out) :: ux, uy, uz -real :: h(3,3), p(3), q(3), g(3), X1(3,3), X2(3,3), trans_X2_to_X1(3,3), r -integer :: i,j,k -logical :: l_Pole -real, parameter :: epsvt = 1.0e-10 - -!----------------------------------------------------------------------- -! define the e1, e2, and e3 directions -!----------------------------------------------------------------------- - X1(1,1) = 1.0; X1(1,2) = 0.0; X1(1,3) = 0.0 - X1(2,1) = 0.0; X1(2,2) = 1.0; X1(2,3) = 0.0 - X1(3,1) = 0.0; X1(3,2) = 0.0; X1(3,3) = 1.0 - -!----------------------------------------------------------------------- -! find the vectors (measured in X1) that point in the local -! east (h(1,:)), north (h(2,:)), and vertical (h(3,:)) direction -!----------------------------------------------------------------------- - h(3,1) = xin; h(3,2) = yin; h(3,3) = zin - call unit_vector_in_3space(h(3,:)) - -!----------------------------------------------------------------------- -! g(:) is a work array and holds the vector pointing to the North Pole. -! measured in X1 -!----------------------------------------------------------------------- - g(:) = X1(3,:) - -!----------------------------------------------------------------------- -! determine if the local vertical hits a pole -!----------------------------------------------------------------------- - l_Pole = .false. - r = g(1)*h(3,1) + g(2)*h(3,2) + g(3)*h(3,3) - r = abs(r) + epsvt - if(r.gt.1.0) then - l_Pole = .true. - h(3,:) = h(3,:) + epsvt - call unit_vector_in_3space(h(3,:)) - endif - -!----------------------------------------------------------------------- -! find the vector that is perpendicular to the local vertical vector -! and points in the direction of of the North pole, this defines the local -! north direction. 
measured in X1 -!----------------------------------------------------------------------- - call vector_on_tangent_plane ( h(3,:), g(:), h(2,:) ) - -!----------------------------------------------------------------------- -! take the cross product of the local North direction and the local vertical -! to find the local east vector. still in X1 -!----------------------------------------------------------------------- - call cross_product_in_3space ( h(2,:), h(3,:), h(1,:) ) - -!----------------------------------------------------------------------- -! put these 3 vectors into a matrix X2 -!----------------------------------------------------------------------- - X2(1,:) = h(1,:) ! local east (measured in X1) - X2(2,:) = h(2,:) ! local north (measured in X1) - X2(3,:) = h(3,:) ! local vertical (measured in X1) - -!----------------------------------------------------------------------- -! compute the transformation matrix -!----------------------------------------------------------------------- - trans_X2_to_X1(:,:) = matmul(X1,transpose(X2)) - -!----------------------------------------------------------------------- -! transform (ulon, ulat) into (x,y,z) -!----------------------------------------------------------------------- - p(1) = ulon; p(2) = ulat; p(3) = 0 - g(:) = matmul(trans_X2_to_X1(:, :), p(:)) - ux = g(1); uy = g(2); uz = g(3) - -end subroutine transform_from_lonlat_to_xyz - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine transform_from_xyz_to_lonlat(xin, yin, zin, ux, uy, uz, ulon, ulat) -! -! transform vector measured in x,y,z space to a vector measured in latitude/longitude space -! -! INTENT(IN) -! xin = x position -! yin = y position -! zin = z position -! ux = x component of vector -! uy = y component of vector -! uz = z component of vector -! -! INTENT(OUT) -! ulon = east component of vector -! 
ulat = north component of vector -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -implicit none -real, intent(in) :: xin, yin, zin, ux, uy, uz -real, intent(out) :: ulon, ulat -real :: h(3,3), p(3), q(3), g(3), X1(3,3), X2(3,3), trans_X1_to_X2(3,3), r -integer :: i,j,k -logical :: l_Pole -real, parameter :: epsvt = 1.0e-10 - -!----------------------------------------------------------------------- -! define the e1, e2, and e3 directions -!----------------------------------------------------------------------- - X1(1,1) = 1.0; X1(1,2) = 0.0; X1(1,3) = 0.0 - X1(2,1) = 0.0; X1(2,2) = 1.0; X1(2,3) = 0.0 - X1(3,1) = 0.0; X1(3,2) = 0.0; X1(3,3) = 1.0 - -!----------------------------------------------------------------------- -! find the vectors (measured in X1) that point in the local -! east (h(1,:)), north (h(2,:)), and vertical (h(3,:)) direction -!----------------------------------------------------------------------- - h(3,1) = xin; h(3,2) = yin; h(3,3) = zin - call unit_vector_in_3space(h(3,:)) - -!----------------------------------------------------------------------- -! g(:) is a work array and holds the vector pointing to the North Pole. -! measured in X1 -!----------------------------------------------------------------------- - g(:) = X1(3,:) - -!----------------------------------------------------------------------- -! determine if the local vertical hits a pole -!----------------------------------------------------------------------- - l_Pole = .false. - r = g(1)*h(3,1) + g(2)*h(3,2) + g(3)*h(3,3) - r = abs(r) + epsvt - if(r.gt.1.0) then - l_Pole = .true. - h(3,:) = h(3,:) + epsvt - call unit_vector_in_3space(h(3,:)) - endif - -!----------------------------------------------------------------------- -! find the vector that is perpendicular to the local vertical vector -! and points in the direction of of the North pole, this defines the local -! north direction. 
measured in X1 -!----------------------------------------------------------------------- - call vector_on_tangent_plane ( h(3,:), g(:), h(2,:) ) - -!----------------------------------------------------------------------- -! take the cross product of the local North direction and the local vertical -! to find the local east vector. still in X1 -!----------------------------------------------------------------------- - call cross_product_in_3space ( h(2,:), h(3,:), h(1,:) ) - -!----------------------------------------------------------------------- -! put these 3 vectors into a matrix X2 -!----------------------------------------------------------------------- - X2(1,:) = h(1,:) ! local east (measured in X1) - X2(2,:) = h(2,:) ! local north (measured in X1) - X2(3,:) = h(3,:) ! local vertical (measured in X1) - -!----------------------------------------------------------------------- -! compute the transformation matrix -!----------------------------------------------------------------------- - trans_X1_to_X2(:,:) = matmul(X2,transpose(X1)) - -!----------------------------------------------------------------------- -! transform (ulon, ulat) into (x,y,z) -!----------------------------------------------------------------------- - p(1) = ux; p(2) = uy; p(3) = uz - g(:) = matmul(trans_X1_to_X2(:, :), p(:)) - ulon = g(1); ulat= g(2); - -end subroutine transform_from_xyz_to_lonlat - -!====================================================================== -! BEGINNING OF UNIT_VECTOR_IN_3SPACE -!====================================================================== - subroutine unit_vector_in_3space (p_1) - -!----------------------------------------------------------------------- -! PURPOSE : normalize p_1 to unit length and overwrite p_1 -!----------------------------------------------------------------------- - -!----------------------------------------------------------------------- -! 
intent(inout) -!----------------------------------------------------------------------- - real , intent(inout) :: & - p_1 (:) - -!----------------------------------------------------------------------- -! local -!----------------------------------------------------------------------- - real :: length - - length = SQRT (p_1(1)**2 + p_1(2)**2 + p_1(3)**2 ) - length = 1.0/length - p_1(1) = p_1(1)*length - p_1(2) = p_1(2)*length - p_1(3) = p_1(3)*length - - end subroutine unit_vector_in_3space -!====================================================================== -! END OF UNIT_VECTOR_IN_3SPACE -!====================================================================== - -!====================================================================== -! BEGINNING OF CROSS_PRODUCT_IN_3SPACE -!====================================================================== - subroutine cross_product_in_3space(p_1,p_2,p_out) - -!----------------------------------------------------------------------- -! PURPOSE: compute p_1 cross p_2 and place in p_out -!----------------------------------------------------------------------- - -!----------------------------------------------------------------------- -! intent(in) -!----------------------------------------------------------------------- - real , intent(in) :: & - p_1 (:), & - p_2 (:) - -!----------------------------------------------------------------------- -! intent(out) -!----------------------------------------------------------------------- - real , intent(out) :: & - p_out (:) - - p_out(1) = p_1(2)*p_2(3)-p_1(3)*p_2(2) - p_out(2) = p_1(3)*p_2(1)-p_1(1)*p_2(3) - p_out(3) = p_1(1)*p_2(2)-p_1(2)*p_2(1) - - end subroutine cross_product_in_3space -!====================================================================== -! END OF CROSS_PRODUCT_IN_3SPACE -!====================================================================== - -!====================================================================== -! 
BEGINNING OF VECTOR_ON_TANGENT_PLANE -!====================================================================== - subroutine vector_on_tangent_plane(p_1, p_2, p_out) - -!----------------------------------------------------------------------- -! PURPOSE : given two points measured in (x,y,z) and lying on -! the unit sphere, find the vector (p_out) that lies on the plane -! perpendicular to the p_1 vector and points in the direction of -! the projection of p_2 onto the tangent plane. -! -! NOTE : p_1 and p_2 are assumed to be of unit length -! NOTE : p_out is normalized to unit length -!----------------------------------------------------------------------- - -!----------------------------------------------------------------------- -! intent(in) -!----------------------------------------------------------------------- - real , intent(in) :: & - p_1 (:), & - p_2 (:) - -!----------------------------------------------------------------------- -! intent(out) -!----------------------------------------------------------------------- - real , intent(out) :: & - p_out (:) - -!----------------------------------------------------------------------- -! local -!----------------------------------------------------------------------- - real :: & - work (3), t1(3), t2(3) - -! work (1) = - p_1(2) * ( -p_1(2) * p_2(1) + p_1(1) * p_2(2) ) & -! + p_1(3) * ( p_1(3) * p_2(1) - p_1(1) * p_2(3) ) - -! work (2) = + p_1(1) * ( -p_1(2) * p_2(1) + p_1(1) * p_2(2) ) & -! - p_1(3) * ( -p_1(3) * p_2(2) + p_1(2) * p_2(3) ) - -! work (3) = - p_1(1) * ( p_1(3) * p_2(1) - p_1(1) * p_2(3) ) & -! 
+ p_1(2) * ( -p_1(3) * p_2(2) + p_1(2) * p_2(3) ) - - - t1(:) = p_2(:) - p_1(:) - t2(:) = p_1 - - call unit_vector_in_3space (t1) - call unit_vector_in_3space (t2) - - call cross_product_in_3space(t1(:), t2(:), work(:)) - call unit_vector_in_3space (work) - call cross_product_in_3space(t2(:),work(:),p_out(:)) - call unit_vector_in_3space (p_out) - - end subroutine vector_on_tangent_plane -!====================================================================== -! END OF VECTOR_ON_TANGENT_PLANE -!====================================================================== - -end module utilities From 62785d68cf3e6d93b01cca70bc90c52d027cf7db Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:38:50 -0600 Subject: [PATCH 008/180] Remove grid_gen/global_scvt --- grid_gen/global_scvt/INSTALL | 6 - grid_gen/global_scvt/Makefile | 35 - grid_gen/global_scvt/README | 52 - grid_gen/global_scvt/centroids.162.dat | 163 - grid_gen/global_scvt/convergence | 1 - grid_gen/global_scvt/dx/README | 17 - grid_gen/global_scvt/dx/example.tiff | Bin 692302 -> 0 bytes grid_gen/global_scvt/dx/mesh.cfg | 142 - grid_gen/global_scvt/dx/mesh.net | 852 --- grid_gen/global_scvt/dx/topography.dx | 235 - grid_gen/global_scvt/dx/vor.area.data | 162 - grid_gen/global_scvt/dx/vor.edge.data | 960 --- grid_gen/global_scvt/dx/vor.face.data | 162 - grid_gen/global_scvt/dx/vor.loop.data | 162 - grid_gen/global_scvt/dx/vor.position.data | 960 --- grid_gen/global_scvt/dx/voronoi.dx | 25 - grid_gen/global_scvt/locs.dat | 163 - grid_gen/global_scvt/namelist.input | 9 - grid_gen/global_scvt/refine/grid_ref.f | 58 - grid_gen/global_scvt/refine/scvt.m | 36 - grid_gen/global_scvt/refine/svtgen.f | 6729 ----------------- grid_gen/global_scvt/runit.csh | 26 - grid_gen/global_scvt/src/Makefile | 39 - grid_gen/global_scvt/src/STRIPACK.f | 6706 ---------------- grid_gen/global_scvt/src/grid_gen.F | 157 - grid_gen/global_scvt/src/module_data_types.F | 206 - .../global_scvt/src/module_grid_constants.F | 11 - 
.../global_scvt/src/module_grid_gen_utils.F | 309 - grid_gen/global_scvt/src/module_grid_meta.F | 1470 ---- grid_gen/global_scvt/src/module_grid_params.F | 43 - grid_gen/global_scvt/src/module_scvt.F | 276 - .../global_scvt/src/module_sphere_utilities.F | 959 --- .../global_scvt/src/module_voronoi_utils.F | 113 - .../global_scvt/src/module_write_netcdf.F | 646 -- 34 files changed, 21890 deletions(-) delete mode 100644 grid_gen/global_scvt/INSTALL delete mode 100644 grid_gen/global_scvt/Makefile delete mode 100644 grid_gen/global_scvt/README delete mode 100644 grid_gen/global_scvt/centroids.162.dat delete mode 100644 grid_gen/global_scvt/convergence delete mode 100644 grid_gen/global_scvt/dx/README delete mode 100644 grid_gen/global_scvt/dx/example.tiff delete mode 100644 grid_gen/global_scvt/dx/mesh.cfg delete mode 100644 grid_gen/global_scvt/dx/mesh.net delete mode 100644 grid_gen/global_scvt/dx/topography.dx delete mode 100644 grid_gen/global_scvt/dx/vor.area.data delete mode 100644 grid_gen/global_scvt/dx/vor.edge.data delete mode 100644 grid_gen/global_scvt/dx/vor.face.data delete mode 100644 grid_gen/global_scvt/dx/vor.loop.data delete mode 100644 grid_gen/global_scvt/dx/vor.position.data delete mode 100644 grid_gen/global_scvt/dx/voronoi.dx delete mode 100644 grid_gen/global_scvt/locs.dat delete mode 100644 grid_gen/global_scvt/namelist.input delete mode 100644 grid_gen/global_scvt/refine/grid_ref.f delete mode 100644 grid_gen/global_scvt/refine/scvt.m delete mode 100644 grid_gen/global_scvt/refine/svtgen.f delete mode 100644 grid_gen/global_scvt/runit.csh delete mode 100644 grid_gen/global_scvt/src/Makefile delete mode 100644 grid_gen/global_scvt/src/STRIPACK.f delete mode 100644 grid_gen/global_scvt/src/grid_gen.F delete mode 100644 grid_gen/global_scvt/src/module_data_types.F delete mode 100644 grid_gen/global_scvt/src/module_grid_constants.F delete mode 100644 grid_gen/global_scvt/src/module_grid_gen_utils.F delete mode 100644 
grid_gen/global_scvt/src/module_grid_meta.F delete mode 100644 grid_gen/global_scvt/src/module_grid_params.F delete mode 100644 grid_gen/global_scvt/src/module_scvt.F delete mode 100644 grid_gen/global_scvt/src/module_sphere_utilities.F delete mode 100644 grid_gen/global_scvt/src/module_voronoi_utils.F delete mode 100644 grid_gen/global_scvt/src/module_write_netcdf.F diff --git a/grid_gen/global_scvt/INSTALL b/grid_gen/global_scvt/INSTALL deleted file mode 100644 index 84c621a0c..000000000 --- a/grid_gen/global_scvt/INSTALL +++ /dev/null @@ -1,6 +0,0 @@ -1) Edit the Makefile to set appropriate compilers and compiler flags. - The main loop of the Lloyd iteration code is parallelized with OpenMP, - so if desired, OpenMP flags may be specified in FFLAGS and LDFLAGS. - -2) Run 'make' to create an executable, grid_gen, in the src directory, as - well as a symbolic link to the executable in the top-level directory. diff --git a/grid_gen/global_scvt/Makefile b/grid_gen/global_scvt/Makefile deleted file mode 100644 index aaef05ca9..000000000 --- a/grid_gen/global_scvt/Makefile +++ /dev/null @@ -1,35 +0,0 @@ -#FC = ifort -#FFLAGS = -FR -m64 -O3 -fast -ipo -openmp -#F77FLAGS = -FI -m64 -O3 -fast -ipo -openmp -#CPPFLAGS = -DRKIND=8 -#PROMOTION = -r8 -#LDFLAGS = -m64 -O3 -fast -ipo -openmp - -FC = gfortran -FFLAGS = -ffree-form -O3 -fopenmp -ffree-line-length-none -F77FLAGS = -ffixed-form -O3 -fopenmp -fsecond-underscore -CPPFLAGS = -DRKIND=8 -PROMOTION = -fdefault-real-8 -LDFLAGS = -O3 -fopenmp - -#FC = pgf90 -#FFLAGS = -Mfree -O3 -mp -byteswapio -#F77FLAGS = -O3 -byteswapio -#CPPFLAGS = -DRKIND=8 -#PROMOTION = -r8 -#LDFLAGS = -O3 -mp -byteswapio - -all: grid_gen grid_ref - -grid_gen: - ( cd src; make FC="$(FC)" FFLAGS="$(FFLAGS)" F77FLAGS="$(F77FLAGS)" CPPFLAGS="$(CPPFLAGS)" PROMOTION="$(PROMOTION)" LDFLAGS="$(LDFLAGS)" ) - if [ ! -e grid_gen ]; then ln -s src/grid_gen .; fi - -grid_ref: - $(FC) refine/grid_ref.f refine/svtgen.f -o refine/grid_ref - if [ ! 
-e grid_ref ]; then ln -s refine/grid_ref .; fi - -clean: - ( cd src; make clean ) - rm -f grid_gen grid_ref refine/grid_ref - diff --git a/grid_gen/global_scvt/README b/grid_gen/global_scvt/README deleted file mode 100644 index 518827a7b..000000000 --- a/grid_gen/global_scvt/README +++ /dev/null @@ -1,52 +0,0 @@ -The grid_gen utility converts a set of generating points into a complete MPAS -input file, grid.nc. Additionally, grid_gen creates a graph description file -suitable for use with METIS, plots of the initial Delaunay triangulation and -final Delaunay triangulation (after any further adjustment by grid_gen), and a -list of the final generating points. - - - -Input files: - - namelist.input - a Fortran namelist with the following variables - np - the number of generating points in the locs.dat input file - locs_as_xyz - whether the generating points are given as (x,y,z) - coordinates or as (lat,lon) coordinates - n_scvt_iterations - the number of Lloyd iterations to perform, - beginning with the generating points in the locs.dat file, - using the density function specified programmatically in - src/module_scvt.F - - locs.dat - a list of generating points, either given as (lat,lon) - coordinates or as (x,y,z) coordinates; the coorinates used in the - file should be indicated to grid_gen using the locs_as_xyz logical - value in the namelist.input file. - - The format of the file for (x,y,z) coordinates has a header line - giving the number of generating points and the maximum degree of any - node in the Delaunay triangulation, followed by np lines with the - format '(10x,f22.10,f23.10,f23.10)' giving the x, y, and z - coordinates (on the unit sphere) of each generating point. - - The format of the file for (lat,lon) coordinates has np lines with - the format '(f13.10,1x,f13.10)' giving the latitude and longitude - coorinates of each generating point. 
- - - -Output files: - - scvt_initial.ps - a plot of the Delaunay triangulation of the generating - points specified in the locs.dat file - - scvt_final.ps - a plot of the Delaunay triangulation of the final - generating points, after adjustment by the number of Lloyd iterations - specified in the namelist.input file - - locs.dat.out - a list of the final generating points, after adjustment by - the number of Lloyd iterations specified in the namelist.input file - - graph.info - a description of the output mesh in a format suitable for use - with METIS to produce graph decomposition files for MPAS - - grid.nc - an MPAS input file diff --git a/grid_gen/global_scvt/centroids.162.dat b/grid_gen/global_scvt/centroids.162.dat deleted file mode 100644 index 90f85cf58..000000000 --- a/grid_gen/global_scvt/centroids.162.dat +++ /dev/null @@ -1,163 +0,0 @@ - 162 162 - 1 -0.8909593038 -0.0786862054 0.4472135955 - 2 -0.2004865378 -0.8716680264 0.4472135955 - 3 0.0000000000 0.0000000000 1.0000000000 - 4 0.8909593038 0.0786862054 -0.4472135955 - 5 0.2004865378 0.8716680264 -0.4472135955 - 6 0.3501565946 -0.8230372770 -0.4472135955 - 7 0.7670518092 -0.4600342618 0.4472135955 - 8 -0.7670518092 0.4600342618 -0.4472135955 - 9 0.0000000000 0.0000000000 -1.0000000000 - 10 0.6745506270 0.5873512166 0.4472135955 - 11 -0.6745506270 -0.5873512166 -0.4472135955 - 12 -0.3501565946 0.8230372770 0.4472135955 - 13 -0.7295096215 0.4375185824 0.5257311121 - 14 -0.9745544804 0.2241507635 0.0000000000 - 15 -0.9201836497 -0.3914869741 0.0000000000 - 16 -0.6415357694 -0.5586042019 0.5257311121 - 17 -0.5236927392 -0.0462505911 0.8506508084 - 18 -0.5143339407 -0.8575899938 0.0000000000 - 19 0.0879738521 -0.9961227843 0.0000000000 - 20 0.3330187110 -0.7827549654 0.5257311121 - 21 -0.1178430302 -0.5123536108 0.8506508084 - 22 0.3964909105 0.3452363830 0.8506508084 - 23 -0.2058168823 0.4837691735 0.8506508084 - 24 0.4508617412 -0.2704013546 0.8506508084 - 25 0.9201836497 0.3914869741 0.0000000000 - 26 
0.9745544804 -0.2241507635 0.0000000000 - 27 0.7295096215 -0.4375185824 -0.5257311121 - 28 0.5236927392 0.0462505911 -0.8506508084 - 29 0.6415357694 0.5586042019 -0.5257311121 - 30 0.5143339407 0.8575899938 0.0000000000 - 31 0.1178430302 0.5123536108 -0.8506508084 - 32 -0.3330187110 0.7827549654 -0.5257311121 - 33 -0.0879738521 0.9961227843 0.0000000000 - 34 -0.1906740282 -0.8290055565 -0.5257311121 - 35 0.2058168823 -0.4837691735 -0.8506508084 - 36 0.6566786235 -0.7541705281 0.0000000000 - 37 0.8473526517 0.0748350284 0.5257311121 - 38 -0.4508617412 0.2704013546 -0.8506508084 - 39 -0.8473526517 -0.0748350284 -0.5257311121 - 40 -0.6566786235 0.7541705281 0.0000000000 - 41 -0.3964909105 -0.3452363830 -0.8506508084 - 42 0.1906740282 0.8290055565 0.5257311121 - 43 -0.8434663484 0.1827424714 0.5051432551 - 44 -0.9690454298 0.0733970446 0.2357198099 - 45 -0.9411818011 -0.2421011948 0.2357198099 - 46 -0.7983820501 -0.3277444034 0.5051432551 - 47 -0.7379905791 -0.0651765777 0.6716561016 - 48 -0.1660650217 -0.7220114194 0.6716561016 - 49 -0.4344438541 -0.7457136377 0.5051432551 - 50 -0.3692562437 -0.8989360365 0.2357198099 - 51 -0.0605892524 -0.9699304685 0.2357198099 - 52 0.0649898290 -0.8605850417 0.5051432551 - 53 0.2310548507 -0.1385736223 0.9630218103 - 54 0.2031912220 0.1769246171 0.9630218103 - 55 -0.1054757693 0.2479190491 0.9630218103 - 56 -0.2683788324 -0.0237022183 0.9630218103 - 57 -0.0603914710 -0.2625678257 0.9630218103 - 58 0.7983820501 0.3277444034 -0.5051432551 - 59 0.9411818011 0.2421011948 -0.2357198099 - 60 0.9690454298 -0.0733970446 -0.2357198099 - 61 0.8434663484 -0.1827424714 -0.5051432551 - 62 0.7379905791 0.0651765777 -0.6716561016 - 63 0.3692562437 0.8989360365 -0.2357198099 - 64 0.4344438541 0.7457136377 -0.5051432551 - 65 0.1660650217 0.7220114194 -0.6716561016 - 66 -0.0649898290 0.8605850417 -0.5051432551 - 67 0.0605892524 0.9699304685 -0.2357198099 - 68 0.0868470176 -0.8586546962 -0.5051432551 - 69 0.2900382396 -0.6817300791 -0.6716561016 - 70 
0.5584170720 -0.6580278608 -0.5051432551 - 71 0.5210930903 -0.8203037014 -0.2357198099 - 72 0.2296467686 -0.9442979047 -0.2357198099 - 73 0.7408325207 -0.6289700689 0.2357198099 - 74 0.9037355838 -0.3573488014 0.2357198099 - 75 0.8385479733 -0.2041264026 0.5051432551 - 76 0.6353567514 -0.3810510197 0.6716561016 - 77 0.5749652804 -0.6436188454 0.5051432551 - 78 -0.5749652804 0.6436188454 -0.5051432551 - 79 -0.6353567514 0.3810510197 -0.6716561016 - 80 -0.8385479733 0.2041264026 -0.5051432551 - 81 -0.9037355838 0.3573488014 -0.2357198099 - 82 -0.7408325207 0.6289700689 -0.2357198099 - 83 0.0603914710 0.2625678257 -0.9630218103 - 84 0.2683788324 0.0237022183 -0.9630218103 - 85 0.1054757693 -0.2479190491 -0.9630218103 - 86 -0.2031912220 -0.1769246171 -0.9630218103 - 87 -0.2310548507 0.1385736223 -0.9630218103 - 88 0.4532613197 0.7344279869 0.5051432551 - 89 0.5587370890 0.4865089377 0.6716561016 - 90 0.7897919397 0.3479353154 0.5051432551 - 91 0.8271159214 0.5102111560 0.2357198099 - 92 0.6191285600 0.7490767634 0.2357198099 - 93 -0.6191285600 -0.7490767634 -0.2357198099 - 94 -0.8271159214 -0.5102111560 -0.2357198099 - 95 -0.7897919397 -0.3479353154 -0.5051432551 - 96 -0.5587370890 -0.4865089377 -0.6716561016 - 97 -0.4532613197 -0.7344279869 -0.5051432551 - 98 -0.2296467686 0.9442979047 0.2357198099 - 99 -0.5210930903 0.8203037014 0.2357198099 - 100 -0.5584170720 0.6580278608 0.5051432551 - 101 -0.2900382396 0.6817300791 0.6716561016 - 102 -0.0868470176 0.8586546962 0.5051432551 - 103 -0.7389477008 0.6174931387 0.2695524424 - 104 -0.8927233180 0.3610907895 0.2695524424 - 105 -0.6485012287 0.2146202028 0.7303316540 - 106 -0.4947256115 0.4710225519 0.7303316540 - 107 -0.9607746082 0.0652198824 -0.2695524424 - 108 -0.9960073301 -0.0879636556 -0.0152247717 - 109 -0.8574905961 0.5142743275 0.0152247717 - 110 -0.7540831171 -0.6566025121 0.0152247717 - 111 -0.8156182708 -0.5119651523 0.2695524424 - 112 -0.9344721764 -0.2326010154 -0.2695524424 - 113 -0.4045138430 
-0.5504400294 0.7303316540 - 114 -0.6008476888 -0.3249580433 0.7303316540 - 115 -0.6192844249 -0.7374471383 0.2695524424 - 116 -0.3951596413 0.2369943877 0.8875147988 - 117 -0.3475061014 -0.3025838584 0.8875147988 - 118 -0.3589234758 -0.8935968998 -0.2695524424 - 119 -0.2241247837 -0.9744415260 -0.0152247717 - 120 -0.0675510718 -0.9606135193 -0.2695524424 - 121 0.3914415994 -0.9200769971 0.0152247717 - 122 0.2348678876 -0.9339050038 0.2695524424 - 123 0.5099844947 -0.8168581860 0.2695524424 - 124 0.3984979249 -0.5548108497 0.7303316540 - 125 0.1233813177 -0.6718576676 0.7303316540 - 126 0.1803890593 -0.4240014966 0.8875147988 - 127 0.4589926712 0.0405365222 0.8875147988 - 128 0.6507991050 0.2075480669 0.7303316540 - 129 0.2950904458 0.6160659899 0.7303316540 - 130 0.1032840121 0.4490544452 0.8875147988 - 131 0.0037180419 0.6830826094 0.7303316540 - 132 0.6771015368 -0.0902728309 0.7303316540 - 133 0.9344721764 0.2326010154 0.2695524424 - 134 0.9960073301 0.0879636556 0.0152247717 - 135 0.8156182708 0.5119651523 -0.2695524424 - 136 0.7540831171 0.6566025121 -0.0152247717 - 137 0.8574905961 -0.5142743275 -0.0152247717 - 138 0.8927233180 -0.3610907895 -0.2695524424 - 139 0.9607746082 -0.0652198824 0.2695524424 - 140 0.4947256115 -0.4710225519 -0.7303316540 - 141 0.6485012287 -0.2146202028 -0.7303316540 - 142 0.7389477008 -0.6174931387 -0.2695524424 - 143 0.3475061014 0.3025838584 -0.8875147988 - 144 0.6008476888 0.3249580433 -0.7303316540 - 145 0.3951596413 -0.2369943877 -0.8875147988 - 146 0.6192844249 0.7374471383 -0.2695524424 - 147 0.4045138430 0.5504400294 -0.7303316540 - 148 0.2241247837 0.9744415260 0.0152247717 - 149 0.3589234758 0.8935968998 0.2695524424 - 150 -0.1803890593 0.4240014966 -0.8875147988 - 151 -0.1233813177 0.6718576676 -0.7303316540 - 152 -0.3984979249 0.5548108497 -0.7303316540 - 153 -0.5099844947 0.8168581860 -0.2695524424 - 154 -0.2348678876 0.9339050038 -0.2695524424 - 155 -0.3914415994 0.9200769971 -0.0152247717 - 156 0.0675510718 
0.9606135193 0.2695524424 - 157 -0.2950904458 -0.6160659899 -0.7303316540 - 158 -0.0037180419 -0.6830826094 -0.7303316540 - 159 -0.1032840121 -0.4490544452 -0.8875147988 - 160 -0.4589926712 -0.0405365222 -0.8875147988 - 161 -0.6771015368 0.0902728309 -0.7303316540 - 162 -0.6507991050 -0.2075480669 -0.7303316540 diff --git a/grid_gen/global_scvt/convergence b/grid_gen/global_scvt/convergence deleted file mode 100644 index dbb9c0147..000000000 --- a/grid_gen/global_scvt/convergence +++ /dev/null @@ -1 +0,0 @@ - eps = 1.0e-10 diff --git a/grid_gen/global_scvt/dx/README b/grid_gen/global_scvt/dx/README deleted file mode 100644 index 27a14cd79..000000000 --- a/grid_gen/global_scvt/dx/README +++ /dev/null @@ -1,17 +0,0 @@ -NOTES: -Date files in the directory are automatically generated by running "grid_gen" -The mesh visualized here is consistent with the "locs.dat.out" file generated by grid_gen - -To Run OpenDX: -1. Install OpenDX -2. From this directory, type "dx" -3. Select edit visual program -4. Open mesh.net -5. Under Execute, select Execute on Change -6. Under Window, Open Control Panel by Name, select Main Panel -7. To save an image, from the image panel select Save Image under File - -NOTE: -You can keep OpenDX running and load new mesh generated by grid_gen by: -1. Select reset server under connect -2. 
Select execute on change diff --git a/grid_gen/global_scvt/dx/example.tiff b/grid_gen/global_scvt/dx/example.tiff deleted file mode 100644 index 5447b4d41b8d36948d51655d8457d32fd496e9df..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 692302 zcmeIbU+8sPch|R{bKdjkJv5l2*2FgD1*N?9A`Ll#yz3K9e%trtoE2nwQsUK~H`H=i@lI_u9| zV~#QA+H3FMcR912d#*9QW9;u*`?Kd+&-0%1UEg*4*KZ&F$X~hXkOOi+4#)vHAP3}t z9FPNYKn}-x$8IUom~)qy$p_T~*}TTa~0Xc9%2MVAf&sE&Hl%6y5QRe0OOnsfBUXN}rc!#ScIq<3+s6=v;IhgpMVCM&@ zQhhzCt}+qlU)2{%A65?B-GRcNAerOoPs#Q%%iAZr!Sr$DfE+l}fl8G)lYBs>&@IVgC zDd)y=B(XN&26Zs8Hc%#Z)Gre4Z@c&$4=#;8&^@VE<$xTBI8eUw|IwehmBHHxqI+{I z34>%#Pp^#~PJZXVar2F$9EY}Dp;V0AEy|Sxa^U(7ly7>a`Uz}bb26RDiGpcawL;rCL23k$6E*43jcZ3@@zxIKcx0(~ z&3jc{$pJZFaiDyQ_h7zWFY)tNBe9KPC?`UAkQ7Zi5FJb@p2xXuS11)NcadV`fE>7p z1LbSW>zdw;72=JEc85`75RemPX)*|EcRHmRc0R`=OT~-aw`xNU$bmfv%2%}k@Xb>t z%S~*1B83tmJVJ^N-DJ*6q=z`P?F!}U@7+;amjiO(z7CXc=q9?`izM3N9E_ya2p}hF z(qR^&BRb`3+W8!hEEVs2539NykOS3$@|EPZ4e-6QBnENal}@=t4v&zc!w8deb7_VV z=eAv;RIGQGCUQUyobN#S8ty^cvW`L*nS;TU8$sh-O`6Q*b6l-lXgi;Sk)_=E_p>U> z0Xa~6e7K3 zj>l6poyNI>bU292S+aD0g01Z*#ZIYWmWkOLze;LEj#Yx_DP4ko9hP&ysO>4G#ll*x(W zv|5J|*Ii+VSGF@LwMN|7(v$ z^ZDpv_+BuAbGB;(|C;%u&pyZjzGZv3wyz`NV{%p&4dkLYU5yS$136KjZY$dt6He8! 
zhwj656xM97gdcRPOI8lN9tZe}*#W*6iAWZr-8%G;PUBobI!t8mP$^nn@OIpA}Eug@N??dyoRft-~{54jv3p+=KKft)B#t9vkStdmto#rWog zur}L8c;7c#S#kge_|z88r|7M~+Jv@wFlxtcVYh~J@7wAx^}oK7P#Wio)2#*I#+Y)e)K^f?+|G6)ORnYK zR*W2YwGQw#G19vchDair<5KjKPUBobI?ToII5k=|LfaRHKBJ56bpubL4mltP)(-I1 zXb{_Y3XwRC6BX!(k&EI)wbCB1<7k|#OshUHkZRA)abK|((Ygix$#b}4mltPaDeYZ zgV@GXhy-bzl};y&LiPD;S?4uEW?ybC&7|^tJt<)$n4G0ahi+W=CQ{3X_p*T}U56Za z%?|JtFw(mbhDa8f-8%G8#+>Jy^AhHxv@xqTQzmPSH&#kY7L5~iXsnmd!FVdV@m>i3 zn!nom?627YzB>(K8&4tP269Rw4KOw_-dHSIyc%(R7*++DG|+84g-9ZnQ>17q$ZAJA zMT2-pL5Cc8Jq~cgGmP}!3o9ZieD;Zy1{wgjoGK!R=-vz{x#iUeZF^XeO~G=C2>m$V z98aNc1n*@7kBAOA@bnIFpYP$?xQ+tjkN)*{7)*R*c4w99yjsw0jH#NN44*I7NG5hr zyCX}v6gUS(=p+tmp8}S`teeu2#L`s|&cSqg@R2zsi|WGrYX+A-d)ooNAe+RtEEc>}p06h*zz*uJ$Rh5; zwIi#PE3XkI=jPH+kk9UPN~a(?B0>+sd;1Hnv#-+uz8V&WuVYALtYN;nLSz@T+rgzY zuMR#3Qz)8C<`lJJh#S%FMCwMcoRv!(!u#vQmp=8L1AHmg&^}x#B-vzmW3gnlp?!F^ zhy?i@NvDAn8jlbu=D0!aNTgyGpMyGdBE0W?&9(m89N_D4i1Bp*Sq*?6o+6!b$nbSQ zNsM4QC6_ivU^!8<807%9BZXQ)CMTxTjPU;2Ag0f|aG-qs*W7Nel&nK~TLO?;(|dEJ z6y-IA%yAJqN#yW2sp6Xh*Y*@jB$7Erik5`;#g|=gUylRa|3CBcuM}X?hIu_pB*Ox2 zdrZ;Xv0dX29D$vq68jnz>;VxDe&%0hzn4Bm=^TPYY2e{kk zJl~v`FKK!8QQDnF(R3823)18u{0^0+Z9AylkyObdbBYfAFd~E}U56aFxC7j!n=oH5 z66xl1B$2x5C{7oo$)OM)Bu?9IVs|A{C-3@2wCYL9{QI zqUjtSAxNKt@H%!jr2*4qVNFa#!zRe7#P*56;0X z>W)Bht|m>6Bk(w7+79BnH=T+>ET^Q?MqzuL@JQ*91J`kY8*)wW#tQL4MElaIn+xGV zYV%Td*uePaDN=lJ4rWnz5RG$X z>2n-`$0^fxoYg)BO1sG%Po(;q?X0lQa1fp>9dh8l4sa80(z~%pN)XZRbV}z!c#s-> zjwA3OWqS5uIV+0>;*@qMPp@;u%K56RATEoZ~>bGdIzFxJYgg(e89gkKphKO!9+@rfbbwqx*bR0 zLCW-;OXd_cy0RnM7fFdVw~Z_0GAx8AOotp8=Kwd@CbTV!=9N^ZnFnlkD^az}Txl|tk;X#^oI}XBw zl<7Gg%UOc-#xSzFHw@)HXm_onBfO6Qmo(cBaO3Sk+ps1S7?05T3I-?G=~N#C;X$%AJOaN% zHR(AQ!sFCv)sF7NNYs1)Kg>e8;(b16k+(YJK+pm1BE})T8v&$8$Q;k5fk6-+Bum3X zIXps~p2yKRU72nfhupRVpmRuRcR=Zr*9!SMVNr)1a63?Ltp>L(rx1zrIif%VgCIOY zws<}Qze6=?JDtFz1?kyB?{*9#Zgd+HDN&I=pRle&4)`4S^EYkU-x~q&O-;V4jKBMz ze*=TbP!#8i)9q*u57IB5Z@#vXUe1zej`HE0Xb2YCWk_JkT`8O*lsz60>h4IcO+Hj zJfF`wr9%$b9Vj<#V?%y;3WeaV+nrA7O7;1K^^4%W*t;hA<_f`y93CM>hlB7tRF<|a 
z0B^@2;wH8|kt%bZFXo)pAqOlDaH}v5>D>-M!3}43V(EO&^ToUb+>!95flY=t7E9I$ zET?GF-cSe+5~uApw~c4fu~XU|NtFfC=WjWyLk=7|@Ssm)xV;x7-+N2s$#|z?84Z9R zo3?%sI&5=+l} zB;Q;wDJKxV6guR#F`%gB!r84>n&EXQga?V!wguYu7zB-!c85`7jr92n0XpQso&yiJ zY3F|4q_<^}+;MnEoMD7^R~WKxAjcES=i>DzKKiH(=7R`4PL`hS+%`s{>E^a25&b#I zH|NCYkOON6xG@;*ZF&1F>At)zl}B@Sw%Lj6!oD$?hZ z5-<+M>B=;0Bz9LAvKC6aW6N-MOW@1Kpb`^|LT(FuZY!#1S;k)uhQT zTz3T{eu!vy0OET*pRW_9Lk<)NxEUDjZF%D?xfSdc?=0GhZHX*R9#T3IKnXkEOFwQb zCnnO#CaW!r=6M?opxAfd5e2Gbfb6l#lWPy2!De=78p>2;uHXX=Wg7ntQYDYX(4k_&m zKzWbni*+J(ymAM)2N;cQdE*p`mE?O9Vh+*W2q^8@;hYjl(KX&nC%nc{JV=4w8hEe$ zGqHo(7fFdM8t3ZJn1j`h6bkw1eVB#v0^@sciPZ6G9pLIS8uV_QB5{xBi}g~a0r0(3 zB&M+1AyRrSgfH_!K+csdtyaR9x8>DGX?IpBnoH)m8eKU+?MtE7A*dYz$nL>>y-u)> z*Xe-!X$<$w0^o;lNnz|^e6e1F9Zb$mDQ)E9_XM@lHm`0nXC=}@9LR|Z^y34yFN<;x zdLO1ByTcB|3J2`El4LEBkyj_c4& zBAg>qbmBvFB#WYltnLj!bC2Yk>%{ALRSx(z?c67O+%~SGz*r-lm2%3B;`BuNNeAx< zg0vlHa-stLM3|hFOB)eNyK|}AOYe3(f}0HATO?k`>v4d~&Dh(S@2!(!1o&ZCX<$xx zR?^`d6r`mSP@Gfat9#R_7>9FQfj0J7 z-MfxdgXEj1NYU}E4hZb_9|FDz>EJyr*wcT%lKbz)@L#V+qd7oNJ`jYdvYYboq*!>60|)I$hpe2>c(|fB89TZoT5WN zd${geM`sV?d+Vg=cs>WprX98yymOT|pRTjWDTWW%U0GB)nb^j1G?q@?69j2H7r&#` zXt;~gj$leKQka}5LQ7p3-wQ@(596Eb#Oru62e_t;0`~e^78itPv5U`60|*<#_9U>%y2+^Go`4lUlXyMB1MOLxb9jaIGue znMYXNl}oLYiES@OZ|UGYL6El7*?W#4oo}+bu}I2#{DE(KvpI-NVRE7fEp=ghFBqK$ z$=9cd*YPwCa5WhVq|e`$0A9P`91NzM9kwS&(%T6rPA@^*=b(5*6`J28Ij7cRli`g; z6cWjtB1KDk#I~;^vIcl#g?Js$;Q*JCF(>)noUkCSyV5ClGO_LD=q(+*CkWE^2IY zTx&a-3cvEC9aumPk96QzEHohwaIc^mY!7bF0wyX%HS!i2i+`_GM8ni_CEynmI(bC7?@( zU>%R>fcTy0t6llN# zZF`JekVqX5;Q&{Pv4`=^b>i(XF!E#nE zZMgXyNu;iw*j)uindAm@tEm4nj06l!&`+7WCiL5z;8Ilx6>G&bjZr^vEnIUY&5 zE0BENL$U5Nq*u0Ias&UiTmBn}FMIXPS7L^uI9;5EbFrMIMq@r!_hwPZf$QFs!zu*m zxS9jLP5b%#c5a?ISMNTjpKYptp77^9Hg8v%T5TeI&Uqr!2|P}aZnJ2dt3zYGl=j6_ zs29|}_`^a3=(vspTn@(G&b+ZsR(vbor9j2I^Sm@rOgjd9f&7cZZT1d2`?gOr)hppi z97p3sW%|qoa;_Tf`EcEpMHL^bd$SG;5uoED4sb0P4S+XJk=RA%VDQxHSH4mjz9ZpF z16HcvZv$WYtgLoSP)LOEASs&6rE$6%9d?1*5lo3*O1tB!H6IHdPU*Oa1LZO`0Nyx7 
zZV=b@^r=t4UdPJ~c0S>!Hs-kQoxij_Xp3HD%Qbbte4T>SASP$Y(qTG{(*p z`U}R6drPg4Z=Vg`uE#~-_R8Dfx-{tBK1Hq_&XLIJ=BZSlPfmP9^Y#Yrh52Uun7=~v zw!{9%FKeXFSCGx(aJmk?rE_?YAg$V2-5ZJcIGC@MDL2RaV$N9|S8#v}(FXY5SrQ|- zHs(@g8grg6<}U$5H}G$J*rc?3@uls$(+*BbhjSq3&e2cDa*8004MOnm(Za><*(ui|TX84;!Vp=HaU@?WQ+)yR#ajY&+}D9}hqM{Kca~f?v@MBKZ*zKE z=BbZpia7(pUdaFQJ?-7MzwPZ5s#nxgG?&bAHCh_S;X%su+~D^7RK3cJ$+zEb>p{#K z!h=?*wSf8JZE5(vaC_y|xdeOYK3pf)f@ojNbZ=N-zK(f8W$+PAxig1TeZH$VZ{SOB zmA5BPO7%FCQxs@s5PpZs()JwSWnPS3ysiP>SRr>1$=9;fTEKk$mKYs(bO8Sk8NL}n zq7T}ZEX18)uc!|YeyTpF?rHOvo*NtZsj+i6>U%#Nw2=bmpa^XYh447>aw+Gp4QSg> zA-Lx7h!qO0VZK-)M#mi;!2eBjA1;!}uzT(*4o*5B(UcE$AF$V7kktnMZCBTz zdZj&G10j?ZobiM0t(k^cAXX!hEqvn2x(Rfb(l^TUJOcY|kY;vdLhtoNp3-s>`@9&)3y2 zwo%{wWx+QGrMptzUY*Y)y|SGN0fPuUPL`g#h}{*8csk4%f|OWbeD5uhI?i_h+btwt z#E>-yd!4i1*pMHdB7GFtXM|sD;HRP&qqp*V&)%b@EVb@5TGu zOYhpE?{OZ@TnLX)qsd-S`{F5-&hxn-`fH4Dt`Mo?Ob0OB!t;3yDSIT}TqmAUNxqm& zXGb%9PhV``|Bo&~Y4_lowjZaLTY}d;J_pxP9s%S;O*-_k+L1-AIgNAYh;Nd7bCF;j zr#XPzb8a8biQfeH;UbBQ3xOTri|qk`x5-~F_nh!$rMw2wZAqt4B8tmp#^E5IDpR;-G>u` z520-gK8$k4tnHgGF;{KO}JZ-X0pf})S;6IpCh@Hu1wCFpxMCq<|$Hi40Qm9jbJB? 
zU<9+le$n+ZcwR z*e+B(^;aO|e~UkAT{j z%Z(Soa!M{eMBp6HrGZ?$Be>d9;P4jnq+ z97&;K3YN1(=qCm5=(I7udA4MwlTCsc``X!leX#S;M*H?L`}VtR-MN}sSWeNQpF6T0 z;M0JPbO&&?j6K40wreZi6+D&R+e@FeF1EkoO?&xg0q(=dKD;-Vaw$ws6rrU!-Vxc{ znIA5as3X?_EdA==d^?!0@y=owtGj{`zdzXd1rgg{J=;&-y>rVYVmU>Mmc;g4y53if z3tHLEgtrU4v(km@u3*H)cGDlEKI}6YF7cp;wHZ z-sX6pzM)6s4G+pd)g_neGJ>V zz9V+mqcv%}i`8Ah(>;G}wwDge>m1s~If-qIZ=NmX?hC_bs*azVV6S-Z0<|M}x}c{4 zyirJO7u!GGx0fFd?#wP?Zw41$EndI>%^Oy-w%O@A^k!suBWx16^^M}vGtu?Fp9Md~ z_UXq(Y!}-<&vppUU9m~*&BcPI#VZ)!8$_mKIYp4h8qhYLnnc9*yTYAIAhwI`i~smt z;#+R8(a%rD>C=1)#y9bfcnXShMd->%?{?Uv)6aHNbYwZeB@o-i_C%P^rMSDsf%hC` zKEirzvf8nTaxR(UYP8gbw*BlR)r0pEpGCG;u7jBkuycw4$0PKv6iVoM`?LNXNG|R< z1Rhs~kKll|Jq7(ZkP{W?$4>8dK?m?qY<~c@zufHw?DdsvcLltcZ{mXF zn{SECW$$P;8V-Wmmrh+bnS+Ux?uE87eo_|OgSOHm`J9buEW*q#sa4MnUL+b6bztOE9m_d62)g`aJV@131K$QZ&G zjpp$8wIBUu!6K--=&=^x>}#B2adOJ)b~@U^r31%{skdxd;C zb>?uTr^W4JVJ!t+(6Dvq>~7!dvhsgr?f9}DsjYi0tnOL zb^!M*U?+yiu0I>?4MzX7hu(`n-yHgux|QU6lNU}7e@gRGWzOgL99JWFpDxM(q|Z5z z{s=P1HEApt(QY+5>E&}Io^n27+q0$;HntO~!{-3jU4iYiF^!=N-xR0l9?#e7rO6R{ z_!s3k$u|{>x4ncfS#t=Fn^P-};&cVNa??0Hk!B*e?#iW>ozm{esmc-FiDBsQIe>K+ zVf+8^$v2iuc^=`rz+O?GPWb*mFx_N$d$D+XNcgvxVDMgj{WC$`bT~%@=_j4fK|xyT zLbN-WLT+N)6Q?R-yN}woEF8W&+vx$Y*TLIqY;Lo&7E6hWYsVyo@Gl~5Cg+|l-fj}U zoLW=&v=thN&^R}jW`ZbAPp2g}uDcQ`WJh!)aw^osI{^$G4hQhf2zJ5mciZ`DRLK((#n$B{7%Vb~TKxcz^5j=^ORUpA4GGMYK?f|zT1#j=mb{d%Td_5;%8gnuS=csK`IB>jr+$`EiLGcI?dUFulo`QZCpCiFjtsS;=styCm6tZ*x_wLAcs;^AamBK(&Q%LUrrgRdrlaBUI*#( zPH3zb$cgb(&*JbP9XfPC+n9oQFP|gvQ@Ne3JL~1FiDe3!Ie>3=uoFrAAO7GU@X9dK zyAg)SCd1o{rFu8v&jEY=fmjg!dQ>mJ9hf;M>ZRuNmWXS+6mh+Zs#v#Vn0c5q& zZ9H2VXc4~9^-P{GYF!xYa2^8}IKA3iI?RH$MTc^OP&`7GZY{W;$B@zpeV+3(}td_S$S0-WR~lK z^_+lSXm`bz9;Vmxx9EEnuZ?%S3WE29bebFl^SLZlM&Ng-COsR8-4%vx5ZadXsmc-9 z%f6jR9bFFKSR&i&1|Hcp)mdS^3)H?~s$2-{m*2W$*9mZrm!Qu%heymQF_gpU;xybv zYLuf#9#&WexKy;&3z+ZTTyrtucL;@~|;fmTN# zy=Wqy&fd|2^lZm#0x?vs-?EFm7a~G?7gV7J7`!IbfA-4D6x+@lYhqAqF;2nr|r|^ly_6E!s zr?BfZEN2y>XFJ}f`=L4h;~)PT1~Iw&>d|U6+>7hpcuE}dITA4G+t^N+j$Q|__mJ(x 
z7$385hLP?DwIh*oV*3K%jknm9h1@fBXdApw*ZOD_bRWK=Gn&SU`gGe1YDYX(BIs`C zPT~fz6H_94$)%It;&-SNtvXoUn?jW~pCe}{ zaj|{mFAxqt8Qar-;*JFQwh_)QSMSkvl2d z*j~2oBPsZt+Bht>kNqLIHrshz!#%Lr6<_=<4cl?u6^U{Reus+Cq?^^fiPUQFIdW=J z7TZVus^D;&?F4K-MYk`a;CO5vNulCcTyO5r_ZG$W)O|Y_w+q+3!RQYqb5NZ2diflT zr(%TRd%2UW*#6*b=U0I17-4m9F6A!7c9?%{ywl#rH|q2D=(ZQvz44R?5_nuXy#-m_ zn@+_&KD*Z^O*7cbGZ|8JbUA=yqxbD%J0>^&rpnz<(cT=MSDw>qFQ^^yR2c{7pfbJn z!Z{dEMK{Cu5+_Zuy^GZS!I&(z58vdA?HP|{xK|yZ_N7oO2;mXwbQJ`(FP*x3l=iJp zf?|6=u6r}^xq)}i+8?d&MIzGsTqP1ux#76pd%C{4F19}!+xbP{E*in-h$b!h;2g}N zu8-k+S(BjH-b?EKczoVtJE!(=-L;NRgw?&dloQ+Sk9x4{%Fkg+Z#3!KfxEM)7~ybw zF8u@%9Z9G99;hAblbYDx&+6U`d=}fs{uXH89^SWem%1tKPNZl$qTPZt6M=Ium+B6N zZ>CIAJzyu86o!so2QW=+7u&DAb>|j!^EsGEX*Z2?6X_(#=SVu$H$m-KoTU2MPKu6R z2e4Xf7uzq(cJ9w0pM&W%kV|QY8a+hFoRUiq4wA1^CLOW87t^kItlnfh>s=4k8^K8R zJ~I!-Q*kV=H~-9ZZ&A-=0?%Z)ltDNL(`mp#PZ zaC4@>IW9scT|iC@rgS2oBT{s-0Cn%%NueL?+PAyVKK;YaXtwjXhAP3wEM4i88;k3` z$1j`f*WR~ZmhD``IFqv!XeJTYb}8ycXq=l%GZvT=))->D57dqJ5C|k z1?OP!bZc#5_FjwE#yfUh?axwaDGSS4I<#bGa#kcobNL)pqo2d^rH^lWgIqV;$<@*2 z0FH_6!+!|G_RdG{+?H8PPSl~RENB~bs2HSiZaV$!aof0#f&=el>F9L;)5LbM{ZjjO z?#Vp_!0B?jLBDVKZ+8vMEbG-AG;dt=UNI@|8n58Ry`Uv-W z{IXeB`}UrFJC`pJ$hlJVmcng|2(^MV&P}JE2DFW*5E0wmtnN+3?e%DY+Hnf~E;t8+ zr;oGA*n2IC?Pj)f?Q&_Hu11FzG6!R*l1t{e8Z8+C-Uvgam+ho5bT}Nqw?F>Ber~Yh z0C*z>!62@?(kUmlKjObGj0=~J;&efp4079&PL()~6BX#EL2UaebR2AVU`;7FfZGeU z^OnU1t9wtO)&=KaFy+K{u|1vYRgfFZ!skc~B@)RTm!cm#z#Eb1i0!BS?%?*5v7Mg= zKGcQqu4giaXEI!}3eW3L=r)4xb}r&^8s{p|QiIs`Q|O58_hozT)}0F)+=}<6Q&DU; zZrvxZF7Q{}>EZfFuN~5Do6nK6i06_ys760_dbcBy>S8`iPux`C?7uz#`8ZN(Y zFW;c@dY8WsN$HBsN14!b1mHDyZhn~uQJkJmS8Zb3&mz^!b|2Q1g#)-fn(aKFr%LbM zI}%U1OgIPsg3X25j!{4Pi{CsSlXahtb2v}AN8$OR&uUa;^1Hv~wiu|T^Vuy(Gj?OySir{R)#r>i;D*YnB7*%*X+N-UW9C~xU}f#sC9$huLS zo=7u?#I^^ZCbo<1&$Dm8B-{Cxtzq6?A*GHi&Lj9JBwzGN=P4xL6hSVLX~*dyv@nbll!!d)C&S5)oGS=2Gq|Y%kFJ?Z5pK#-{*0 z6Xx?;G!Q5Gp+fzbOd6c$?3$Smu|ns2Ku%nzii5_9DKu~hYDWOFV!IvA!ART|+lTxX z0P{li#p^vF&s9Fea(Vzo>sWA}^@kw&x=A`uWcZ#`@hy?ny;3ybK=)w^vSNFX*!FbX 
z7TfdLz6bU>=U>n_-&zlvbBWWr-782B^RZ^;L#*%-_Gp~GPOV-bC&p9#5Y&zUWG!qb zjG-gw03J5jp7L0R61~q=BJt(M$j7zuFS-|q%^BXFe+s=P2K(ZNfD7~xBwsX1+vxyr z2vRPQ)m>7Qc5vI0g0hkAL98hq2XOnD*v`lK(6Pp&EY|3soRC+crX<$l=j6SF1BBa?UAiJ zSMyK*pI>A!@j?4A3*~84l5b8*%pRE|>qU9a$wK^)?ZoIvaR4U|pUJq{j(7q4IJ{Hhfa-J9rxPB*bdv80mgbxP zGbr4+6u=wJ(CS05ojwlo^Dd2)t9G1MC&@ zS%g0m>~rAnKpr<5EWO|<8ooZ>X>usK55;L9g6pnaYPq>>Nkm_4H)1&@48wErjv!+V z_KJ7T9zJO~44A4^TstO(knX23q|bSKmwEXJFFSeMxFEgTEz$GE*-nRt>^&iXHo6e) z4yKR=(e4=Z#r70v8$~cYk?pAMQGLEn*de|50?3_)gXim%BGpCsl5!WA?-hMOuovue z9j}ac+B*T=hb3skjca=%Rou|FCsL(>ch2fabO1jsY$t}u_J0eTfH}b1bAtEi-B>3p zhw5|A+X+wP9$@ddF1G*p*QEbOaHkzo_p|_-u~XU?NeMfm-H}ug+Y{;Cmcs26wxiRA z`T8s=4UDf(5uXqC3i(6AUlZ*7riks?Y{$~mxNRv!KR#j`v(R_rx+{@Fdu-{IIm}o{D#FHVbmkjG-kXs2yR5cOlvx zOrbg3d0PjX%ylF>fS*COqhP`KUJU7*Ft3XQOrzrWgh{Dbp2IUgMC;uH#P&S4bF-ZZ zZDTFEazNXdg18&kU5V2dc{UcgoYIl#0DeZ;PWm3S?d#=oSD4Qy)2S0+zFDH&gm>q* zrGbZKJAbIkB~E`@9ucG;BX3U*qcJ0^yTXuNFm~MAMlDY1$aMfq?QEYvmLZX02l!zm zYICYHpGs^}_q0VCu%PwsF=BhtzMWg{WMbRP(OVa^jlsx{gZW&!U5K+favi`@2iwUy zWVIsz*@}1OcLqDZAY%J3eslY;si&O9=g3YwE0;Fxq`gZd9U6&k3`1~^-=Xt%Ezau5 zbpT7d*j~2o4sIJ$=;!S?ZFAlz^eAlS9~`uG4{zP^@Giu*7p22BwBLST>KCVTGL+|4 zoF?mhb3%ZQbO&&?3-81mLG24e|NcAn0(`Ok)$H53>+FoZW90Ph`26^F-*$@()HY$8 zyV$P6Ssf!Bz*w<;_t)f6@czjA_AC5}D&K-T65CjsCL6U5(MZN&`7_r0X5SXmS(I!9`@(G|pW)EJ1*dK@Q;Ub=b~N z0R3DA?_&EYY`^2yoty2B#I~2F&mhJ((~-?3`9jU2PmGR14&bew?L-o9+=TnW&==dg z|N5mK%eekHm7C2D*n36NX-U;?W+n9y^RoKp>9&O#9 zck9l@y%(;#>e6Ql#y3R}PlWkg%A!n+j-d|Vu-Lx+gCe${$ae0-tiP{r(V<)yWA747 zlbcM=T10;i>VR%y8-0HQxp>0&tdGns&I)f!ghWUrtjm>-MC$xPVW9Md$ct0n12nD3mrkX zBbSPIB(||MO(xR2Ek&(Fw%gT+)Nz^vxNX5ZVZIHyJ&O`zd*5%$1G1f~?0|DTg}O$@ z-X)Co+BD8ROF6OqG{)Ym5La1j-}p_@)_uSucW$;grF{yNz6-RCMQJaW-iK;bOu@Td zhe#bKJAmhhcqhib19wMKWdpPJXDNx1VtY2*xwt+oXJyfVk+dg=(byr1(*r0fwx3Me z^UHCS#rEZIir6l;f6Ga~1&_=2t{(`3b)4_OMet4=OU5Q{#k-`$_EffWWwW;55gpn% zc~5>=j+WB*0c<)UoUdQUwxGE?8_2KbGNY#%Y|ZKuc}jo_3Z_ zECe1GGkw>Z$sN#m@l?}?q@}8e@3?ROTn#v8o(QcXk#3=Ey^^| 
zKzI8Txowy)&JwTVjt<~|1Mi%*Z@=Aj8feqIeU|hEz1wez*KtP&@V~)!P7&aSwmp$5!h6U6K=qP;oXSPaLh%S4+B=8g zn^mYj4%$X#dT7(TeU|h#%;#qb)^T44xCCPR=YCeSb$4yuxsgTyIZ=}iH>rEhBGruq z9u!6cZFC==MPW|w_MBiHcXxoR(7-!q?b~m6Bvn2K*u#e>;{wF?CuKW76WnI!kbF^v zx`T*ql%;_~bhiUgXv2JQmUta^cYv#~$97Kr_1hVbWhf-JcRZ7M`D>7v_LD9Sr)SYg z;s$%16n!qJd(KR{k~ttm`t0EbZA=yXo znbW&HCs@aI9N=h7wikp zXKm@ZRYpU2oIcH;fOo2l*PkvcBu0N2IBc4CP1 zJ#}ekA$tk7f6BV4pYtEQtSb%!dxP_Q_C3^d>ID3bF2Sc5hj+xsLEETI4@Pt!h9TA9 zc|Ap#j>|g0)p_lB|K#>}f90>N14G`9HFD2cIScFy&Nm+2V903jo}kaCcw)Bm=?;l) z3_z;VyFDRN$7LPh>KNHRX!lM7_uRUF9_*FyOs2-p-^Sk$>n za~=oI0q+S__$-y}pIzCP`!M)+5VVc5G;e{nF$T3mwiBb{`VMfJ&SyJCq56EVaw^^p zV6TKfV;g@-u$KolCs6mC64O2Q>G++uTh>6+2;mW7R5wD~7>1zOetm9RI^YQ<^?F8s}EC;w`qw!9OHNzV##7`q@-=48u-}^0i+iEZ29|7!7@K}bc zW8`xrj4Evur=O*wk=XVyG{yGELib^}T+?2*ckSNMZ(zJ}io^uGj|6)K{?uw27RWyi z*dKc9&J~FeyDOJk=>Xpoq?Vo7_DEzKZ0D?wM|6M-Hv;d-G8*(gJXI9@gg*uBmGDg7 zHp~a#g#3#kWk38tuY9PXf7UrdoSwVj91N!19*Wb~sbwd&F%nII{Snc9*fUo)o$Y8I zqT3jNln3vpgM9Q#2jpYeA|QaorV$tk{0Y$wv&} z$}$r1j+&9%#<0?12jTAu_6qs8Z9nFAz2couF^q|_f>qRECT~Yan6Z#(k;Srj&8l<#4 zozl55U#OvP$8}dE%3}LN1H92Z7uU!31>Vu=C3bH-C3f+CjqUpS2Zf#=5$UyO`X9mJ zbWQqnlR2J9^#zR+-y+_HYkM%t0(*mvPjHG$9K<^cy`c8RQ)n8C4g6KXUQkcN6_pP+ zq4{(Urwh_#oXp-ef!LI-bJ;E~T68W%up_wL6Q3Kg0XA!4BT( z_Ti8|Z^FVB;+cnr(VO_^FIlu*FxbaS|dj}p#r0yn5 zueL~^6_t-Tq5ni6XGzgj3X>B>XekTtNcHZ+?eU1$F(*8WI-bt~E~}gE_u9Ib-TT!@ zzU~Ka9}(%bXZp_qa+VG)B{DfnimtBDcA8x9ysYr5<0&2B%8Ko4EN87?Z$;%JPUt@i z%W)l=$)a((4!s5OjsnBJ3Ga$Tc@FkEFGZh{Z2C|*!1aANywh_QeuwJN=OZG$_DuhA zCZ{OSNji`d1!>R6b`r~gw@kW-Z45@X!hA6)rG>#~b%5*paBSCOnTp?`6Yh-^GRH;e zBoW0Ur0CFycOvUp!`4V_V;F@NCFXBAtK+#H;1av>PHyB6M%}s89lGVt5~t6h_#G`y z|8X)073g6EjdL|=vcY!F)-k8+Vor<^+LkcX7W(tIoYnF44sey-Y$uU)H=-kn)XgIB zI31cCjo+dA^dBU1Fr5Z+Av{QpJ{N3frH(mN+jCMZ+&+v!r%{NrI$nbVVr@`LY8jlqk-1ODHIO5-3}-{G)i!a7#**~fpR|x z?}hj$efoFv*_}wyaS$G%Ov4s*TVjxE1AOnSu?NC*yfz1f_d!1pe4B>i zcc?hcw}%_xjYKMrAn+hfy0t*t7=zj#y&LPs9thL%IvsH1oqHy-19#_AcMy;hWodFK ze#eQ^`B&e(X%Au!M+4*QQ>YDs@CaGDHA34ShGqlf>r=)a2-ESJ9T46Vsa~Y`PL0Fw 
zIAz*yQ{8?Rsl!nqPuK_o57MMtJF)GN$gTmtw_@*sNFA@)0d4?>M?iRlCareix+@s*Im6d;0=gO?Sce?IfpX{c;+?F>4&0qf-Eq4yVh~s)NZ{DO4Z9;dD*<>|%9qF#2nr*OlJt#Osg))dB7Y#zWwpK66n#LX9TV zIh-y?lUbB@>(E0wigN|&u$R@2c&b#G&nLariPs?q77jT6T$LNiz5R|PQg;N96E*2D z9mTnVbm#_hVj^wiqBvKL_8g#gq)=#%^fGr;Cq;)GSUd12c;^OAW*M?q9z^Y0y#^K#)34? zO{bqM8Yk+|m7CJOL@HLc`>Kyn>d;j#nN!s0DiO;m zQnchlbR>(S;N4f6EFE&7?Ep6k!~W;01iQE2kwof_U~-mbv2p&Ih-jY_&7`0>U4+I` zfSe^lKM_Q`bIaZ0NGDN;9I!Z0?lB|Y363DzolD&;4yWtT+wb~w?<_`fwY>V^9Lb_? zI+L>m=`9P(aUGg*&^0B+U7AE4a=_>ScMC&!cVHji7F1uqE%uhz2(gX16w2c8ARRi4 zV>zxs8!41_h`39Ws6!5PIbeM7&aL&|e)>Ova&N+YiIjG*cW4SNtx4WqA!R;{xVDEO zJ_yK(vNRbdb5MaE986A0ahE1hha7M?aENzqEx=v}Z+(>ZWl@eXl+||e(o7rUma|2t zyxIZ27l}?Hfrm=bY7owmbgHLN+947tO|A|(;B%mXckZi7^*UjZ;bV1I7FB-#Uw*?V z6?wIDI6ac;7H;>()Q`qwkKT=SvWC()SDbDmaE|0s+QHMSG#`h%cY6zBZ6vP7#^Q-V@a5If7_+E_Jg&-K!HR zO|A|(5OIK8XpVQ@GS)m_tdy``Xj|e@o+EuRFLe)roE1Ph2dLvI|}=3Bzb$YS`W4khB0 zjw{p;%LLBznoiww1nJz3XkQ`~vq0_8aaSi%haAXqfZK`DCi&*sl4z%QBNC}ik~bDh z^-VGd7g3I5IbDIS&f)N&D)jH8v?GgJ4j^ZxxT}+>Lk{FRP@c?e0{n218?piI!&9U) z8VqlrD(Soid0x|zO2=}RAiWI&?+LOr@1V3Jg+f`BcIddPlc+-ujBp@`ckawXjIRU8 zYNOkJ7AZzWv~Pk!7KcaZ(B26U9#?|?9jxw3p@aj-St;)7BAwrqy}{^bQQD{DYk({ra$vLralCV*Hh5l75oW}- zBMk8}?2spZ#7;Y1lRi&?@VFB6-%D&`e7WL2CZ}Z4K(9{edGwcB`f>mVxY>euw`}04 z;vlvorF?e|2~Su$ga--I>NxP8piKWpR_}$CYr6T`i`K+=`sr08MTZIs7Bk zc0duSyY%^lQw~Z;Qc5#>gy-xa4v&zf+Y<;pt_1z>L3_SlF4^YmZ~ND7dxQ91z-Gnw zmL^_@95~;B@||ka+j172In|l75}|Y?w|ppG=aA>^2m%k)q~~$$JwciN3&xr3jXip| z*HO!2b(cBck15l(3UI9@$lnU>$Pc zIt~c$Bj6m=gc2k0J5G~^-MH>e?BZ<%@Ag7OGIYp+t2yw*c;_1z#O%G&>3kfDbCqee zm(P)S%0-B6$)!XY#^9@cqt%ZbC=PHN3-9-0_+DKuN-lv1snKTy)V^GbTBz%YL7DOK zZ5oy;#ORO%S9YM>()HnaaDrOaGxHQ3`q@M1W!LF>5RDUMY0?MhU>0>9+-{^GSVp0` zk?o}DkOS9upxoO*s@HVXQm~vNLNk|Tc%w1ePULW|RJmJ%h>oPwK!erxQ{{uqkv^X{ zs&Gn&9C#E5%6(q>9GM`Mg5@j``e{(~ny2VF7sy#^G#0^fN-jOv0lpcD0%K0{^?a`i z>pJAXV>wW6_z2Z&E>c-!j_c6OH38o4ingk)%DPI) zeXK9Fx|IXPfoJ`5Rlb#NkY4aCJ!dgFMTdTZOioOvnM0D#1Bhp=0p4DjS1Aetg&cTX 
z2ZZ;Fa2}YXFgY$#u$D;XxYV4wbB3?yrTI3)_s-TTOmxVBCve~?@y^%tBG9(CLC1SChv&fwYVl|0DGGFA=js(AX*fvc zU^)$qz;cQvy;YLWC#*9p=st`oQqS?F)~ApI#es5H2dQ4uQA>eyT!c=p0BvIrbQ^?o zB%SIh5FRH&hl8kIu#dhDIq-ZAaAPwnpMw*m;$)61(1ww#7YU=`2%p`#)E&p+x$(*7WJ*GJSZ+`!cQRcm`4zLfUaiVy!;ty(XfQ0_UJe`8an+ccX#QVWE2DwdmMi6WKddici!FZA(0|I^@7B zbf6%jg4#DhBuM6XdifyrwZYiCoT9M^&)vDy9SYs?@8ifE=g}l<%6G@T9x<-o8XCu3_F- zDIc)Djw5_25hrt8flh)19+XaFy~MV}7w79Ot%)3v1J!}@?eGzvTpy(!S=4G^+;XaX z((;!5E%EW^D5}8Hg zbRF6oLFr!2;*tf_y)h{3kOOj{%YpKJv=F{Te)pfal~LaEY9Y2U1~mt(gDLfcFj;fH zoK2^3t{_eJayUJn9(svwj7M3A9FPMJ2MWI0cqgV(eK9HNycz+%7luNR((&~AS(w0i zUUMNlNR2+zS=}W_14dByhM}xO4#;upK#Bw93snhU-foh-u~@RoYlG+Y6p^`D&QhbX5gZ<& zNvnytHcFLCYyov|Oi@m`r6nQ<aKvQ$wY@7kOL<>P`-dhXxqbzY$A$B zNYP|Ega--IY8%e(v*k*4LHjVcYBJFw2jsvV9pF1?=e9kve8)#%IYpE92BCO_EZw>R zUJ}dYSiFAg%b%?u;@T3hP`=~+tkQBo4paxq7t_vddnC#!C>|j~ZzDK7LX%dFY%iB` z{R+nS2Ei73@5Pi_oMoaz4#5#x zi0pB@yKFVjt0g(Gc7QLe5!zj0g#$qvr>E1^AQb1y7Wd|_4OaJ_LblED#o2iq z%yq~CIq)bBlrL`=x{bj_IUUh%LHfysaJpJ?tQXh4@l;s=eDiH-hEq&*$N@R75R+N2A&dQ>P6eh<-iW_b? z2NS72XM1U;Q2m*1W_=bpAO{u>lrMW1x{bjoXMx(MLj!R#yA_JDyawSMPp_RY5!Dy> zzuwA$=XQW^y^+{mVTCF_GRL#1?k002v3%+{lT#E*GkW~;+}~e)IyumApg^Dt-NxXe z9A~vdfzlC5_vV&Qn!@C`NNGkJ^~OD~Q*z+-I8cCKgmzaL$__XOQz+WYXGeT7jn_Dk z;|jGIQT_FNZS_gzfYE`%hc0wmf{Sbr*X#63GvmZT@4b{#F%IMuh0=^3zZh?Fg~@?e z>j12<5W6>~AZU%~y_FK*c>UJ@;MO(PQhVs$TdxW!Opc3`ezZ}4wO>_za5>QHK!q5S zUWT{h5d`LQ{k?e?w|&8zr8mJ|BI7hpQK)T;>b*C%;^lxGs18(;lt~-imb2*iP<&3- z;`5fbDNK%wlxBqVdUI+b2joDO19S9QNWO?6{TxJRRhb&z{}<|>c1l#wx`Cx52jsw^ z0}jqV9NYPgxFg=@4l~yw2jqYpIN5(OdV0OV_!4>O>C60XcB! 
zz?@(X#@;by5Z%pLFx>LtHX7DF@_$9O!jm zPCO%QPY&B-`@HkLd9NFamjiM@4m_U&3qqdE@O8OGKkK*8cM0@~9hl?SLiKt4=KPiWsgExQU-wP7+rRxix7+Xi>h1Pl{nYLD-~HU}_J92H?eoqzAAZ}0rz&)?qp@!Rd4|MAPW zcmA*MzPo4El`wzc#d+(2b z$L+m8{{y%8{^uXNz4wbhdwc(*U%I{jjbD3v|KI(#+x!2?_ubzA(I2|K|JQ!{_Ws}h z`P&C?ZnqD9{g-ba{5#)$`{4V3@b!p55N8Q z+&=uiKXv=?zxb=S5C76H+&=u@KfHbT55MX5#jpLI+ZR9n>g|jF{!iV$_@DjU?TbJD z%eOE7?3Zp|{0HB0`{?~2xP9~+e(d(qcmC|{qd)XZ`1NJ|Uyldh@SWUc<^O#R*J!%y z`2XJglQ(bb{BON^=k_k^@7_N8k)M3??#ukmOVs;s=kJj5FTTWo*X^Sp{*7;b_1kr_ u-uz3S>fGLc^J_kh`LBQA&9A+^_fqSlAO4m%|MG19ugvtn{Fy$nw|@>rBYCs{ diff --git a/grid_gen/global_scvt/dx/mesh.cfg b/grid_gen/global_scvt/dx/mesh.cfg deleted file mode 100644 index bdecee6a5..000000000 --- a/grid_gen/global_scvt/dx/mesh.cfg +++ /dev/null @@ -1,142 +0,0 @@ -// -// time: Fri Feb 26 16:11:30 2010 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// inaccessible panels: 5 -// inaccessible groups: -// -// panel[0]: position = (0.0078,0.1979), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 124, height = 56 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[1]: position = (0.0078,0.1697), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 185, height = 60 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[2]: position = (0.0836,0.2648), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 185, height = 60 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[3]: position = (0.4891,0.1465), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 126, height = 59 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[4]: position = (0.0258,0.1620), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control 
Panel -// -// workspace: width = 137, height = 59 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[5]: position = (0.0531,0.0874), size = 0.1586x0.3997, startup = 1, devstyle = 1, screen = 0 -// title: value = Main Panel -// -// inaccessible panels: 5 -// inaccessible groups: -// workspace: width = 143, height = 203 -// layout: snap = 0, width = 50, height = 50, align = NC -// -// panel[6]: position = (0.0078,0.1131), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 131, height = 56 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// interactor Integer[5]: num_components = 1, value = 1 -// component[0]: minimum = 0, maximum = 1, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 3, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// label: value = Show Mesh -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 5, x = 15, y = 3, style = Stepper, vertical = 1, size = 121x54 -// label: value = Show Mesh -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[1]: num_components = 1, value = "voronoi.dx" -// instance: panel = 1, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// -// interactor String[1]: num_components = 1, value = "area" -// instance: panel = 0, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[1]: num_components = 1, value = 1 -// component[0]: minimum = 0, maximum = 1, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 4, x = 5, y = 5, style = Stepper, vertical = 1, size = 132x54 -// label: value = Show Scalar Data -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 5, x = 9, y = 78, style = Stepper, vertical = 1, size = 131x54 -// 
label: value = Show Scalar Data -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// node Colormap[1]: -// input[1]: defaulting = 0, value = { [0.0 0.74683544] [1.0 0.0] } -// input[2]: defaulting = 0, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } -// input[3]: defaulting = 0, value = { [0.84699454 1.0] } -// input[4]: defaulting = 0, value = { [0.84972678 1.0] } -// input[5]: defaulting = 0, value = "Colormap_1" -// input[7]: defaulting = 1, value = 0.067314833 -// input[8]: defaulting = 1, value = 0.080266804 -// input[9]: defaulting = 1, value = 20 -// input[12]: defaulting = 0, value = { 0.067314833 0.080266804 } -// input[17]: defaulting = 0, value = 0.067314833 -// input[18]: defaulting = 0, value = 0.080266804 -// window: position = (0.0602,0.4692), size = 0.4297x0.4280, screen = 0 -// -// interactor Integer[7]: num_components = 1, value = 0 -// component[0]: minimum = 0, maximum = 1, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 5, x = 7, y = 149, style = Stepper, vertical = 1, size = 136x54 -// label: value = Show Topography -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[4]: num_components = 1, value = "topography.dx" -// instance: panel = 2, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// -// interactor String[4]: num_components = 1, value = "tpg" -// -// interactor String[7]: num_components = 1, value = "white" -// instance: panel = 6, x = 5, y = 5, style = Text, vertical = 1, size = 126x51 -// -// node Image[2]: -// depth: value = 24 -// window: position = (0.2234,0.0656), size = 0.5680x0.8985, screen = 0 -// input[1]: defaulting = 0, value = "Image_2" -// input[4]: defaulting = 0, value = 1 -// input[5]: defaulting = 0, value = [0.015844 -0.0369692 0] -// input[6]: defaulting = 0, value = [0.456141 -6.14723 3.44158] -// input[7]: defaulting = 0, value 
= 2.49278 -// input[8]: defaulting = 0, value = 713 -// input[9]: defaulting = 0, value = 0.922 -// input[10]: defaulting = 0, value = [0.0452349 0.492725 0.869008] -// input[11]: defaulting = 1, value = 20.1171 -// input[12]: defaulting = 0, value = 0 -// input[14]: defaulting = 0, value = 1 -// input[15]: defaulting = 1, value = "none" -// input[16]: defaulting = 1, value = "none" -// input[17]: defaulting = 1, value = 1 -// input[18]: defaulting = 1, value = 1 -// input[19]: defaulting = 0, value = 0 -// input[22]: defaulting = 0, value = "black" -// input[25]: defaulting = 0, value = "./example.tiff" -// input[26]: defaulting = 0, value = "tiff gamma=1" -// input[27]: defaulting = 0, value = 500 -// input[28]: defaulting = 1, value = 1.0 -// input[29]: defaulting = 0, value = 0 -// input[30]: defaulting = 0, value = {"x axis", "y axis", ""} -// input[31]: defaulting = 0, value = { -15 -15 15 } -// input[34]: defaulting = 0, value = 1 -// input[37]: defaulting = 0, value = {"grey30", "grey5", "yellow", "white"} -// input[38]: defaulting = 0, value = {"background", "grid", "ticks", "labels"} -// input[39]: defaulting = 0, value = 0.5 -// input[41]: defaulting = 0, value = "none" -// internal caching: 1 diff --git a/grid_gen/global_scvt/dx/mesh.net b/grid_gen/global_scvt/dx/mesh.net deleted file mode 100644 index 98eee9b80..000000000 --- a/grid_gen/global_scvt/dx/mesh.net +++ /dev/null @@ -1,852 +0,0 @@ -// -// time: Fri Feb 26 16:11:30 2010 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// -// MODULE main -// workspace: width = 779, height = 713 -// layout: snap = 0, width = 50, height = 50, align = NN -// -macro main( -) -> ( -) { - // - // node Integer[5]: x = 61, y = 420, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_5" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = 0 - // input[6]: defaulting = 1, visible = 0, type = 1, value 
= 1 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node FileSelector[1]: x = 66, y = 17, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "voronoi.dx" - // output[2]: visible = 1, type = 32, value = "voronoi.dx" - // - // - // node String[1]: x = 187, y = 20, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "area" - // - // - // node Import[1]: x = 120, y = 93, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_1_out_1 = - Import( - main_FileSelector_1_out_1, - main_String_1_out_1, - main_Import_1_in_3, - main_Import_1_in_4, - main_Import_1_in_5, - main_Import_1_in_6 - ) [instance: 1, cache: 1]; - // - // node ShowConnections[2]: x = 65, y = 261, inputs = 1, label = ShowConnections - // -main_ShowConnections_2_out_1 = - ShowConnections( - main_Import_1_out_1 - ) [instance: 2, cache: 1]; - // - // node Color[3]: x = 102, y = 345, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "yellow" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_3_out_1 = - Color( - main_ShowConnections_2_out_1, - main_Color_3_in_2, - main_Color_3_in_3, - main_Color_3_in_4, - main_Color_3_in_5 - ) [instance: 3, cache: 1]; - // - // node Switch[7]: x = 138, y = 463, inputs = 2, label = Switch - // -main_Switch_7_out_1 = - Switch( - main_Integer_5_out_1, - main_Color_3_out_1 - ) [instance: 7, cache: 1]; - // - // node Integer[1]: x = 220, y = 439, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_1" - // input[3]: defaulting = 0, 
visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = 0 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node Colormap[1]: x = 254, y = 167, inputs = 19, label = Colormap - // input[1]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 0.74683544] [1.0 0.0] } - // input[2]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } - // input[3]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84699454 1.0] } - // input[4]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84972678 1.0] } - // input[5]: defaulting = 0, visible = 0, type = 32, value = "Colormap_1" - // input[7]: defaulting = 1, visible = 0, type = 5, value = 0.067314833 - // input[8]: defaulting = 1, visible = 0, type = 5, value = 0.080266804 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 20 - // input[12]: defaulting = 0, visible = 0, type = 16777221, value = { 0.067314833 0.080266804 } - // input[17]: defaulting = 0, visible = 0, type = 5, value = 0.067314833 - // input[18]: defaulting = 0, visible = 0, type = 5, value = 0.080266804 - // window: position = (0.0602,0.4692), size = 0.4297x0.4280, screen = 0 - // -main_Colormap_1_out_1[cache: 2], -main_Colormap_1_out_2[cache: 2] = - Colormap( - main_Colormap_1_in_1, - main_Colormap_1_in_2, - main_Colormap_1_in_3, - main_Colormap_1_in_4, - main_Colormap_1_in_5, - main_Import_1_out_1, - main_Colormap_1_in_7, - main_Colormap_1_in_8, - main_Colormap_1_in_9, - main_Colormap_1_in_10, - main_Colormap_1_in_11, - main_Colormap_1_in_12, - main_Colormap_1_in_13, - main_Colormap_1_in_14, - main_Colormap_1_in_15, - main_Colormap_1_in_16, - main_Colormap_1_in_17, - main_Colormap_1_in_18, - main_Colormap_1_in_19 - ) 
[instance: 1, cache: 1]; - // - // node Color[5]: x = 275, y = 280, inputs = 5, label = Color - // input[2]: defaulting = 1, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_5_out_1 = - Color( - main_Import_1_out_1, - main_Colormap_1_out_1, - main_Color_5_in_3, - main_Color_5_in_4, - main_Color_5_in_5 - ) [instance: 5, cache: 1]; - // - // node Collect[3]: x = 245, y = 353, inputs = 2, label = Collect - // -main_Collect_3_out_1 = - Collect( - main_Collect_3_in_1, - main_Color_5_out_1 - ) [instance: 3, cache: 1]; - // - // node Switch[1]: x = 309, y = 436, inputs = 2, label = Switch - // -main_Switch_1_out_1 = - Switch( - main_Integer_1_out_1, - main_Collect_3_out_1 - ) [instance: 1, cache: 1]; - // - // node Integer[7]: x = 554, y = 437, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_7" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[5]: defaulting = 1, visible = 0, type = 1, value = 0 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 0 - // - // - // node FileSelector[4]: x = 544, y = 107, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "topography.dx" - // output[2]: visible = 1, type = 32, value = "topography.dx" - // - // - // node String[4]: x = 663, y = 110, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "tpg" - // - // - // node Import[4]: x = 597, y = 183, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_4_out_1 = - 
Import( - main_FileSelector_4_out_1, - main_String_4_out_1, - main_Import_4_in_3, - main_Import_4_in_4, - main_Import_4_in_5, - main_Import_4_in_6 - ) [instance: 4, cache: 1]; - // - // node ShowConnections[3]: x = 580, y = 279, inputs = 1, label = ShowConnections - // -main_ShowConnections_3_out_1 = - ShowConnections( - main_Import_4_out_1 - ) [instance: 3, cache: 1]; - // - // node Color[8]: x = 617, y = 363, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_8_out_1 = - Color( - main_ShowConnections_3_out_1, - main_Color_8_in_2, - main_Color_8_in_3, - main_Color_8_in_4, - main_Color_8_in_5 - ) [instance: 8, cache: 1]; - // - // node Tube[3]: x = 731, y = 419, inputs = 4, label = Tube - // input[2]: defaulting = 0, visible = 1, type = 5, value = 0.01 - // -main_Tube_3_out_1 = - Tube( - main_Color_8_out_1, - main_Tube_3_in_2, - main_Tube_3_in_3, - main_Tube_3_in_4 - ) [instance: 3, cache: 1]; - // - // node Switch[9]: x = 653, y = 481, inputs = 2, label = Switch - // -main_Switch_9_out_1 = - Switch( - main_Integer_7_out_1, - main_Tube_3_out_1 - ) [instance: 9, cache: 1]; - // - // node Collect[2]: x = 406, y = 576, inputs = 7, label = Collect - // -main_Collect_2_out_1 = - Collect( - main_Switch_7_out_1, - main_Switch_1_out_1, - main_Switch_9_out_1, - main_Collect_2_in_4, - main_Collect_2_in_5, - main_Collect_2_in_6, - main_Collect_2_in_7 - ) [instance: 2, cache: 1]; - // - // node String[7]: x = 13, y = 114, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "white" - // - // - // node ColorBar[1]: x = 65, y = 176, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.15] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 1, visible = 1, type = 16777248, 
value = {"white"} - // -main_ColorBar_1_out_1 = - ColorBar( - main_Colormap_1_out_1, - main_ColorBar_1_in_2, - main_ColorBar_1_in_3, - main_ColorBar_1_in_4, - main_ColorBar_1_in_5, - main_ColorBar_1_in_6, - main_ColorBar_1_in_7, - main_ColorBar_1_in_8, - main_String_7_out_1, - main_ColorBar_1_in_10, - main_ColorBar_1_in_11, - main_ColorBar_1_in_12, - main_ColorBar_1_in_13, - main_ColorBar_1_in_14, - main_ColorBar_1_in_15, - main_ColorBar_1_in_16 - ) [instance: 1, cache: 1]; - // - // node Image[2]: x = 453, y = 651, inputs = 49, label = Image - // input[1]: defaulting = 0, visible = 0, type = 67108863, value = "Image_2" - // input[4]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 0, visible = 0, type = 8, value = [0.015844 -0.0369692 0] - // input[6]: defaulting = 0, visible = 0, type = 8, value = [0.456141 -6.14723 3.44158] - // input[7]: defaulting = 0, visible = 0, type = 5, value = 2.49278 - // input[8]: defaulting = 0, visible = 0, type = 1, value = 713 - // input[9]: defaulting = 0, visible = 0, type = 5, value = 0.922 - // input[10]: defaulting = 0, visible = 0, type = 8, value = [0.0452349 0.492725 0.869008] - // input[11]: defaulting = 1, visible = 0, type = 5, value = 20.1171 - // input[12]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[14]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[15]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[16]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[17]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[18]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[19]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[22]: defaulting = 0, visible = 0, type = 32, value = "black" - // input[25]: defaulting = 0, visible = 0, type = 32, value = "./example.tiff" - // input[26]: defaulting = 0, visible = 0, type = 32, value = "tiff gamma=1" - // input[27]: defaulting = 0, visible = 0, type = 1, value = 
500 - // input[28]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[29]: defaulting = 0, visible = 0, type = 3, value = 0 - // input[30]: defaulting = 0, visible = 0, type = 16777248, value = {"x axis", "y axis", ""} - // input[31]: defaulting = 0, visible = 0, type = 16777217, value = { -15 -15 15 } - // input[34]: defaulting = 0, visible = 0, type = 3, value = 1 - // input[37]: defaulting = 0, visible = 0, type = 16777248, value = {"grey30", "grey5", "yellow", "white"} - // input[38]: defaulting = 0, visible = 0, type = 16777248, value = {"background", "grid", "ticks", "labels"} - // input[39]: defaulting = 0, visible = 0, type = 5, value = 0.5 - // input[41]: defaulting = 0, visible = 0, type = 32, value = "none" - // depth: value = 24 - // window: position = (0.2234,0.0656), size = 0.5680x0.8985, screen = 0 - // internal caching: 1 - // -main_Image_2_out_1, -main_Image_2_out_2, -main_Image_2_out_3 = - Image( - main_Image_2_in_1, - main_Collect_2_out_1, - main_Image_2_in_3, - main_Image_2_in_4, - main_Image_2_in_5, - main_Image_2_in_6, - main_Image_2_in_7, - main_Image_2_in_8, - main_Image_2_in_9, - main_Image_2_in_10, - main_Image_2_in_11, - main_Image_2_in_12, - main_Image_2_in_13, - main_Image_2_in_14, - main_Image_2_in_15, - main_Image_2_in_16, - main_Image_2_in_17, - main_Image_2_in_18, - main_Image_2_in_19, - main_Image_2_in_20, - main_Image_2_in_21, - main_Image_2_in_22, - main_Image_2_in_23, - main_Image_2_in_24, - main_Image_2_in_25, - main_Image_2_in_26, - main_Image_2_in_27, - main_Image_2_in_28, - main_Image_2_in_29, - main_Image_2_in_30, - main_Image_2_in_31, - main_Image_2_in_32, - main_Image_2_in_33, - main_Image_2_in_34, - main_Image_2_in_35, - main_Image_2_in_36, - main_Image_2_in_37, - main_Image_2_in_38, - main_Image_2_in_39, - main_Image_2_in_40, - main_Image_2_in_41, - main_Image_2_in_42, - main_Image_2_in_43, - main_Image_2_in_44, - main_Image_2_in_45, - main_Image_2_in_46, - main_Image_2_in_47, - main_Image_2_in_48, - 
main_Image_2_in_49 - ) [instance: 2, cache: 1]; - // - // node Tube[2]: x = 11, y = 345, inputs = 4, label = Tube - // input[2]: defaulting = 1, visible = 1, type = 5, value = 0.0025 - // input[3]: defaulting = 0, visible = 0, type = 1, value = 16 - // -main_Tube_2_out_1 = - Tube( - main_Tube_2_in_1, - main_Tube_2_in_2, - main_Tube_2_in_3, - main_Tube_2_in_4 - ) [instance: 2, cache: 1]; -// network: end of macro body -CacheScene(main_Image_2_in_1, main_Image_2_out_1, main_Image_2_out_2); -} -main_Integer_5_in_1 = "Integer_5"; -main_Integer_5_in_2 = NULL; -main_Integer_5_in_3 = 1 ; -main_Integer_5_in_4 = NULL; -main_Integer_5_in_5 = NULL; -main_Integer_5_in_6 = NULL; -main_Integer_5_in_7 = NULL; -main_Integer_5_in_8 = NULL; -main_Integer_5_in_9 = NULL; -main_Integer_5_in_10 = NULL; -main_Integer_5_in_11 = NULL; -main_Integer_5_out_1 = 1 ; -main_FileSelector_1_out_1 = "voronoi.dx"; -main_String_1_out_1 = "area"; -main_Import_1_in_3 = "dx"; -main_Import_1_in_4 = NULL; -main_Import_1_in_5 = NULL; -main_Import_1_in_6 = NULL; -main_Import_1_out_1 = NULL; -main_ShowConnections_2_out_1 = NULL; -main_Color_3_in_2 = "yellow"; -main_Color_3_in_3 = 1.0; -main_Color_3_in_4 = NULL; -main_Color_3_in_5 = NULL; -main_Color_3_out_1 = NULL; -main_Switch_7_out_1 = NULL; -main_Integer_1_in_1 = "Integer_1"; -main_Integer_1_in_2 = NULL; -main_Integer_1_in_3 = 1 ; -main_Integer_1_in_4 = NULL; -main_Integer_1_in_5 = NULL; -main_Integer_1_in_6 = NULL; -main_Integer_1_in_7 = NULL; -main_Integer_1_in_8 = NULL; -main_Integer_1_in_9 = NULL; -main_Integer_1_in_10 = NULL; -main_Integer_1_in_11 = NULL; -main_Integer_1_out_1 = 1 ; -main_Colormap_1_in_1 = { [0.0 0.74683544] [1.0 0.0] }; -main_Colormap_1_in_2 = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] }; -main_Colormap_1_in_3 = { [0.84699454 1.0] }; -main_Colormap_1_in_4 = { [0.84972678 1.0] }; -main_Colormap_1_in_5 = "Colormap_1"; -main_Colormap_1_in_7 = NULL; -main_Colormap_1_in_8 = NULL; -main_Colormap_1_in_9 = NULL; 
-main_Colormap_1_in_10 = NULL; -main_Colormap_1_in_11 = NULL; -main_Colormap_1_in_12 = { 0.067314833 0.080266804 }; -main_Colormap_1_in_13 = NULL; -main_Colormap_1_in_14 = NULL; -main_Colormap_1_in_15 = NULL; -main_Colormap_1_in_16 = NULL; -main_Colormap_1_in_17 = 0.067314833; -main_Colormap_1_in_18 = 0.080266804; -main_Colormap_1_in_19 = NULL; -main_Colormap_1_out_1 = NULL; -main_Color_5_in_3 = 1.0; -main_Color_5_in_4 = NULL; -main_Color_5_in_5 = NULL; -main_Color_5_out_1 = NULL; -main_Collect_3_in_1 = NULL; -main_Collect_3_out_1 = NULL; -main_Switch_1_out_1 = NULL; -main_Integer_7_in_1 = "Integer_7"; -main_Integer_7_in_2 = NULL; -main_Integer_7_in_3 = 0 ; -main_Integer_7_in_4 = NULL; -main_Integer_7_in_5 = NULL; -main_Integer_7_in_6 = NULL; -main_Integer_7_in_7 = NULL; -main_Integer_7_in_8 = NULL; -main_Integer_7_in_9 = NULL; -main_Integer_7_in_10 = NULL; -main_Integer_7_in_11 = NULL; -main_Integer_7_out_1 = 0 ; -main_FileSelector_4_out_1 = "topography.dx"; -main_String_4_out_1 = "tpg"; -main_Import_4_in_3 = "dx"; -main_Import_4_in_4 = NULL; -main_Import_4_in_5 = NULL; -main_Import_4_in_6 = NULL; -main_Import_4_out_1 = NULL; -main_ShowConnections_3_out_1 = NULL; -main_Color_8_in_2 = "black"; -main_Color_8_in_3 = 1.0; -main_Color_8_in_4 = NULL; -main_Color_8_in_5 = NULL; -main_Color_8_out_1 = NULL; -main_Tube_3_in_2 = 0.01; -main_Tube_3_in_3 = NULL; -main_Tube_3_in_4 = NULL; -main_Tube_3_out_1 = NULL; -main_Switch_9_out_1 = NULL; -main_Collect_2_in_4 = NULL; -main_Collect_2_in_5 = NULL; -main_Collect_2_in_6 = NULL; -main_Collect_2_in_7 = NULL; -main_Collect_2_out_1 = NULL; -main_String_7_out_1 = "white"; -main_ColorBar_1_in_2 = [0.05 0.15]; -main_ColorBar_1_in_3 = [200 15]; -main_ColorBar_1_in_4 = 0; -main_ColorBar_1_in_5 = NULL; -main_ColorBar_1_in_6 = NULL; -main_ColorBar_1_in_7 = NULL; -main_ColorBar_1_in_8 = NULL; -main_ColorBar_1_in_10 = NULL; -main_ColorBar_1_in_11 = NULL; -main_ColorBar_1_in_12 = NULL; -main_ColorBar_1_in_13 = NULL; -main_ColorBar_1_in_14 = 
NULL; -main_ColorBar_1_in_15 = NULL; -main_ColorBar_1_in_16 = NULL; -macro Image( - id, - object, - where, - useVector, - to, - from, - width, - resolution, - aspect, - up, - viewAngle, - perspective, - options, - buttonState = 1, - buttonUpApprox = "none", - buttonDownApprox = "none", - buttonUpDensity = 1, - buttonDownDensity = 1, - renderMode = 0, - defaultCamera, - reset, - backgroundColor, - throttle, - RECenable = 0, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable = 0, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - interactionMode, - title, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - webOptions) -> ( - object, - camera, - where) -{ - ImageMessage( - id, - backgroundColor, - throttle, - RECenable, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - interactionMode, - title, - renderMode, - buttonUpApprox, - buttonDownApprox, - buttonUpDensity, - buttonDownDensity) [instance: 1, cache: 1]; - autoCamera = - AutoCamera( - object, - "front", - object, - resolution, - aspect, - [0,1,0], - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - realCamera = - Camera( - to, - from, - width, - resolution, - aspect, - up, - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - coloredDefaultCamera = - UpdateCamera(defaultCamera, - background=backgroundColor) [instance: 1, cache: 1]; - nullDefaultCamera = - Inquire(defaultCamera, - "is null + 1") [instance: 1, cache: 1]; - resetCamera = - Switch( - nullDefaultCamera, - coloredDefaultCamera, - autoCamera) [instance: 1, cache: 1]; - resetNull = - Inquire( - reset, - "is null + 1") 
[instance: 2, cache: 1]; - reset = - Switch( - resetNull, - reset, - 0) [instance: 2, cache: 1]; - whichCamera = - Compute( - "($0 != 0 || $1 == 0) ? 1 : 2", - reset, - useVector) [instance: 1, cache: 1]; - camera = Switch( - whichCamera, - resetCamera, - realCamera) [instance: 3, cache: 1]; - AAobject = - AutoAxes( - object, - camera, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels) [instance: 1, cache: 1]; - switchAAenable = Compute("$0+1", - AAenable) [instance: 2, cache: 1]; - object = Switch( - switchAAenable, - object, - AAobject) [instance:4, cache: 1]; - SWapproximation_options = - Switch( - buttonState, - buttonUpApprox, - buttonDownApprox) [instance: 5, cache: 1]; - SWdensity_options = - Switch( - buttonState, - buttonUpDensity, - buttonDownDensity) [instance: 6, cache: 1]; - HWapproximation_options = - Format( - "%s,%s", - buttonDownApprox, - buttonUpApprox) [instance: 1, cache: 1]; - HWdensity_options = - Format( - "%d,%d", - buttonDownDensity, - buttonUpDensity) [instance: 2, cache: 1]; - switchRenderMode = Compute( - "$0+1", - renderMode) [instance: 3, cache: 1]; - approximation_options = Switch( - switchRenderMode, - SWapproximation_options, - HWapproximation_options) [instance: 7, cache: 1]; - density_options = Switch( - switchRenderMode, - SWdensity_options, - HWdensity_options) [instance: 8, cache: 1]; - renderModeString = Switch( - switchRenderMode, - "software", - "hardware")[instance: 9, cache: 1]; - object_tag = Inquire( - object, - "object tag")[instance: 3, cache: 1]; - annoted_object = - Options( - object, - "send boxes", - 0, - "cache", - 1, - "object tag", - object_tag, - "ddcamera", - whichCamera, - "rendering approximation", - approximation_options, - "render every", - density_options, - "button state", - buttonState, - "rendering mode", - renderModeString) 
[instance: 1, cache: 1]; - RECresNull = - Inquire( - RECresolution, - "is null + 1") [instance: 4, cache: 1]; - ImageResolution = - Inquire( - camera, - "camera resolution") [instance: 5, cache: 1]; - RECresolution = - Switch( - RECresNull, - RECresolution, - ImageResolution) [instance: 10, cache: 1]; - RECaspectNull = - Inquire( - RECaspect, - "is null + 1") [instance: 6, cache: 1]; - ImageAspect = - Inquire( - camera, - "camera aspect") [instance: 7, cache: 1]; - RECaspect = - Switch( - RECaspectNull, - RECaspect, - ImageAspect) [instance: 11, cache: 1]; - switchRECenable = Compute( - "$0 == 0 ? 1 : (($2 == $3) && ($4 == $5)) ? ($1 == 1 ? 2 : 3) : 4", - RECenable, - switchRenderMode, - RECresolution, - ImageResolution, - RECaspect, - ImageAspect) [instance: 4, cache: 1]; - NoRECobject, RECNoRerenderObject, RECNoRerHW, RECRerenderObject = Route(switchRECenable, annoted_object); - Display( - NoRECobject, - camera, - where, - throttle) [instance: 1, cache: 1]; - image = - Render( - RECNoRerenderObject, - camera) [instance: 1, cache: 1]; - Display( - image, - NULL, - where, - throttle) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 1, cache: 1]; - rec_where = Display( - RECNoRerHW, - camera, - where, - throttle) [instance: 1, cache: 0]; - rec_image = ReadImageWindow( - rec_where) [instance: 1, cache: 1]; - WriteImage( - rec_image, - RECfile, - RECformat) [instance: 1, cache: 1]; - RECupdateCamera = - UpdateCamera( - camera, - resolution=RECresolution, - aspect=RECaspect) [instance: 2, cache: 1]; - Display( - RECRerenderObject, - camera, - where, - throttle) [instance: 1, cache: 1]; - RECRerenderObject = - ScaleScreen( - RECRerenderObject, - NULL, - RECresolution, - camera) [instance: 1, cache: 1]; - image = - Render( - RECRerenderObject, - RECupdateCamera) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 2, cache: 1]; -} -main_Image_2_in_1 = "Image_2"; -main_Image_2_in_3 = "X24,,"; 
-main_Image_2_in_4 = 1; -main_Image_2_in_5 = [0.015844 -0.0369692 0]; -main_Image_2_in_6 = [0.456141 -6.14723 3.44158]; -main_Image_2_in_7 = 2.49278; -main_Image_2_in_8 = 713; -main_Image_2_in_9 = 0.922; -main_Image_2_in_10 = [0.0452349 0.492725 0.869008]; -main_Image_2_in_11 = NULL; -main_Image_2_in_12 = 0; -main_Image_2_in_13 = NULL; -main_Image_2_in_14 = 1; -main_Image_2_in_15 = NULL; -main_Image_2_in_16 = NULL; -main_Image_2_in_17 = NULL; -main_Image_2_in_18 = NULL; -main_Image_2_in_19 = 0; -main_Image_2_in_20 = NULL; -main_Image_2_in_21 = NULL; -main_Image_2_in_22 = "black"; -main_Image_2_in_23 = NULL; -main_Image_2_in_25 = "./example.tiff"; -main_Image_2_in_26 = "tiff gamma=1"; -main_Image_2_in_27 = 500; -main_Image_2_in_28 = NULL; -main_Image_2_in_29 = 0; -main_Image_2_in_30 = {"x axis", "y axis", ""}; -main_Image_2_in_31 = { -15 -15 15 }; -main_Image_2_in_32 = NULL; -main_Image_2_in_33 = NULL; -main_Image_2_in_34 = 1; -main_Image_2_in_35 = NULL; -main_Image_2_in_36 = NULL; -main_Image_2_in_37 = {"grey30", "grey5", "yellow", "white"}; -main_Image_2_in_38 = {"background", "grid", "ticks", "labels"}; -main_Image_2_in_39 = 0.5; -main_Image_2_in_40 = NULL; -main_Image_2_in_41 = "none"; -main_Image_2_in_42 = NULL; -main_Image_2_in_43 = NULL; -main_Image_2_in_44 = NULL; -main_Image_2_in_45 = NULL; -main_Image_2_in_46 = NULL; -main_Image_2_in_47 = NULL; -main_Image_2_in_48 = NULL; -main_Image_2_in_49 = NULL; -main_Tube_2_in_1 = NULL; -main_Tube_2_in_2 = NULL; -main_Tube_2_in_3 = 16; -main_Tube_2_in_4 = NULL; -Executive("product version 4 4 4"); -$sync -main(); diff --git a/grid_gen/global_scvt/dx/topography.dx b/grid_gen/global_scvt/dx/topography.dx deleted file mode 100644 index 6e0e1d5a2..000000000 --- a/grid_gen/global_scvt/dx/topography.dx +++ /dev/null @@ -1,235 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 101 -data follows - - 0.2991601112E+00 -0.8219356504E+00 0.5049999952E+00 - 0.2947937299E+00 -0.8116439251E+00 
0.5238614095E+00 - 0.2892384301E+00 -0.8013977596E+00 0.5424045927E+00 - 0.2825631166E+00 -0.7912255171E+00 0.5605535183E+00 - 0.2748426807E+00 -0.7811551564E+00 0.5782370649E+00 - 0.2661568154E+00 -0.7712145858E+00 0.5953894465E+00 - 0.2565887515E+00 -0.7614320220E+00 0.6119505447E+00 - 0.2462239371E+00 -0.7518363312E+00 0.6278661359E+00 - 0.2351486896E+00 -0.7424573287E+00 0.6430880108E+00 - 0.2234488481E+00 -0.7333260158E+00 0.6575739864E+00 - 0.2112084569E+00 -0.7244747360E+00 0.6712878111E+00 - 0.1985085106E+00 -0.7159372332E+00 0.6841989696E+00 - 0.1854257918E+00 -0.7077486012E+00 0.6962823953E+00 - 0.1720318285E+00 -0.6999451191E+00 0.7075180994E+00 - 0.1583920008E+00 -0.6925639711E+00 0.7178907304E+00 - 0.1445648184E+00 -0.6856428558E+00 0.7273890763E+00 - 0.1306013902E+00 -0.6792194960E+00 0.7360055257E+00 - 0.1165451010E+00 -0.6733310644E+00 0.7437355026E+00 - 0.1024315050E+00 -0.6680135450E+00 0.7505768923E+00 - 0.8828844081E-01 -0.6633010548E+00 0.7565294724E+00 - 0.7413636571E-01 -0.6592251512E+00 0.7615943671E+00 - 0.5998890034E-01 -0.6558141530E+00 0.7657735366E+00 - 0.4585357015E-01 -0.6530925023E+00 0.7690693169E+00 - 0.3173272249E-01 -0.6510801937E+00 0.7714840222E+00 - 0.1762459364E-01 -0.6497922942E+00 0.7730196174E+00 - 0.3524495337E-02 -0.6492385744E+00 0.7736774730E+00 - -0.1057391358E-01 -0.6494232656E+00 0.7734582052E+00 - -0.2467730726E-01 -0.6503449545E+00 0.7723616066E+00 - -0.3879136329E-01 -0.6519966182E+00 0.7703866697E+00 - -0.5291946212E-01 -0.6543657992E+00 0.7675317010E+00 - -0.6706145356E-01 -0.6574349134E+00 0.7637945253E+00 - -0.8121252588E-01 -0.6611816761E+00 0.7591727719E+00 - -0.9536221021E-01 -0.6655796308E+00 0.7536642387E+00 - -0.1094935484E+00 -0.6705987565E+00 0.7472673226E+00 - -0.1235824478E+00 -0.6762061295E+00 0.7399815063E+00 - -0.1375972401E+00 -0.6823666131E+00 0.7318078885E+00 - -0.1514984563E+00 -0.6890435459E+00 0.7227497426E+00 - -0.1652388226E+00 -0.6961994040E+00 0.7128130906E+00 - -0.1787634760E+00 
-0.7037964096E+00 0.7020072751E+00 - -0.1920103928E+00 -0.7117970660E+00 0.6903455125E+00 - -0.2049110176E+00 -0.7201645994E+00 0.6778454140E+00 - -0.2173910734E+00 -0.7288632955E+00 0.6645294563E+00 - -0.2293715332E+00 -0.7378587222E+00 0.6504253890E+00 - -0.2407697257E+00 -0.7471178364E+00 0.6355665649E+00 - -0.2515005478E+00 -0.7566089794E+00 0.6199921834E+00 - -0.2614777540E+00 -0.7663017679E+00 0.6037474351E+00 - -0.2706152924E+00 -0.7761668952E+00 0.5868835447E+00 - -0.2788286562E+00 -0.7861758603E+00 0.5694577050E+00 - -0.2860362213E+00 -0.7963006451E+00 0.5515329033E+00 - -0.2921605418E+00 -0.8065133612E+00 0.5331776409E+00 - -0.2971295777E+00 -0.8167858906E+00 0.5144655516E+00 - -0.3008778301E+00 -0.8270895422E+00 0.4954749249E+00 - -0.3033473643E+00 -0.8373947450E+00 0.4762881464E+00 - -0.3044887036E+00 -0.8476707950E+00 0.4569910649E+00 - -0.3042615800E+00 -0.8578856732E+00 0.4376723014E+00 - -0.3026355325E+00 -0.8680059411E+00 0.4184225154E+00 - -0.2995903456E+00 -0.8779967242E+00 0.3993336423E+00 - -0.2951163286E+00 -0.8878217810E+00 0.3804981190E+00 - -0.2892144352E+00 -0.8974436568E+00 0.3620081133E+00 - -0.2818962295E+00 -0.9068239149E+00 0.3439547701E+00 - -0.2731837090E+00 -0.9159234337E+00 0.3264274889E+00 - -0.2631089938E+00 -0.9247027579E+00 0.3095132432E+00 - -0.2517139001E+00 -0.9331224883E+00 0.2932959513E+00 - -0.2390494126E+00 -0.9411436951E+00 0.2778559055E+00 - -0.2251750759E+00 -0.9487283386E+00 0.2632692642E+00 - -0.2101583237E+00 -0.9558396835E+00 0.2496076103E+00 - -0.1940737662E+00 -0.9624426944E+00 0.2369375742E+00 - -0.1770024544E+00 -0.9685044010E+00 0.2253205211E+00 - -0.1590311390E+00 -0.9739942270E+00 0.2148122980E+00 - -0.1402515399E+00 -0.9788842768E+00 0.2054630353E+00 - -0.1207596391E+00 -0.9831495783E+00 0.1973169962E+00 - -0.1006550063E+00 -0.9867682820E+00 0.1904124681E+00 - -0.8004016474E-01 -0.9897218201E+00 0.1847816862E+00 - -0.5901999861E-01 -0.9919950276E+00 0.1804507847E+00 - -0.3770120119E-01 -0.9935762323E+00 
0.1774397668E+00 - -0.1619176011E-01 -0.9944573171E+00 0.1757624877E+00 - 0.5399528292E-02 -0.9946337604E+00 0.1754266469E+00 - 0.2696352476E-01 -0.9941046569E+00 0.1764337843E+00 - 0.4839120411E-01 -0.9928727215E+00 0.1787792801E+00 - 0.6957411512E-01 -0.9909442774E+00 0.1824523562E+00 - 0.9040485500E-01 -0.9883292256E+00 0.1874360820E+00 - 0.1107775685E+00 -0.9850409939E+00 0.1937073861E+00 - 0.1305884806E+00 -0.9810964619E+00 0.2012370788E+00 - 0.1497364679E+00 -0.9765158557E+00 0.2099898912E+00 - 0.1681236727E+00 -0.9713226084E+00 0.2199245367E+00 - 0.1856561576E+00 -0.9655431814E+00 0.2309938024E+00 - 0.2022445971E+00 -0.9592068438E+00 0.2431446780E+00 - 0.2178049982E+00 -0.9523454074E+00 0.2563185294E+00 - 0.2322594375E+00 -0.9449929186E+00 0.2704513234E+00 - 0.2455368021E+00 -0.9371853104E+00 0.2854739104E+00 - 0.2575735158E+00 -0.9289600213E+00 0.3013123685E+00 - 0.2683142337E+00 -0.9203555886E+00 0.3178884131E+00 - 0.2777124839E+00 -0.9114112293E+00 0.3351198714E+00 - 0.2857312381E+00 -0.9021664210E+00 0.3529212223E+00 - 0.2923433910E+00 -0.8926604990E+00 0.3712041973E+00 - 0.2975321311E+00 -0.8829322832E+00 0.3898784361E+00 - 0.3012911860E+00 -0.8730197527E+00 0.4088521901E+00 - 0.3036249294E+00 -0.8629597798E+00 0.4280330611E+00 - 0.3045483392E+00 -0.8527879366E+00 0.4473287661E+00 - 0.3040867986E+00 -0.8425383828E+00 0.4666479113E+00 - 0.3022757370E+00 -0.8322438396E+00 0.4859007624E+00 - -object "edge list" class array type int rank 0 items 101 data follows - - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - 59 - 60 - 61 - 62 - 63 - 64 - 65 - 66 - 67 - 68 - 69 - 70 - 71 - 72 - 73 - 74 - 75 - 76 - 77 - 78 - 79 - 80 - 81 - 82 - 83 - 84 - 85 - 86 - 87 - 88 - 89 - 90 - 91 - 92 - 93 - 94 - 95 - 96 - 97 - 98 - 99 - 100 - 
-attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 1 data follows - - 0 - -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 1 data follows - - 0 - -attribute "ref" string "loops" - -object 0 class array type float rank 0 items 1 data follows - - 1.0 - -attribute "dep" string "faces" - -object "tpg" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 diff --git a/grid_gen/global_scvt/dx/vor.area.data b/grid_gen/global_scvt/dx/vor.area.data deleted file mode 100644 index 5e356160e..000000000 --- a/grid_gen/global_scvt/dx/vor.area.data +++ /dev/null @@ -1,162 +0,0 @@ - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 
- 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 diff --git a/grid_gen/global_scvt/dx/vor.edge.data b/grid_gen/global_scvt/dx/vor.edge.data deleted file mode 100644 index 6019cd51f..000000000 --- 
a/grid_gen/global_scvt/dx/vor.edge.data +++ /dev/null @@ -1,960 +0,0 @@ - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - 59 - 60 - 61 - 62 - 63 - 64 - 65 - 66 - 67 - 68 - 69 - 70 - 71 - 72 - 73 - 74 - 75 - 76 - 77 - 78 - 79 - 80 - 81 - 82 - 83 - 84 - 85 - 86 - 87 - 88 - 89 - 90 - 91 - 92 - 93 - 94 - 95 - 96 - 97 - 98 - 99 - 100 - 101 - 102 - 103 - 104 - 105 - 106 - 107 - 108 - 109 - 110 - 111 - 112 - 113 - 114 - 115 - 116 - 117 - 118 - 119 - 120 - 121 - 122 - 123 - 124 - 125 - 126 - 127 - 128 - 129 - 130 - 131 - 132 - 133 - 134 - 135 - 136 - 137 - 138 - 139 - 140 - 141 - 142 - 143 - 144 - 145 - 146 - 147 - 148 - 149 - 150 - 151 - 152 - 153 - 154 - 155 - 156 - 157 - 158 - 159 - 160 - 161 - 162 - 163 - 164 - 165 - 166 - 167 - 168 - 169 - 170 - 171 - 172 - 173 - 174 - 175 - 176 - 177 - 178 - 179 - 180 - 181 - 182 - 183 - 184 - 185 - 186 - 187 - 188 - 189 - 190 - 191 - 192 - 193 - 194 - 195 - 196 - 197 - 198 - 199 - 200 - 201 - 202 - 203 - 204 - 205 - 206 - 207 - 208 - 209 - 210 - 211 - 212 - 213 - 214 - 215 - 216 - 217 - 218 - 219 - 220 - 221 - 222 - 223 - 224 - 225 - 226 - 227 - 228 - 229 - 230 - 231 - 232 - 233 - 234 - 235 - 236 - 237 - 238 - 239 - 240 - 241 - 242 - 243 - 244 - 245 - 246 - 247 - 248 - 249 - 250 - 251 - 252 - 253 - 254 - 255 - 256 - 257 - 258 - 259 - 260 - 261 - 262 - 263 - 264 - 265 - 266 - 267 - 268 - 269 - 270 - 271 - 272 - 273 - 274 - 275 - 276 - 277 - 278 - 279 - 280 - 281 - 282 - 283 - 284 - 285 - 286 - 287 - 288 - 289 - 290 - 291 - 292 - 293 - 294 - 295 - 296 - 297 - 298 - 299 - 300 - 301 - 302 - 303 - 304 - 305 - 306 - 307 - 308 - 309 - 310 - 311 - 312 - 313 - 314 - 315 - 316 - 317 - 318 - 319 - 320 - 321 - 322 - 323 - 324 - 325 - 326 - 327 - 328 - 329 - 330 - 331 - 332 - 333 - 334 - 335 - 336 - 337 - 338 - 
339 - 340 - 341 - 342 - 343 - 344 - 345 - 346 - 347 - 348 - 349 - 350 - 351 - 352 - 353 - 354 - 355 - 356 - 357 - 358 - 359 - 360 - 361 - 362 - 363 - 364 - 365 - 366 - 367 - 368 - 369 - 370 - 371 - 372 - 373 - 374 - 375 - 376 - 377 - 378 - 379 - 380 - 381 - 382 - 383 - 384 - 385 - 386 - 387 - 388 - 389 - 390 - 391 - 392 - 393 - 394 - 395 - 396 - 397 - 398 - 399 - 400 - 401 - 402 - 403 - 404 - 405 - 406 - 407 - 408 - 409 - 410 - 411 - 412 - 413 - 414 - 415 - 416 - 417 - 418 - 419 - 420 - 421 - 422 - 423 - 424 - 425 - 426 - 427 - 428 - 429 - 430 - 431 - 432 - 433 - 434 - 435 - 436 - 437 - 438 - 439 - 440 - 441 - 442 - 443 - 444 - 445 - 446 - 447 - 448 - 449 - 450 - 451 - 452 - 453 - 454 - 455 - 456 - 457 - 458 - 459 - 460 - 461 - 462 - 463 - 464 - 465 - 466 - 467 - 468 - 469 - 470 - 471 - 472 - 473 - 474 - 475 - 476 - 477 - 478 - 479 - 480 - 481 - 482 - 483 - 484 - 485 - 486 - 487 - 488 - 489 - 490 - 491 - 492 - 493 - 494 - 495 - 496 - 497 - 498 - 499 - 500 - 501 - 502 - 503 - 504 - 505 - 506 - 507 - 508 - 509 - 510 - 511 - 512 - 513 - 514 - 515 - 516 - 517 - 518 - 519 - 520 - 521 - 522 - 523 - 524 - 525 - 526 - 527 - 528 - 529 - 530 - 531 - 532 - 533 - 534 - 535 - 536 - 537 - 538 - 539 - 540 - 541 - 542 - 543 - 544 - 545 - 546 - 547 - 548 - 549 - 550 - 551 - 552 - 553 - 554 - 555 - 556 - 557 - 558 - 559 - 560 - 561 - 562 - 563 - 564 - 565 - 566 - 567 - 568 - 569 - 570 - 571 - 572 - 573 - 574 - 575 - 576 - 577 - 578 - 579 - 580 - 581 - 582 - 583 - 584 - 585 - 586 - 587 - 588 - 589 - 590 - 591 - 592 - 593 - 594 - 595 - 596 - 597 - 598 - 599 - 600 - 601 - 602 - 603 - 604 - 605 - 606 - 607 - 608 - 609 - 610 - 611 - 612 - 613 - 614 - 615 - 616 - 617 - 618 - 619 - 620 - 621 - 622 - 623 - 624 - 625 - 626 - 627 - 628 - 629 - 630 - 631 - 632 - 633 - 634 - 635 - 636 - 637 - 638 - 639 - 640 - 641 - 642 - 643 - 644 - 645 - 646 - 647 - 648 - 649 - 650 - 651 - 652 - 653 - 654 - 655 - 656 - 657 - 658 - 659 - 660 - 661 - 662 - 663 - 664 - 665 - 666 - 667 - 668 - 669 - 670 - 671 - 
672 - 673 - 674 - 675 - 676 - 677 - 678 - 679 - 680 - 681 - 682 - 683 - 684 - 685 - 686 - 687 - 688 - 689 - 690 - 691 - 692 - 693 - 694 - 695 - 696 - 697 - 698 - 699 - 700 - 701 - 702 - 703 - 704 - 705 - 706 - 707 - 708 - 709 - 710 - 711 - 712 - 713 - 714 - 715 - 716 - 717 - 718 - 719 - 720 - 721 - 722 - 723 - 724 - 725 - 726 - 727 - 728 - 729 - 730 - 731 - 732 - 733 - 734 - 735 - 736 - 737 - 738 - 739 - 740 - 741 - 742 - 743 - 744 - 745 - 746 - 747 - 748 - 749 - 750 - 751 - 752 - 753 - 754 - 755 - 756 - 757 - 758 - 759 - 760 - 761 - 762 - 763 - 764 - 765 - 766 - 767 - 768 - 769 - 770 - 771 - 772 - 773 - 774 - 775 - 776 - 777 - 778 - 779 - 780 - 781 - 782 - 783 - 784 - 785 - 786 - 787 - 788 - 789 - 790 - 791 - 792 - 793 - 794 - 795 - 796 - 797 - 798 - 799 - 800 - 801 - 802 - 803 - 804 - 805 - 806 - 807 - 808 - 809 - 810 - 811 - 812 - 813 - 814 - 815 - 816 - 817 - 818 - 819 - 820 - 821 - 822 - 823 - 824 - 825 - 826 - 827 - 828 - 829 - 830 - 831 - 832 - 833 - 834 - 835 - 836 - 837 - 838 - 839 - 840 - 841 - 842 - 843 - 844 - 845 - 846 - 847 - 848 - 849 - 850 - 851 - 852 - 853 - 854 - 855 - 856 - 857 - 858 - 859 - 860 - 861 - 862 - 863 - 864 - 865 - 866 - 867 - 868 - 869 - 870 - 871 - 872 - 873 - 874 - 875 - 876 - 877 - 878 - 879 - 880 - 881 - 882 - 883 - 884 - 885 - 886 - 887 - 888 - 889 - 890 - 891 - 892 - 893 - 894 - 895 - 896 - 897 - 898 - 899 - 900 - 901 - 902 - 903 - 904 - 905 - 906 - 907 - 908 - 909 - 910 - 911 - 912 - 913 - 914 - 915 - 916 - 917 - 918 - 919 - 920 - 921 - 922 - 923 - 924 - 925 - 926 - 927 - 928 - 929 - 930 - 931 - 932 - 933 - 934 - 935 - 936 - 937 - 938 - 939 - 940 - 941 - 942 - 943 - 944 - 945 - 946 - 947 - 948 - 949 - 950 - 951 - 952 - 953 - 954 - 955 - 956 - 957 - 958 - 959 diff --git a/grid_gen/global_scvt/dx/vor.face.data b/grid_gen/global_scvt/dx/vor.face.data deleted file mode 100644 index b8954ee4d..000000000 --- a/grid_gen/global_scvt/dx/vor.face.data +++ /dev/null @@ -1,162 +0,0 @@ - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 
- 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - 59 - 60 - 61 - 62 - 63 - 64 - 65 - 66 - 67 - 68 - 69 - 70 - 71 - 72 - 73 - 74 - 75 - 76 - 77 - 78 - 79 - 80 - 81 - 82 - 83 - 84 - 85 - 86 - 87 - 88 - 89 - 90 - 91 - 92 - 93 - 94 - 95 - 96 - 97 - 98 - 99 - 100 - 101 - 102 - 103 - 104 - 105 - 106 - 107 - 108 - 109 - 110 - 111 - 112 - 113 - 114 - 115 - 116 - 117 - 118 - 119 - 120 - 121 - 122 - 123 - 124 - 125 - 126 - 127 - 128 - 129 - 130 - 131 - 132 - 133 - 134 - 135 - 136 - 137 - 138 - 139 - 140 - 141 - 142 - 143 - 144 - 145 - 146 - 147 - 148 - 149 - 150 - 151 - 152 - 153 - 154 - 155 - 156 - 157 - 158 - 159 - 160 - 161 diff --git a/grid_gen/global_scvt/dx/vor.loop.data b/grid_gen/global_scvt/dx/vor.loop.data deleted file mode 100644 index 4161f5396..000000000 --- a/grid_gen/global_scvt/dx/vor.loop.data +++ /dev/null @@ -1,162 +0,0 @@ - 0 - 5 - 10 - 15 - 20 - 25 - 30 - 35 - 40 - 45 - 50 - 55 - 60 - 66 - 72 - 78 - 84 - 90 - 96 - 102 - 108 - 114 - 120 - 126 - 132 - 138 - 144 - 150 - 156 - 162 - 168 - 174 - 180 - 186 - 192 - 198 - 204 - 210 - 216 - 222 - 228 - 234 - 240 - 246 - 252 - 258 - 264 - 270 - 276 - 282 - 288 - 294 - 300 - 306 - 312 - 318 - 324 - 330 - 336 - 342 - 348 - 354 - 360 - 366 - 372 - 378 - 384 - 390 - 396 - 402 - 408 - 414 - 420 - 426 - 432 - 438 - 444 - 450 - 456 - 462 - 468 - 474 - 480 - 486 - 492 - 498 - 504 - 510 - 516 - 522 - 528 - 534 - 540 - 546 - 552 - 558 - 564 - 570 - 576 - 582 - 588 - 594 - 600 - 606 - 612 - 618 - 624 - 630 - 636 - 642 - 648 - 654 - 660 - 666 - 672 - 678 - 684 - 690 - 696 - 702 - 708 - 714 - 720 - 726 - 732 - 738 - 744 - 750 - 756 - 762 - 768 - 774 - 780 - 786 - 792 - 798 - 804 - 810 - 816 - 822 - 828 - 834 - 840 - 846 - 852 - 858 - 864 - 870 - 876 - 882 - 888 - 894 - 900 - 906 - 912 - 918 - 924 - 930 - 936 - 942 - 948 - 954 diff --git 
a/grid_gen/global_scvt/dx/vor.position.data b/grid_gen/global_scvt/dx/vor.position.data deleted file mode 100644 index a627e7906..000000000 --- a/grid_gen/global_scvt/dx/vor.position.data +++ /dev/null @@ -1,960 +0,0 @@ - -0.8267643340E+00 0.2569457569E-01 0.5619613196E+00 - -0.9154295376E+00 0.7887094654E-01 0.3946747211E+00 - -0.9529269564E+00 -0.8415895755E-01 0.2912859173E+00 - -0.8874364321E+00 -0.2380933503E+00 0.3946747211E+00 - -0.8094636434E+00 -0.1702001330E+00 0.5619613196E+00 - -0.4779270666E-01 -0.9175770931E+00 0.3946747211E+00 - -0.8826807659E-01 -0.8224404063E+00 0.5619613196E+00 - -0.2799212232E+00 -0.7783595468E+00 0.5619613196E+00 - -0.3578940119E+00 -0.8462527641E+00 0.3946747211E+00 - -0.2144306989E+00 -0.9322939395E+00 0.2912859173E+00 - 0.6549052426E-01 -0.1539343928E+00 0.9859083091E+00 - 0.1666379923E+00 0.1471684647E-01 0.9859083091E+00 - 0.3749741879E-01 0.1630299041E+00 0.9859083091E+00 - -0.1434633130E+00 0.8604117543E-01 0.9859083091E+00 - -0.1261626223E+00 -0.1098535332E+00 0.9859083091E+00 - 0.8267643340E+00 -0.2569457569E-01 -0.5619613196E+00 - 0.8094636434E+00 0.1702001330E+00 -0.5619613196E+00 - 0.8874364321E+00 0.2380933503E+00 -0.3946747211E+00 - 0.9529269564E+00 0.8415895755E-01 -0.2912859173E+00 - 0.9154295376E+00 -0.7887094654E-01 -0.3946747211E+00 - 0.2799212232E+00 0.7783595468E+00 -0.5619613196E+00 - 0.8826807659E-01 0.8224404063E+00 -0.5619613196E+00 - 0.4779270666E-01 0.9175770931E+00 -0.3946747211E+00 - 0.2144306989E+00 0.9322939395E+00 -0.2912859173E+00 - 0.3578940119E+00 0.8462527641E+00 -0.3946747211E+00 - 0.3745105489E+00 -0.8802808433E+00 -0.2912859173E+00 - 0.2078725566E+00 -0.8949976898E+00 -0.3946747211E+00 - 0.2310472359E+00 -0.7942396679E+00 -0.5619613196E+00 - 0.4120079677E+00 -0.7172509392E+00 -0.5619613196E+00 - 0.5006731713E+00 -0.7704273101E+00 -0.3946747211E+00 - 0.6942388739E+00 -0.6018839178E+00 0.3946747211E+00 - 0.8204014962E+00 -0.4920303846E+00 0.2912859173E+00 - 0.8578989150E+00 -0.3290004805E+00 
0.3946747211E+00 - 0.7549109720E+00 -0.3380959918E+00 0.5619613196E+00 - 0.6537635039E+00 -0.5067472311E+00 0.5619613196E+00 - -0.6942388739E+00 0.6018839178E+00 -0.3946747211E+00 - -0.6537635039E+00 0.5067472311E+00 -0.5619613196E+00 - -0.7549109720E+00 0.3380959918E+00 -0.5619613196E+00 - -0.8578989150E+00 0.3290004805E+00 -0.3946747211E+00 - -0.8204014962E+00 0.4920303846E+00 -0.2912859173E+00 - -0.3749741879E-01 -0.1630299041E+00 -0.9859083091E+00 - -0.1666379923E+00 -0.1471684647E-01 -0.9859083091E+00 - -0.6549052426E-01 0.1539343928E+00 -0.9859083091E+00 - 0.1261626223E+00 0.1098535332E+00 -0.9859083091E+00 - 0.1434633130E+00 -0.8604117543E-01 -0.9859083091E+00 - 0.5548287157E+00 0.6134855919E+00 0.5619613196E+00 - 0.6839692892E+00 0.4651725343E+00 0.5619613196E+00 - 0.7869572323E+00 0.4742680456E+00 0.3946747211E+00 - 0.7214667080E+00 0.6282024384E+00 0.2912859173E+00 - 0.5780033950E+00 0.7142436138E+00 0.3946747211E+00 - -0.6839692892E+00 -0.4651725343E+00 -0.5619613196E+00 - -0.5548287157E+00 -0.6134855919E+00 -0.5619613196E+00 - -0.5780033950E+00 -0.7142436138E+00 -0.3946747211E+00 - -0.7214667080E+00 -0.6282024384E+00 -0.2912859173E+00 - -0.7869572323E+00 -0.4742680456E+00 -0.3946747211E+00 - -0.2310472359E+00 0.7942396679E+00 0.5619613196E+00 - -0.2078725566E+00 0.8949976898E+00 0.3946747211E+00 - -0.3745105489E+00 0.8802808433E+00 0.2912859173E+00 - -0.5006731713E+00 0.7704273101E+00 0.3946747211E+00 - -0.4120079677E+00 0.7172509392E+00 0.5619613196E+00 - -0.7981815150E+00 0.4787040975E+00 0.3657166338E+00 - -0.8359870647E+00 0.3390817753E+00 0.4314500867E+00 - -0.7443815262E+00 0.2841419580E+00 0.6042842801E+00 - -0.6374812308E+00 0.3823251622E+00 0.6689134105E+00 - -0.6012080497E+00 0.5228665124E+00 0.6042842801E+00 - -0.6928135883E+00 0.5778063297E+00 0.4314500867E+00 - -0.9629456817E+00 0.2078908386E+00 -0.1718051609E+00 - -0.9929343613E+00 0.7282159007E-01 -0.9369295664E-01 - -0.9957492651E+00 0.6526862018E-01 0.6498775488E-01 - -0.9570083487E+00 
0.2337049578E+00 0.1718051609E+00 - -0.9249725723E+00 0.3683033672E+00 0.9369295664E-01 - -0.9242047653E+00 0.3763271762E+00 -0.6498775488E-01 - -0.8439420100E+00 -0.5324833101E+00 -0.6498775489E-01 - -0.8461042293E+00 -0.5247182702E+00 0.9369295664E-01 - -0.9012345902E+00 -0.3978180491E+00 0.1718051609E+00 - -0.9688969256E+00 -0.2387788502E+00 0.6498775488E-01 - -0.9648018180E+00 -0.2457215536E+00 -0.9369295664E-01 - -0.9116043454E+00 -0.3734441112E+00 -0.1718051609E+00 - -0.5606053708E+00 -0.4881357060E+00 0.6689134105E+00 - -0.6830591083E+00 -0.4102081952E+00 0.6042842801E+00 - -0.7636176477E+00 -0.4803529024E+00 0.4314500867E+00 - -0.7019263040E+00 -0.6111880296E+00 0.3657166338E+00 - -0.5808201421E+00 -0.6902889144E+00 0.4314500867E+00 - -0.5002616026E+00 -0.6201442072E+00 0.6042842801E+00 - -0.6427339198E+00 0.4522094255E-01 0.7647536693E+00 - -0.6248594999E+00 -0.1571700695E+00 0.7647536693E+00 - -0.4996310559E+00 -0.2230338127E+00 0.8370332887E+00 - -0.3889846814E+00 -0.1363384845E+00 0.9110997395E+00 - -0.4068591013E+00 0.6605252750E-01 0.9110997395E+00 - -0.5309875045E+00 0.1320134222E+00 0.8370332887E+00 - -0.6435033919E+00 -0.7626794715E+00 -0.6498775489E-01 - -0.4952825170E+00 -0.8515739633E+00 -0.1718051609E+00 - -0.3760910397E+00 -0.9218335857E+00 -0.9369295664E-01 - -0.3697775916E+00 -0.9268447143E+00 0.6498775488E-01 - -0.5179984665E+00 -0.8379502225E+00 0.1718051609E+00 - -0.6361095616E+00 -0.7658891927E+00 0.9369295664E-01 - 0.2375761442E+00 -0.9668398035E+00 0.9369295664E-01 - 0.9985064359E-01 -0.9800575675E+00 0.1718051609E+00 - -0.7231343441E-01 -0.9952624573E+00 0.6498775488E-01 - -0.6444507322E-01 -0.9935131919E+00 -0.9369295664E-01 - 0.7346522053E-01 -0.9823878298E+00 -0.1718051609E+00 - 0.2456292985E+00 -0.9671829400E+00 -0.6498775489E-01 - 0.3643672015E+00 -0.8564390733E+00 0.3657166338E+00 - 0.4770204756E+00 -0.7657037865E+00 0.4314500867E+00 - 0.4352028525E+00 -0.6674121560E+00 0.6042842801E+00 - 0.2910080573E+00 -0.6840096197E+00 
0.6689134105E+00 - 0.1790543045E+00 -0.7763891195E+00 0.6042842801E+00 - 0.2208719276E+00 -0.8746807500E+00 0.4314500867E+00 - -0.2416233762E+00 -0.5973022429E+00 0.7647536693E+00 - -0.4361458581E-01 -0.6428449216E+00 0.7647536693E+00 - 0.5772327375E-01 -0.5440986099E+00 0.8370332887E+00 - 0.9462727051E-02 -0.4120773247E+00 0.9110997395E+00 - -0.1885460633E+00 -0.3665346460E+00 0.9110997395E+00 - -0.2896363881E+00 -0.4642047353E+00 0.8370332887E+00 - 0.2731140292E+00 0.4741139110E+00 0.8370332887E+00 - 0.2345574672E+00 0.3389396101E+00 0.9110997395E+00 - 0.3679806288E+00 0.1857081622E+00 0.9110997395E+00 - 0.5071734576E+00 0.2053055222E+00 0.8370332887E+00 - 0.5465628671E+00 0.3412050082E+00 0.7647536693E+00 - 0.4131397056E+00 0.4944364560E+00 0.7647536693E+00 - -0.6290668992E-01 0.4073573530E+00 0.9110997395E+00 - -0.3853193725E-01 0.5457935172E+00 0.8370332887E+00 - -0.1556080320E+00 0.6252503224E+00 0.7647536693E+00 - -0.3425698233E+00 0.5457084767E+00 0.7647536693E+00 - -0.3665122481E+00 0.4062561330E+00 0.8370332887E+00 - -0.2498684812E+00 0.3278155072E+00 0.9110997395E+00 - 0.3948329683E+00 -0.1183393081E+00 0.9110997395E+00 - 0.2903312257E+00 -0.2925833968E+00 0.9110997395E+00 - 0.3519823723E+00 -0.4189077264E+00 0.8370332887E+00 - 0.4934024609E+00 -0.4143740303E+00 0.7647536693E+00 - 0.5979042035E+00 -0.2401299416E+00 0.7647536693E+00 - 0.5353060010E+00 -0.1132376214E+00 0.8370332887E+00 - 0.8461042293E+00 0.5247182702E+00 -0.9369295664E-01 - 0.8439420100E+00 0.5324833101E+00 0.6498775489E-01 - 0.9116043454E+00 0.3734441112E+00 0.1718051609E+00 - 0.9648018180E+00 0.2457215536E+00 0.9369295664E-01 - 0.9688969256E+00 0.2387788502E+00 -0.6498775488E-01 - 0.9012345902E+00 0.3978180491E+00 -0.1718051609E+00 - 0.9929343613E+00 -0.7282159007E-01 0.9369295664E-01 - 0.9629456817E+00 -0.2078908386E+00 0.1718051609E+00 - 0.9242047653E+00 -0.3763271762E+00 0.6498775488E-01 - 0.9249725723E+00 -0.3683033672E+00 -0.9369295664E-01 - 0.9570083487E+00 -0.2337049578E+00 
-0.1718051609E+00 - 0.9957492651E+00 -0.6526862018E-01 -0.6498775488E-01 - 0.8359870647E+00 -0.3390817753E+00 -0.4314500867E+00 - 0.7981815150E+00 -0.4787040975E+00 -0.3657166338E+00 - 0.6928135883E+00 -0.5778063297E+00 -0.4314500867E+00 - 0.6012080497E+00 -0.5228665124E+00 -0.6042842801E+00 - 0.6374812308E+00 -0.3823251622E+00 -0.6689134105E+00 - 0.7443815262E+00 -0.2841419580E+00 -0.6042842801E+00 - 0.6427339198E+00 -0.4522094255E-01 -0.7647536693E+00 - 0.5309875045E+00 -0.1320134222E+00 -0.8370332887E+00 - 0.4068591013E+00 -0.6605252750E-01 -0.9110997395E+00 - 0.3889846814E+00 0.1363384845E+00 -0.9110997395E+00 - 0.4996310559E+00 0.2230338127E+00 -0.8370332887E+00 - 0.6248594999E+00 0.1571700695E+00 -0.7647536693E+00 - 0.6830591083E+00 0.4102081952E+00 -0.6042842801E+00 - 0.5606053708E+00 0.4881357060E+00 -0.6689134105E+00 - 0.5002616026E+00 0.6201442072E+00 -0.6042842801E+00 - 0.5808201421E+00 0.6902889144E+00 -0.4314500867E+00 - 0.7019263040E+00 0.6111880296E+00 -0.3657166338E+00 - 0.7636176477E+00 0.4803529024E+00 -0.4314500867E+00 - 0.6435033919E+00 0.7626794715E+00 0.6498775489E-01 - 0.6361095616E+00 0.7658891927E+00 -0.9369295664E-01 - 0.5179984665E+00 0.8379502225E+00 -0.1718051609E+00 - 0.3697775916E+00 0.9268447143E+00 -0.6498775488E-01 - 0.3760910397E+00 0.9218335857E+00 0.9369295664E-01 - 0.4952825170E+00 0.8515739633E+00 0.1718051609E+00 - 0.2416233762E+00 0.5973022429E+00 -0.7647536693E+00 - 0.2896363881E+00 0.4642047353E+00 -0.8370332887E+00 - 0.1885460633E+00 0.3665346460E+00 -0.9110997395E+00 - -0.9462727051E-02 0.4120773247E+00 -0.9110997395E+00 - -0.5772327375E-01 0.5440986099E+00 -0.8370332887E+00 - 0.4361458581E-01 0.6428449216E+00 -0.7647536693E+00 - -0.4770204756E+00 0.7657037865E+00 -0.4314500867E+00 - -0.3643672015E+00 0.8564390733E+00 -0.3657166338E+00 - -0.2208719276E+00 0.8746807500E+00 -0.4314500867E+00 - -0.1790543045E+00 0.7763891195E+00 -0.6042842801E+00 - -0.2910080573E+00 0.6840096197E+00 -0.6689134105E+00 - -0.4352028525E+00 
0.6674121560E+00 -0.6042842801E+00 - -0.9985064359E-01 0.9800575675E+00 -0.1718051609E+00 - -0.2375761442E+00 0.9668398035E+00 -0.9369295664E-01 - -0.2456292985E+00 0.9671829400E+00 0.6498775489E-01 - -0.7346522053E-01 0.9823878298E+00 0.1718051609E+00 - 0.6444507322E-01 0.9935131919E+00 0.9369295664E-01 - 0.7231343441E-01 0.9952624573E+00 -0.6498775488E-01 - -0.1666203030E+00 -0.7244256510E+00 -0.6689134105E+00 - -0.4020851882E-01 -0.7957535949E+00 -0.6042842801E+00 - -0.6415172192E-01 -0.8998529765E+00 -0.4314500867E+00 - -0.2086229985E+00 -0.9070434324E+00 -0.3657166338E+00 - -0.3354353023E+00 -0.8374568530E+00 -0.4314500867E+00 - -0.3114920992E+00 -0.7333574715E+00 -0.6042842801E+00 - 0.3853193725E-01 -0.5457935172E+00 -0.8370332887E+00 - 0.6290668992E-01 -0.4073573530E+00 -0.9110997395E+00 - 0.2498684812E+00 -0.3278155072E+00 -0.9110997395E+00 - 0.3665122481E+00 -0.4062561330E+00 -0.8370332887E+00 - 0.3425698233E+00 -0.5457084767E+00 -0.7647536693E+00 - 0.1556080320E+00 -0.6252503224E+00 -0.7647536693E+00 - 0.5264977972E+00 -0.8476890121E+00 -0.6498775489E-01 - 0.6568442522E+00 -0.7341924919E+00 -0.1718051609E+00 - 0.7604973159E+00 -0.6425460780E+00 -0.9369295664E-01 - 0.7672141452E+00 -0.6380901559E+00 0.6498775488E-01 - 0.6368676903E+00 -0.7515866762E+00 0.1718051609E+00 - 0.5318352427E+00 -0.8416489200E+00 0.9369295664E-01 - 0.7404582412E+00 0.6539451243E-01 0.6689134105E+00 - 0.7937207543E+00 -0.6962666910E-01 0.6042842801E+00 - 0.9001240061E+00 -0.6022953042E-01 0.4314500867E+00 - 0.9271176189E+00 0.8187957304E-01 0.3657166338E+00 - 0.8756350093E+00 0.2170579490E+00 0.4314500867E+00 - 0.7692317574E+00 0.2076608103E+00 0.6042842801E+00 - -0.3948329683E+00 0.1183393081E+00 -0.9110997395E+00 - -0.5353060010E+00 0.1132376214E+00 -0.8370332887E+00 - -0.5979042035E+00 0.2401299416E+00 -0.7647536693E+00 - -0.4934024609E+00 0.4143740303E+00 -0.7647536693E+00 - -0.3519823723E+00 0.4189077264E+00 -0.8370332887E+00 - -0.2903312257E+00 0.2925833968E+00 
-0.9110997395E+00 - -0.7937207543E+00 0.6962666910E-01 -0.6042842801E+00 - -0.7404582412E+00 -0.6539451243E-01 -0.6689134105E+00 - -0.7692317574E+00 -0.2076608103E+00 -0.6042842801E+00 - -0.8756350093E+00 -0.2170579490E+00 -0.4314500867E+00 - -0.9271176189E+00 -0.8187957304E-01 -0.3657166338E+00 - -0.9001240061E+00 0.6022953042E-01 -0.4314500867E+00 - -0.5264977972E+00 0.8476890121E+00 0.6498775489E-01 - -0.5318352427E+00 0.8416489200E+00 -0.9369295664E-01 - -0.6368676903E+00 0.7515866762E+00 -0.1718051609E+00 - -0.7672141452E+00 0.6380901559E+00 -0.6498775488E-01 - -0.7604973159E+00 0.6425460780E+00 0.9369295664E-01 - -0.6568442522E+00 0.7341924919E+00 0.1718051609E+00 - -0.2345574672E+00 -0.3389396101E+00 -0.9110997395E+00 - -0.2731140292E+00 -0.4741139110E+00 -0.8370332887E+00 - -0.4131397056E+00 -0.4944364560E+00 -0.7647536693E+00 - -0.5465628671E+00 -0.3412050082E+00 -0.7647536693E+00 - -0.5071734576E+00 -0.2053055222E+00 -0.8370332887E+00 - -0.3679806288E+00 -0.1857081622E+00 -0.9110997395E+00 - 0.4020851882E-01 0.7957535949E+00 0.6042842801E+00 - 0.1666203030E+00 0.7244256510E+00 0.6689134105E+00 - 0.3114920992E+00 0.7333574715E+00 0.6042842801E+00 - 0.3354353023E+00 0.8374568530E+00 0.4314500867E+00 - 0.2086229985E+00 0.9070434324E+00 0.3657166338E+00 - 0.6415172192E-01 0.8998529765E+00 0.4314500867E+00 - -0.8267643340E+00 0.2569457569E-01 0.5619613196E+00 - -0.7591065523E+00 0.1081792529E+00 0.6419147073E+00 - -0.7443815262E+00 0.2841419580E+00 0.6042842801E+00 - -0.8359870647E+00 0.3390817753E+00 0.4314500867E+00 - -0.9164945828E+00 0.2025716858E+00 0.3449672329E+00 - -0.9154295376E+00 0.7887094654E-01 0.3946747211E+00 - -0.9154295376E+00 0.7887094654E-01 0.3946747211E+00 - -0.9164945828E+00 0.2025716858E+00 0.3449672329E+00 - -0.9570083487E+00 0.2337049578E+00 0.1718051609E+00 - -0.9957492651E+00 0.6526862018E-01 0.6498775488E-01 - -0.9830555860E+00 -0.8681980583E-01 0.1614436008E+00 - -0.9529269564E+00 -0.8415895755E-01 0.2912859173E+00 - 
-0.9529269564E+00 -0.8415895755E-01 0.2912859173E+00 - -0.9830555860E+00 -0.8681980583E-01 0.1614436008E+00 - -0.9688969256E+00 -0.2387788502E+00 0.6498775488E-01 - -0.9012345902E+00 -0.3978180491E+00 0.1718051609E+00 - -0.8668045177E+00 -0.3600660166E+00 0.3449672329E+00 - -0.8874364321E+00 -0.2380933503E+00 0.3946747211E+00 - -0.8874364321E+00 -0.2380933503E+00 0.3946747211E+00 - -0.8668045177E+00 -0.3600660166E+00 0.3449672329E+00 - -0.7636176477E+00 -0.4803529024E+00 0.4314500867E+00 - -0.6830591083E+00 -0.4102081952E+00 0.6042842801E+00 - -0.7283964032E+00 -0.2395499705E+00 0.6419147073E+00 - -0.8094636434E+00 -0.1702001330E+00 0.5619613196E+00 - -0.8094636434E+00 -0.1702001330E+00 0.5619613196E+00 - -0.7283964032E+00 -0.2395499705E+00 0.6419147073E+00 - -0.6248594999E+00 -0.1571700695E+00 0.7647536693E+00 - -0.6427339198E+00 0.4522094255E-01 0.7647536693E+00 - -0.7591065523E+00 0.1081792529E+00 0.6419147073E+00 - -0.8267643340E+00 0.2569457569E-01 0.5619613196E+00 - -0.8826807659E-01 -0.8224404063E+00 0.5619613196E+00 - 0.2738693219E-02 -0.7667711576E+00 0.6419147073E+00 - -0.4361458581E-01 -0.6428449216E+00 0.7647536693E+00 - -0.2416233762E+00 -0.5973022429E+00 0.7647536693E+00 - -0.3374614086E+00 -0.6885240056E+00 0.6419147073E+00 - -0.2799212232E+00 -0.7783595468E+00 0.5619613196E+00 - -0.2799212232E+00 -0.7783595468E+00 0.5619613196E+00 - -0.3374614086E+00 -0.6885240056E+00 0.6419147073E+00 - -0.5002616026E+00 -0.6201442072E+00 0.6042842801E+00 - -0.5808201421E+00 -0.6902889144E+00 0.4314500867E+00 - -0.4758695232E+00 -0.8090400517E+00 0.3449672329E+00 - -0.3578940119E+00 -0.8462527641E+00 0.3946747211E+00 - -0.3578940119E+00 -0.8462527641E+00 0.3946747211E+00 - -0.4758695232E+00 -0.8090400517E+00 0.3449672329E+00 - -0.5179984665E+00 -0.8379502225E+00 0.1718051609E+00 - -0.3697775916E+00 -0.9268447143E+00 0.6498775488E-01 - -0.2212103404E+00 -0.9617702164E+00 0.1614436008E+00 - -0.2144306989E+00 -0.9322939395E+00 0.2912859173E+00 - -0.2144306989E+00 
-0.9322939395E+00 0.2912859173E+00 - -0.2212103404E+00 -0.9617702164E+00 0.1614436008E+00 - -0.7231343441E-01 -0.9952624573E+00 0.6498775488E-01 - 0.9985064359E-01 -0.9800575675E+00 0.1718051609E+00 - 0.7458580462E-01 -0.9356466032E+00 0.3449672329E+00 - -0.4779270666E-01 -0.9175770931E+00 0.3946747211E+00 - -0.4779270666E-01 -0.9175770931E+00 0.3946747211E+00 - 0.7458580462E-01 -0.9356466032E+00 0.3449672329E+00 - 0.2208719276E+00 -0.8746807500E+00 0.4314500867E+00 - 0.1790543045E+00 -0.7763891195E+00 0.6042842801E+00 - 0.2738693219E-02 -0.7667711576E+00 0.6419147073E+00 - -0.8826807659E-01 -0.8224404063E+00 0.5619613196E+00 - 0.6549052426E-01 -0.1539343928E+00 0.9859083091E+00 - 0.1162510682E+00 -0.2732462108E+00 0.9548938147E+00 - 0.2903312257E+00 -0.2925833968E+00 0.9110997395E+00 - 0.3948329683E+00 -0.1183393081E+00 0.9110997395E+00 - 0.2957961450E+00 0.2612361319E-01 0.9548938147E+00 - 0.1666379923E+00 0.1471684647E-01 0.9859083091E+00 - 0.1666379923E+00 0.1471684647E-01 0.9859083091E+00 - 0.2957961450E+00 0.2612361319E-01 0.9548938147E+00 - 0.3679806288E+00 0.1857081622E+00 0.9110997395E+00 - 0.2345574672E+00 0.3389396101E+00 0.9110997395E+00 - 0.6656100312E-01 0.2893914916E+00 0.9548938147E+00 - 0.3749741879E-01 0.1630299041E+00 0.9859083091E+00 - 0.3749741879E-01 0.1630299041E+00 0.9859083091E+00 - 0.6656100312E-01 0.2893914916E+00 0.9548938147E+00 - -0.6290668992E-01 0.4073573530E+00 0.9110997395E+00 - -0.2498684812E+00 0.3278155072E+00 0.9110997395E+00 - -0.2546591828E+00 0.1527301647E+00 0.9548938147E+00 - -0.1434633130E+00 0.8604117543E-01 0.9859083091E+00 - -0.1434633130E+00 0.8604117543E-01 0.9859083091E+00 - -0.2546591828E+00 0.1527301647E+00 0.9548938147E+00 - -0.4068591013E+00 0.6605252750E-01 0.9110997395E+00 - -0.3889846814E+00 -0.1363384845E+00 0.9110997395E+00 - -0.2239490336E+00 -0.1949990588E+00 0.9548938147E+00 - -0.1261626223E+00 -0.1098535332E+00 0.9859083091E+00 - -0.1261626223E+00 -0.1098535332E+00 0.9859083091E+00 - -0.2239490336E+00 
-0.1949990588E+00 0.9548938147E+00 - -0.1885460633E+00 -0.3665346460E+00 0.9110997395E+00 - 0.9462727051E-02 -0.4120773247E+00 0.9110997395E+00 - 0.1162510682E+00 -0.2732462108E+00 0.9548938147E+00 - 0.6549052426E-01 -0.1539343928E+00 0.9859083091E+00 - 0.8094636434E+00 0.1702001330E+00 -0.5619613196E+00 - 0.7283964032E+00 0.2395499705E+00 -0.6419147073E+00 - 0.6830591083E+00 0.4102081952E+00 -0.6042842801E+00 - 0.7636176477E+00 0.4803529024E+00 -0.4314500867E+00 - 0.8668045177E+00 0.3600660166E+00 -0.3449672329E+00 - 0.8874364321E+00 0.2380933503E+00 -0.3946747211E+00 - 0.8874364321E+00 0.2380933503E+00 -0.3946747211E+00 - 0.8668045177E+00 0.3600660166E+00 -0.3449672329E+00 - 0.9012345902E+00 0.3978180491E+00 -0.1718051609E+00 - 0.9688969256E+00 0.2387788502E+00 -0.6498775488E-01 - 0.9830555860E+00 0.8681980583E-01 -0.1614436008E+00 - 0.9529269564E+00 0.8415895755E-01 -0.2912859173E+00 - 0.9529269564E+00 0.8415895755E-01 -0.2912859173E+00 - 0.9830555860E+00 0.8681980583E-01 -0.1614436008E+00 - 0.9957492651E+00 -0.6526862018E-01 -0.6498775488E-01 - 0.9570083487E+00 -0.2337049578E+00 -0.1718051609E+00 - 0.9164945828E+00 -0.2025716858E+00 -0.3449672329E+00 - 0.9154295376E+00 -0.7887094654E-01 -0.3946747211E+00 - 0.9154295376E+00 -0.7887094654E-01 -0.3946747211E+00 - 0.9164945828E+00 -0.2025716858E+00 -0.3449672329E+00 - 0.8359870647E+00 -0.3390817753E+00 -0.4314500867E+00 - 0.7443815262E+00 -0.2841419580E+00 -0.6042842801E+00 - 0.7591065523E+00 -0.1081792529E+00 -0.6419147073E+00 - 0.8267643340E+00 -0.2569457569E-01 -0.5619613196E+00 - 0.8267643340E+00 -0.2569457569E-01 -0.5619613196E+00 - 0.7591065523E+00 -0.1081792529E+00 -0.6419147073E+00 - 0.6427339198E+00 -0.4522094255E-01 -0.7647536693E+00 - 0.6248594999E+00 0.1571700695E+00 -0.7647536693E+00 - 0.7283964032E+00 0.2395499705E+00 -0.6419147073E+00 - 0.8094636434E+00 0.1702001330E+00 -0.5619613196E+00 - 0.2144306989E+00 0.9322939395E+00 -0.2912859173E+00 - 0.2212103404E+00 0.9617702164E+00 -0.1614436008E+00 - 
0.3697775916E+00 0.9268447143E+00 -0.6498775488E-01 - 0.5179984665E+00 0.8379502225E+00 -0.1718051609E+00 - 0.4758695232E+00 0.8090400517E+00 -0.3449672329E+00 - 0.3578940119E+00 0.8462527641E+00 -0.3946747211E+00 - 0.3578940119E+00 0.8462527641E+00 -0.3946747211E+00 - 0.4758695232E+00 0.8090400517E+00 -0.3449672329E+00 - 0.5808201421E+00 0.6902889144E+00 -0.4314500867E+00 - 0.5002616026E+00 0.6201442072E+00 -0.6042842801E+00 - 0.3374614086E+00 0.6885240056E+00 -0.6419147073E+00 - 0.2799212232E+00 0.7783595468E+00 -0.5619613196E+00 - 0.2799212232E+00 0.7783595468E+00 -0.5619613196E+00 - 0.3374614086E+00 0.6885240056E+00 -0.6419147073E+00 - 0.2416233762E+00 0.5973022429E+00 -0.7647536693E+00 - 0.4361458581E-01 0.6428449216E+00 -0.7647536693E+00 - -0.2738693219E-02 0.7667711576E+00 -0.6419147073E+00 - 0.8826807659E-01 0.8224404063E+00 -0.5619613196E+00 - 0.8826807659E-01 0.8224404063E+00 -0.5619613196E+00 - -0.2738693219E-02 0.7667711576E+00 -0.6419147073E+00 - -0.1790543045E+00 0.7763891195E+00 -0.6042842801E+00 - -0.2208719276E+00 0.8746807500E+00 -0.4314500867E+00 - -0.7458580462E-01 0.9356466032E+00 -0.3449672329E+00 - 0.4779270666E-01 0.9175770931E+00 -0.3946747211E+00 - 0.4779270666E-01 0.9175770931E+00 -0.3946747211E+00 - -0.7458580462E-01 0.9356466032E+00 -0.3449672329E+00 - -0.9985064359E-01 0.9800575675E+00 -0.1718051609E+00 - 0.7231343441E-01 0.9952624573E+00 -0.6498775488E-01 - 0.2212103404E+00 0.9617702164E+00 -0.1614436008E+00 - 0.2144306989E+00 0.9322939395E+00 -0.2912859173E+00 - 0.2078725566E+00 -0.8949976898E+00 -0.3946747211E+00 - 0.9055527954E-01 -0.9342362386E+00 -0.3449672329E+00 - -0.6415172192E-01 -0.8998529765E+00 -0.4314500867E+00 - -0.4020851882E-01 -0.7957535949E+00 -0.6042842801E+00 - 0.1316922418E+00 -0.7553824608E+00 -0.6419147073E+00 - 0.2310472359E+00 -0.7942396679E+00 -0.5619613196E+00 - 0.2310472359E+00 -0.7942396679E+00 -0.5619613196E+00 - 0.1316922418E+00 -0.7553824608E+00 -0.6419147073E+00 - 0.1556080320E+00 -0.6252503224E+00 
-0.7647536693E+00 - 0.3425698233E+00 -0.5457084767E+00 -0.7647536693E+00 - 0.4529124277E+00 -0.6187211338E+00 -0.6419147073E+00 - 0.4120079677E+00 -0.7172509392E+00 -0.5619613196E+00 - 0.4120079677E+00 -0.7172509392E+00 -0.5619613196E+00 - 0.4529124277E+00 -0.6187211338E+00 -0.6419147073E+00 - 0.6012080497E+00 -0.5228665124E+00 -0.6042842801E+00 - 0.6928135883E+00 -0.5778063297E+00 -0.4314500867E+00 - 0.6103004582E+00 -0.7131135667E+00 -0.3449672329E+00 - 0.5006731713E+00 -0.7704273101E+00 -0.3946747211E+00 - 0.5006731713E+00 -0.7704273101E+00 -0.3946747211E+00 - 0.6103004582E+00 -0.7131135667E+00 -0.3449672329E+00 - 0.6568442522E+00 -0.7341924919E+00 -0.1718051609E+00 - 0.5264977972E+00 -0.8476890121E+00 -0.6498775489E-01 - 0.3863514246E+00 -0.9081126255E+00 -0.1614436008E+00 - 0.3745105489E+00 -0.8802808433E+00 -0.2912859173E+00 - 0.3745105489E+00 -0.8802808433E+00 -0.2912859173E+00 - 0.3863514246E+00 -0.9081126255E+00 -0.1614436008E+00 - 0.2456292985E+00 -0.9671829400E+00 -0.6498775489E-01 - 0.7346522053E-01 -0.9823878298E+00 -0.1718051609E+00 - 0.9055527954E-01 -0.9342362386E+00 -0.3449672329E+00 - 0.2078725566E+00 -0.8949976898E+00 -0.3946747211E+00 - 0.6942388739E+00 -0.6018839178E+00 0.3946747211E+00 - 0.6223910433E+00 -0.7025859360E+00 0.3449672329E+00 - 0.6368676903E+00 -0.7515866762E+00 0.1718051609E+00 - 0.7672141452E+00 -0.6380901559E+00 0.6498775488E-01 - 0.8463400769E+00 -0.5075868772E+00 0.1614436008E+00 - 0.8204014962E+00 -0.4920303846E+00 0.2912859173E+00 - 0.8204014962E+00 -0.4920303846E+00 0.2912859173E+00 - 0.8463400769E+00 -0.5075868772E+00 0.1614436008E+00 - 0.9242047653E+00 -0.3763271762E+00 0.6498775488E-01 - 0.9629456817E+00 -0.2078908386E+00 0.1718051609E+00 - 0.9129010801E+00 -0.2181953856E+00 0.3449672329E+00 - 0.8578989150E+00 -0.3290004805E+00 0.3946747211E+00 - 0.8578989150E+00 -0.3290004805E+00 0.3946747211E+00 - 0.9129010801E+00 -0.2181953856E+00 0.3449672329E+00 - 0.9001240061E+00 -0.6022953042E-01 0.4314500867E+00 - 
0.7937207543E+00 -0.6962666910E-01 0.6042842801E+00 - 0.7300890087E+00 -0.2343406665E+00 0.6419147073E+00 - 0.7549109720E+00 -0.3380959918E+00 0.5619613196E+00 - 0.7549109720E+00 -0.3380959918E+00 0.5619613196E+00 - 0.7300890087E+00 -0.2343406665E+00 0.6419147073E+00 - 0.5979042035E+00 -0.2401299416E+00 0.7647536693E+00 - 0.4934024609E+00 -0.4143740303E+00 0.7647536693E+00 - 0.5505439319E+00 -0.5337104904E+00 0.6419147073E+00 - 0.6537635039E+00 -0.5067472311E+00 0.5619613196E+00 - 0.6537635039E+00 -0.5067472311E+00 0.5619613196E+00 - 0.5505439319E+00 -0.5337104904E+00 0.6419147073E+00 - 0.4352028525E+00 -0.6674121560E+00 0.6042842801E+00 - 0.4770204756E+00 -0.7657037865E+00 0.4314500867E+00 - 0.6223910433E+00 -0.7025859360E+00 0.3449672329E+00 - 0.6942388739E+00 -0.6018839178E+00 0.3946747211E+00 - -0.6942388739E+00 0.6018839178E+00 -0.3946747211E+00 - -0.6223910433E+00 0.7025859360E+00 -0.3449672329E+00 - -0.4770204756E+00 0.7657037865E+00 -0.4314500867E+00 - -0.4352028525E+00 0.6674121560E+00 -0.6042842801E+00 - -0.5505439319E+00 0.5337104904E+00 -0.6419147073E+00 - -0.6537635039E+00 0.5067472311E+00 -0.5619613196E+00 - -0.6537635039E+00 0.5067472311E+00 -0.5619613196E+00 - -0.5505439319E+00 0.5337104904E+00 -0.6419147073E+00 - -0.4934024609E+00 0.4143740303E+00 -0.7647536693E+00 - -0.5979042035E+00 0.2401299416E+00 -0.7647536693E+00 - -0.7300890087E+00 0.2343406665E+00 -0.6419147073E+00 - -0.7549109720E+00 0.3380959918E+00 -0.5619613196E+00 - -0.7549109720E+00 0.3380959918E+00 -0.5619613196E+00 - -0.7300890087E+00 0.2343406665E+00 -0.6419147073E+00 - -0.7937207543E+00 0.6962666910E-01 -0.6042842801E+00 - -0.9001240061E+00 0.6022953042E-01 -0.4314500867E+00 - -0.9129010801E+00 0.2181953856E+00 -0.3449672329E+00 - -0.8578989150E+00 0.3290004805E+00 -0.3946747211E+00 - -0.8578989150E+00 0.3290004805E+00 -0.3946747211E+00 - -0.9129010801E+00 0.2181953856E+00 -0.3449672329E+00 - -0.9629456817E+00 0.2078908386E+00 -0.1718051609E+00 - -0.9242047653E+00 0.3763271762E+00 
-0.6498775488E-01 - -0.8463400769E+00 0.5075868772E+00 -0.1614436008E+00 - -0.8204014962E+00 0.4920303846E+00 -0.2912859173E+00 - -0.8204014962E+00 0.4920303846E+00 -0.2912859173E+00 - -0.8463400769E+00 0.5075868772E+00 -0.1614436008E+00 - -0.7672141452E+00 0.6380901559E+00 -0.6498775488E-01 - -0.6368676903E+00 0.7515866762E+00 -0.1718051609E+00 - -0.6223910433E+00 0.7025859360E+00 -0.3449672329E+00 - -0.6942388739E+00 0.6018839178E+00 -0.3946747211E+00 - -0.6549052426E-01 0.1539343928E+00 -0.9859083091E+00 - -0.1162510682E+00 0.2732462108E+00 -0.9548938147E+00 - -0.9462727051E-02 0.4120773247E+00 -0.9110997395E+00 - 0.1885460633E+00 0.3665346460E+00 -0.9110997395E+00 - 0.2239490336E+00 0.1949990588E+00 -0.9548938147E+00 - 0.1261626223E+00 0.1098535332E+00 -0.9859083091E+00 - 0.1261626223E+00 0.1098535332E+00 -0.9859083091E+00 - 0.2239490336E+00 0.1949990588E+00 -0.9548938147E+00 - 0.3889846814E+00 0.1363384845E+00 -0.9110997395E+00 - 0.4068591013E+00 -0.6605252750E-01 -0.9110997395E+00 - 0.2546591828E+00 -0.1527301647E+00 -0.9548938147E+00 - 0.1434633130E+00 -0.8604117543E-01 -0.9859083091E+00 - 0.1434633130E+00 -0.8604117543E-01 -0.9859083091E+00 - 0.2546591828E+00 -0.1527301647E+00 -0.9548938147E+00 - 0.2498684812E+00 -0.3278155072E+00 -0.9110997395E+00 - 0.6290668992E-01 -0.4073573530E+00 -0.9110997395E+00 - -0.6656100312E-01 -0.2893914916E+00 -0.9548938147E+00 - -0.3749741879E-01 -0.1630299041E+00 -0.9859083091E+00 - -0.3749741879E-01 -0.1630299041E+00 -0.9859083091E+00 - -0.6656100312E-01 -0.2893914916E+00 -0.9548938147E+00 - -0.2345574672E+00 -0.3389396101E+00 -0.9110997395E+00 - -0.3679806288E+00 -0.1857081622E+00 -0.9110997395E+00 - -0.2957961450E+00 -0.2612361319E-01 -0.9548938147E+00 - -0.1666379923E+00 -0.1471684647E-01 -0.9859083091E+00 - -0.1666379923E+00 -0.1471684647E-01 -0.9859083091E+00 - -0.2957961450E+00 -0.2612361319E-01 -0.9548938147E+00 - -0.3948329683E+00 0.1183393081E+00 -0.9110997395E+00 - -0.2903312257E+00 0.2925833968E+00 
-0.9110997395E+00 - -0.1162510682E+00 0.2732462108E+00 -0.9548938147E+00 - -0.6549052426E-01 0.1539343928E+00 -0.9859083091E+00 - 0.5780033950E+00 0.7142436138E+00 0.3946747211E+00 - 0.4896180912E+00 0.8007944387E+00 0.3449672329E+00 - 0.3354353023E+00 0.8374568530E+00 0.4314500867E+00 - 0.3114920992E+00 0.7333574715E+00 0.6042842801E+00 - 0.4484811290E+00 0.6219406608E+00 0.6419147073E+00 - 0.5548287157E+00 0.6134855919E+00 0.5619613196E+00 - 0.5548287157E+00 0.6134855919E+00 0.5619613196E+00 - 0.4484811290E+00 0.6219406608E+00 0.6419147073E+00 - 0.4131397056E+00 0.4944364560E+00 0.7647536693E+00 - 0.5465628671E+00 0.3412050082E+00 0.7647536693E+00 - 0.6777162709E+00 0.3586727823E+00 0.6419147073E+00 - 0.6839692892E+00 0.4651725343E+00 0.5619613196E+00 - 0.6839692892E+00 0.4651725343E+00 0.5619613196E+00 - 0.6777162709E+00 0.3586727823E+00 0.6419147073E+00 - 0.7692317574E+00 0.2076608103E+00 0.6042842801E+00 - 0.8756350093E+00 0.2170579490E+00 0.4314500867E+00 - 0.8605283422E+00 0.3748180632E+00 0.3449672329E+00 - 0.7869572323E+00 0.4742680456E+00 0.3946747211E+00 - 0.7869572323E+00 0.4742680456E+00 0.3946747211E+00 - 0.8605283422E+00 0.3748180632E+00 0.3449672329E+00 - 0.9116043454E+00 0.3734441112E+00 0.1718051609E+00 - 0.8439420100E+00 0.5324833101E+00 0.6498775489E-01 - 0.7442772740E+00 0.6480642740E+00 0.1614436008E+00 - 0.7214667080E+00 0.6282024384E+00 0.2912859173E+00 - 0.7214667080E+00 0.6282024384E+00 0.2912859173E+00 - 0.7442772740E+00 0.6480642740E+00 0.1614436008E+00 - 0.6435033919E+00 0.7626794715E+00 0.6498775489E-01 - 0.4952825170E+00 0.8515739633E+00 0.1718051609E+00 - 0.4896180912E+00 0.8007944387E+00 0.3449672329E+00 - 0.5780033950E+00 0.7142436138E+00 0.3946747211E+00 - -0.5780033950E+00 -0.7142436138E+00 -0.3946747211E+00 - -0.4896180912E+00 -0.8007944387E+00 -0.3449672329E+00 - -0.4952825170E+00 -0.8515739633E+00 -0.1718051609E+00 - -0.6435033919E+00 -0.7626794715E+00 -0.6498775489E-01 - -0.7442772740E+00 -0.6480642740E+00 -0.1614436008E+00 - 
-0.7214667080E+00 -0.6282024384E+00 -0.2912859173E+00 - -0.7214667080E+00 -0.6282024384E+00 -0.2912859173E+00 - -0.7442772740E+00 -0.6480642740E+00 -0.1614436008E+00 - -0.8439420100E+00 -0.5324833101E+00 -0.6498775489E-01 - -0.9116043454E+00 -0.3734441112E+00 -0.1718051609E+00 - -0.8605283422E+00 -0.3748180632E+00 -0.3449672329E+00 - -0.7869572323E+00 -0.4742680456E+00 -0.3946747211E+00 - -0.7869572323E+00 -0.4742680456E+00 -0.3946747211E+00 - -0.8605283422E+00 -0.3748180632E+00 -0.3449672329E+00 - -0.8756350093E+00 -0.2170579490E+00 -0.4314500867E+00 - -0.7692317574E+00 -0.2076608103E+00 -0.6042842801E+00 - -0.6777162709E+00 -0.3586727823E+00 -0.6419147073E+00 - -0.6839692892E+00 -0.4651725343E+00 -0.5619613196E+00 - -0.6839692892E+00 -0.4651725343E+00 -0.5619613196E+00 - -0.6777162709E+00 -0.3586727823E+00 -0.6419147073E+00 - -0.5465628671E+00 -0.3412050082E+00 -0.7647536693E+00 - -0.4131397056E+00 -0.4944364560E+00 -0.7647536693E+00 - -0.4484811290E+00 -0.6219406608E+00 -0.6419147073E+00 - -0.5548287157E+00 -0.6134855919E+00 -0.5619613196E+00 - -0.5548287157E+00 -0.6134855919E+00 -0.5619613196E+00 - -0.4484811290E+00 -0.6219406608E+00 -0.6419147073E+00 - -0.3114920992E+00 -0.7333574715E+00 -0.6042842801E+00 - -0.3354353023E+00 -0.8374568530E+00 -0.4314500867E+00 - -0.4896180912E+00 -0.8007944387E+00 -0.3449672329E+00 - -0.5780033950E+00 -0.7142436138E+00 -0.3946747211E+00 - -0.2078725566E+00 0.8949976898E+00 0.3946747211E+00 - -0.9055527954E-01 0.9342362386E+00 0.3449672329E+00 - -0.7346522053E-01 0.9823878298E+00 0.1718051609E+00 - -0.2456292985E+00 0.9671829400E+00 0.6498775489E-01 - -0.3863514246E+00 0.9081126255E+00 0.1614436008E+00 - -0.3745105489E+00 0.8802808433E+00 0.2912859173E+00 - -0.3745105489E+00 0.8802808433E+00 0.2912859173E+00 - -0.3863514246E+00 0.9081126255E+00 0.1614436008E+00 - -0.5264977972E+00 0.8476890121E+00 0.6498775489E-01 - -0.6568442522E+00 0.7341924919E+00 0.1718051609E+00 - -0.6103004582E+00 0.7131135667E+00 0.3449672329E+00 - 
-0.5006731713E+00 0.7704273101E+00 0.3946747211E+00 - -0.5006731713E+00 0.7704273101E+00 0.3946747211E+00 - -0.6103004582E+00 0.7131135667E+00 0.3449672329E+00 - -0.6928135883E+00 0.5778063297E+00 0.4314500867E+00 - -0.6012080497E+00 0.5228665124E+00 0.6042842801E+00 - -0.4529124277E+00 0.6187211338E+00 0.6419147073E+00 - -0.4120079677E+00 0.7172509392E+00 0.5619613196E+00 - -0.4120079677E+00 0.7172509392E+00 0.5619613196E+00 - -0.4529124277E+00 0.6187211338E+00 0.6419147073E+00 - -0.3425698233E+00 0.5457084767E+00 0.7647536693E+00 - -0.1556080320E+00 0.6252503224E+00 0.7647536693E+00 - -0.1316922418E+00 0.7553824608E+00 0.6419147073E+00 - -0.2310472359E+00 0.7942396679E+00 0.5619613196E+00 - -0.2310472359E+00 0.7942396679E+00 0.5619613196E+00 - -0.1316922418E+00 0.7553824608E+00 0.6419147073E+00 - 0.4020851882E-01 0.7957535949E+00 0.6042842801E+00 - 0.6415172192E-01 0.8998529765E+00 0.4314500867E+00 - -0.9055527954E-01 0.9342362386E+00 0.3449672329E+00 - -0.2078725566E+00 0.8949976898E+00 0.3946747211E+00 - -0.6928135883E+00 0.5778063297E+00 0.4314500867E+00 - -0.6103004582E+00 0.7131135667E+00 0.3449672329E+00 - -0.6568442522E+00 0.7341924919E+00 0.1718051609E+00 - -0.7604973159E+00 0.6425460780E+00 0.9369295664E-01 - -0.8423651527E+00 0.5052029426E+00 0.1875924741E+00 - -0.7981815150E+00 0.4787040975E+00 0.3657166338E+00 - -0.8423651527E+00 0.5052029426E+00 0.1875924741E+00 - -0.9249725723E+00 0.3683033672E+00 0.9369295664E-01 - -0.9570083487E+00 0.2337049578E+00 0.1718051609E+00 - -0.9164945828E+00 0.2025716858E+00 0.3449672329E+00 - -0.8359870647E+00 0.3390817753E+00 0.4314500867E+00 - -0.7981815150E+00 0.4787040975E+00 0.3657166338E+00 - -0.5309875045E+00 0.1320134222E+00 0.8370332887E+00 - -0.5206102953E+00 0.3122325898E+00 0.7946544723E+00 - -0.6374812308E+00 0.3823251622E+00 0.6689134105E+00 - -0.7443815262E+00 0.2841419580E+00 0.6042842801E+00 - -0.7591065523E+00 0.1081792529E+00 0.6419147073E+00 - -0.6427339198E+00 0.4522094255E-01 0.7647536693E+00 - 
-0.5206102953E+00 0.3122325898E+00 0.7946544723E+00 - -0.3665122481E+00 0.4062561330E+00 0.8370332887E+00 - -0.3425698233E+00 0.5457084767E+00 0.7647536693E+00 - -0.4529124277E+00 0.6187211338E+00 0.6419147073E+00 - -0.6012080497E+00 0.5228665124E+00 0.6042842801E+00 - -0.6374812308E+00 0.3823251622E+00 0.6689134105E+00 - -0.9629456817E+00 0.2078908386E+00 -0.1718051609E+00 - -0.9129010801E+00 0.2181953856E+00 -0.3449672329E+00 - -0.9001240061E+00 0.6022953042E-01 -0.4314500867E+00 - -0.9271176189E+00 -0.8187957304E-01 -0.3657166338E+00 - -0.9784385631E+00 -0.8641204758E-01 -0.1875924741E+00 - -0.9929343613E+00 0.7282159007E-01 -0.9369295664E-01 - -0.9784385631E+00 -0.8641204758E-01 -0.1875924741E+00 - -0.9648018180E+00 -0.2457215536E+00 -0.9369295664E-01 - -0.9688969256E+00 -0.2387788502E+00 0.6498775488E-01 - -0.9830555860E+00 -0.8681980583E-01 0.1614436008E+00 - -0.9957492651E+00 0.6526862018E-01 0.6498775488E-01 - -0.9929343613E+00 0.7282159007E-01 -0.9369295664E-01 - -0.7604973159E+00 0.6425460780E+00 0.9369295664E-01 - -0.7672141452E+00 0.6380901559E+00 -0.6498775488E-01 - -0.8463400769E+00 0.5075868772E+00 -0.1614436008E+00 - -0.9242047653E+00 0.3763271762E+00 -0.6498775488E-01 - -0.9249725723E+00 0.3683033672E+00 0.9369295664E-01 - -0.8423651527E+00 0.5052029426E+00 0.1875924741E+00 - -0.6361095616E+00 -0.7658891927E+00 0.9369295664E-01 - -0.7407816983E+00 -0.6450205727E+00 0.1875924741E+00 - -0.8461042293E+00 -0.5247182702E+00 0.9369295664E-01 - -0.8439420100E+00 -0.5324833101E+00 -0.6498775489E-01 - -0.7442772740E+00 -0.6480642740E+00 -0.1614436008E+00 - -0.6435033919E+00 -0.7626794715E+00 -0.6498775489E-01 - -0.7407816983E+00 -0.6450205727E+00 0.1875924741E+00 - -0.7019263040E+00 -0.6111880296E+00 0.3657166338E+00 - -0.7636176477E+00 -0.4803529024E+00 0.4314500867E+00 - -0.8668045177E+00 -0.3600660166E+00 0.3449672329E+00 - -0.9012345902E+00 -0.3978180491E+00 0.1718051609E+00 - -0.8461042293E+00 -0.5247182702E+00 0.9369295664E-01 - -0.9271176189E+00 
-0.8187957304E-01 -0.3657166338E+00 - -0.8756350093E+00 -0.2170579490E+00 -0.4314500867E+00 - -0.8605283422E+00 -0.3748180632E+00 -0.3449672329E+00 - -0.9116043454E+00 -0.3734441112E+00 -0.1718051609E+00 - -0.9648018180E+00 -0.2457215536E+00 -0.9369295664E-01 - -0.9784385631E+00 -0.8641204758E-01 -0.1875924741E+00 - -0.2896363881E+00 -0.4642047353E+00 0.8370332887E+00 - -0.4578282678E+00 -0.3986446373E+00 0.7946544723E+00 - -0.5606053708E+00 -0.4881357060E+00 0.6689134105E+00 - -0.5002616026E+00 -0.6201442072E+00 0.6042842801E+00 - -0.3374614086E+00 -0.6885240056E+00 0.6419147073E+00 - -0.2416233762E+00 -0.5973022429E+00 0.7647536693E+00 - -0.4578282678E+00 -0.3986446373E+00 0.7946544723E+00 - -0.4996310559E+00 -0.2230338127E+00 0.8370332887E+00 - -0.6248594999E+00 -0.1571700695E+00 0.7647536693E+00 - -0.7283964032E+00 -0.2395499705E+00 0.6419147073E+00 - -0.6830591083E+00 -0.4102081952E+00 0.6042842801E+00 - -0.5606053708E+00 -0.4881357060E+00 0.6689134105E+00 - -0.7407816983E+00 -0.6450205727E+00 0.1875924741E+00 - -0.6361095616E+00 -0.7658891927E+00 0.9369295664E-01 - -0.5179984665E+00 -0.8379502225E+00 0.1718051609E+00 - -0.4758695232E+00 -0.8090400517E+00 0.3449672329E+00 - -0.5808201421E+00 -0.6902889144E+00 0.4314500867E+00 - -0.7019263040E+00 -0.6111880296E+00 0.3657166338E+00 - -0.5309875045E+00 0.1320134222E+00 0.8370332887E+00 - -0.4068591013E+00 0.6605252750E-01 0.9110997395E+00 - -0.2546591828E+00 0.1527301647E+00 0.9548938147E+00 - -0.2498684812E+00 0.3278155072E+00 0.9110997395E+00 - -0.3665122481E+00 0.4062561330E+00 0.8370332887E+00 - -0.5206102953E+00 0.3122325898E+00 0.7946544723E+00 - -0.4578282678E+00 -0.3986446373E+00 0.7946544723E+00 - -0.2896363881E+00 -0.4642047353E+00 0.8370332887E+00 - -0.1885460633E+00 -0.3665346460E+00 0.9110997395E+00 - -0.2239490336E+00 -0.1949990588E+00 0.9548938147E+00 - -0.3889846814E+00 -0.1363384845E+00 0.9110997395E+00 - -0.4996310559E+00 -0.2230338127E+00 0.8370332887E+00 - -0.2086229985E+00 -0.9070434324E+00 
-0.3657166338E+00 - -0.2201714030E+00 -0.9572531624E+00 -0.1875924741E+00 - -0.3760910397E+00 -0.9218335857E+00 -0.9369295664E-01 - -0.4952825170E+00 -0.8515739633E+00 -0.1718051609E+00 - -0.4896180912E+00 -0.8007944387E+00 -0.3449672329E+00 - -0.3354353023E+00 -0.8374568530E+00 -0.4314500867E+00 - -0.2201714030E+00 -0.9572531624E+00 -0.1875924741E+00 - -0.6444507322E-01 -0.9935131919E+00 -0.9369295664E-01 - -0.7231343441E-01 -0.9952624573E+00 0.6498775488E-01 - -0.2212103404E+00 -0.9617702164E+00 0.1614436008E+00 - -0.3697775916E+00 -0.9268447143E+00 0.6498775488E-01 - -0.3760910397E+00 -0.9218335857E+00 -0.9369295664E-01 - -0.2201714030E+00 -0.9572531624E+00 -0.1875924741E+00 - -0.2086229985E+00 -0.9070434324E+00 -0.3657166338E+00 - -0.6415172192E-01 -0.8998529765E+00 -0.4314500867E+00 - 0.9055527954E-01 -0.9342362386E+00 -0.3449672329E+00 - 0.7346522053E-01 -0.9823878298E+00 -0.1718051609E+00 - -0.6444507322E-01 -0.9935131919E+00 -0.9369295664E-01 - 0.2456292985E+00 -0.9671829400E+00 -0.6498775489E-01 - 0.3863514246E+00 -0.9081126255E+00 -0.1614436008E+00 - 0.5264977972E+00 -0.8476890121E+00 -0.6498775489E-01 - 0.5318352427E+00 -0.8416489200E+00 0.9369295664E-01 - 0.3845368849E+00 -0.9038475800E+00 0.1875924741E+00 - 0.2375761442E+00 -0.9668398035E+00 0.9369295664E-01 - 0.3845368849E+00 -0.9038475800E+00 0.1875924741E+00 - 0.3643672015E+00 -0.8564390733E+00 0.3657166338E+00 - 0.2208719276E+00 -0.8746807500E+00 0.4314500867E+00 - 0.7458580462E-01 -0.9356466032E+00 0.3449672329E+00 - 0.9985064359E-01 -0.9800575675E+00 0.1718051609E+00 - 0.2375761442E+00 -0.9668398035E+00 0.9369295664E-01 - 0.5318352427E+00 -0.8416489200E+00 0.9369295664E-01 - 0.6368676903E+00 -0.7515866762E+00 0.1718051609E+00 - 0.6223910433E+00 -0.7025859360E+00 0.3449672329E+00 - 0.4770204756E+00 -0.7657037865E+00 0.4314500867E+00 - 0.3643672015E+00 -0.8564390733E+00 0.3657166338E+00 - 0.3845368849E+00 -0.9038475800E+00 0.1875924741E+00 - 0.3519823723E+00 -0.4189077264E+00 0.8370332887E+00 - 
0.2376568648E+00 -0.5586085251E+00 0.7946544723E+00 - 0.2910080573E+00 -0.6840096197E+00 0.6689134105E+00 - 0.4352028525E+00 -0.6674121560E+00 0.6042842801E+00 - 0.5505439319E+00 -0.5337104904E+00 0.6419147073E+00 - 0.4934024609E+00 -0.4143740303E+00 0.7647536693E+00 - 0.2376568648E+00 -0.5586085251E+00 0.7946544723E+00 - 0.5772327375E-01 -0.5440986099E+00 0.8370332887E+00 - -0.4361458581E-01 -0.6428449216E+00 0.7647536693E+00 - 0.2738693219E-02 -0.7667711576E+00 0.6419147073E+00 - 0.1790543045E+00 -0.7763891195E+00 0.6042842801E+00 - 0.2910080573E+00 -0.6840096197E+00 0.6689134105E+00 - 0.3519823723E+00 -0.4189077264E+00 0.8370332887E+00 - 0.2903312257E+00 -0.2925833968E+00 0.9110997395E+00 - 0.1162510682E+00 -0.2732462108E+00 0.9548938147E+00 - 0.9462727051E-02 -0.4120773247E+00 0.9110997395E+00 - 0.5772327375E-01 -0.5440986099E+00 0.8370332887E+00 - 0.2376568648E+00 -0.5586085251E+00 0.7946544723E+00 - 0.5353060010E+00 -0.1132376214E+00 0.8370332887E+00 - 0.6047082879E+00 0.5340558244E-01 0.7946544723E+00 - 0.5071734576E+00 0.2053055222E+00 0.8370332887E+00 - 0.3679806288E+00 0.1857081622E+00 0.9110997395E+00 - 0.2957961450E+00 0.2612361319E-01 0.9548938147E+00 - 0.3948329683E+00 -0.1183393081E+00 0.9110997395E+00 - 0.6047082879E+00 0.5340558244E-01 0.7946544723E+00 - 0.7404582412E+00 0.6539451243E-01 0.6689134105E+00 - 0.7692317574E+00 0.2076608103E+00 0.6042842801E+00 - 0.6777162709E+00 0.3586727823E+00 0.6419147073E+00 - 0.5465628671E+00 0.3412050082E+00 0.7647536693E+00 - 0.5071734576E+00 0.2053055222E+00 0.8370332887E+00 - 0.1666203030E+00 0.7244256510E+00 0.6689134105E+00 - 0.1360734104E+00 0.5916149902E+00 0.7946544723E+00 - 0.2731140292E+00 0.4741139110E+00 0.8370332887E+00 - 0.4131397056E+00 0.4944364560E+00 0.7647536693E+00 - 0.4484811290E+00 0.6219406608E+00 0.6419147073E+00 - 0.3114920992E+00 0.7333574715E+00 0.6042842801E+00 - 0.1360734104E+00 0.5916149902E+00 0.7946544723E+00 - -0.3853193725E-01 0.5457935172E+00 0.8370332887E+00 - -0.6290668992E-01 
0.4073573530E+00 0.9110997395E+00 - 0.6656100312E-01 0.2893914916E+00 0.9548938147E+00 - 0.2345574672E+00 0.3389396101E+00 0.9110997395E+00 - 0.2731140292E+00 0.4741139110E+00 0.8370332887E+00 - 0.1360734104E+00 0.5916149902E+00 0.7946544723E+00 - 0.1666203030E+00 0.7244256510E+00 0.6689134105E+00 - 0.4020851882E-01 0.7957535949E+00 0.6042842801E+00 - -0.1316922418E+00 0.7553824608E+00 0.6419147073E+00 - -0.1556080320E+00 0.6252503224E+00 0.7647536693E+00 - -0.3853193725E-01 0.5457935172E+00 0.8370332887E+00 - 0.5353060010E+00 -0.1132376214E+00 0.8370332887E+00 - 0.5979042035E+00 -0.2401299416E+00 0.7647536693E+00 - 0.7300890087E+00 -0.2343406665E+00 0.6419147073E+00 - 0.7937207543E+00 -0.6962666910E-01 0.6042842801E+00 - 0.7404582412E+00 0.6539451243E-01 0.6689134105E+00 - 0.6047082879E+00 0.5340558244E-01 0.7946544723E+00 - 0.9116043454E+00 0.3734441112E+00 0.1718051609E+00 - 0.8605283422E+00 0.3748180632E+00 0.3449672329E+00 - 0.8756350093E+00 0.2170579490E+00 0.4314500867E+00 - 0.9271176189E+00 0.8187957304E-01 0.3657166338E+00 - 0.9784385631E+00 0.8641204758E-01 0.1875924741E+00 - 0.9648018180E+00 0.2457215536E+00 0.9369295664E-01 - 0.9784385631E+00 0.8641204758E-01 0.1875924741E+00 - 0.9929343613E+00 -0.7282159007E-01 0.9369295664E-01 - 0.9957492651E+00 -0.6526862018E-01 -0.6498775488E-01 - 0.9830555860E+00 0.8681980583E-01 -0.1614436008E+00 - 0.9688969256E+00 0.2387788502E+00 -0.6498775488E-01 - 0.9648018180E+00 0.2457215536E+00 0.9369295664E-01 - 0.9012345902E+00 0.3978180491E+00 -0.1718051609E+00 - 0.8668045177E+00 0.3600660166E+00 -0.3449672329E+00 - 0.7636176477E+00 0.4803529024E+00 -0.4314500867E+00 - 0.7019263040E+00 0.6111880296E+00 -0.3657166338E+00 - 0.7407816983E+00 0.6450205727E+00 -0.1875924741E+00 - 0.8461042293E+00 0.5247182702E+00 -0.9369295664E-01 - 0.7407816983E+00 0.6450205727E+00 -0.1875924741E+00 - 0.6361095616E+00 0.7658891927E+00 -0.9369295664E-01 - 0.6435033919E+00 0.7626794715E+00 0.6498775489E-01 - 0.7442772740E+00 0.6480642740E+00 
0.1614436008E+00 - 0.8439420100E+00 0.5324833101E+00 0.6498775489E-01 - 0.8461042293E+00 0.5247182702E+00 -0.9369295664E-01 - 0.9242047653E+00 -0.3763271762E+00 0.6498775488E-01 - 0.8463400769E+00 -0.5075868772E+00 0.1614436008E+00 - 0.7672141452E+00 -0.6380901559E+00 0.6498775488E-01 - 0.7604973159E+00 -0.6425460780E+00 -0.9369295664E-01 - 0.8423651527E+00 -0.5052029426E+00 -0.1875924741E+00 - 0.9249725723E+00 -0.3683033672E+00 -0.9369295664E-01 - 0.8423651527E+00 -0.5052029426E+00 -0.1875924741E+00 - 0.7981815150E+00 -0.4787040975E+00 -0.3657166338E+00 - 0.8359870647E+00 -0.3390817753E+00 -0.4314500867E+00 - 0.9164945828E+00 -0.2025716858E+00 -0.3449672329E+00 - 0.9570083487E+00 -0.2337049578E+00 -0.1718051609E+00 - 0.9249725723E+00 -0.3683033672E+00 -0.9369295664E-01 - 0.9271176189E+00 0.8187957304E-01 0.3657166338E+00 - 0.9001240061E+00 -0.6022953042E-01 0.4314500867E+00 - 0.9129010801E+00 -0.2181953856E+00 0.3449672329E+00 - 0.9629456817E+00 -0.2078908386E+00 0.1718051609E+00 - 0.9929343613E+00 -0.7282159007E-01 0.9369295664E-01 - 0.9784385631E+00 0.8641204758E-01 0.1875924741E+00 - 0.6012080497E+00 -0.5228665124E+00 -0.6042842801E+00 - 0.4529124277E+00 -0.6187211338E+00 -0.6419147073E+00 - 0.3425698233E+00 -0.5457084767E+00 -0.7647536693E+00 - 0.3665122481E+00 -0.4062561330E+00 -0.8370332887E+00 - 0.5206102953E+00 -0.3122325898E+00 -0.7946544723E+00 - 0.6374812308E+00 -0.3823251622E+00 -0.6689134105E+00 - 0.5206102953E+00 -0.3122325898E+00 -0.7946544723E+00 - 0.5309875045E+00 -0.1320134222E+00 -0.8370332887E+00 - 0.6427339198E+00 -0.4522094255E-01 -0.7647536693E+00 - 0.7591065523E+00 -0.1081792529E+00 -0.6419147073E+00 - 0.7443815262E+00 -0.2841419580E+00 -0.6042842801E+00 - 0.6374812308E+00 -0.3823251622E+00 -0.6689134105E+00 - 0.7604973159E+00 -0.6425460780E+00 -0.9369295664E-01 - 0.6568442522E+00 -0.7341924919E+00 -0.1718051609E+00 - 0.6103004582E+00 -0.7131135667E+00 -0.3449672329E+00 - 0.6928135883E+00 -0.5778063297E+00 -0.4314500867E+00 - 
0.7981815150E+00 -0.4787040975E+00 -0.3657166338E+00 - 0.8423651527E+00 -0.5052029426E+00 -0.1875924741E+00 - 0.2896363881E+00 0.4642047353E+00 -0.8370332887E+00 - 0.4578282678E+00 0.3986446373E+00 -0.7946544723E+00 - 0.4996310559E+00 0.2230338127E+00 -0.8370332887E+00 - 0.3889846814E+00 0.1363384845E+00 -0.9110997395E+00 - 0.2239490336E+00 0.1949990588E+00 -0.9548938147E+00 - 0.1885460633E+00 0.3665346460E+00 -0.9110997395E+00 - 0.4578282678E+00 0.3986446373E+00 -0.7946544723E+00 - 0.5606053708E+00 0.4881357060E+00 -0.6689134105E+00 - 0.6830591083E+00 0.4102081952E+00 -0.6042842801E+00 - 0.7283964032E+00 0.2395499705E+00 -0.6419147073E+00 - 0.6248594999E+00 0.1571700695E+00 -0.7647536693E+00 - 0.4996310559E+00 0.2230338127E+00 -0.8370332887E+00 - 0.5206102953E+00 -0.3122325898E+00 -0.7946544723E+00 - 0.3665122481E+00 -0.4062561330E+00 -0.8370332887E+00 - 0.2498684812E+00 -0.3278155072E+00 -0.9110997395E+00 - 0.2546591828E+00 -0.1527301647E+00 -0.9548938147E+00 - 0.4068591013E+00 -0.6605252750E-01 -0.9110997395E+00 - 0.5309875045E+00 -0.1320134222E+00 -0.8370332887E+00 - 0.7019263040E+00 0.6111880296E+00 -0.3657166338E+00 - 0.5808201421E+00 0.6902889144E+00 -0.4314500867E+00 - 0.4758695232E+00 0.8090400517E+00 -0.3449672329E+00 - 0.5179984665E+00 0.8379502225E+00 -0.1718051609E+00 - 0.6361095616E+00 0.7658891927E+00 -0.9369295664E-01 - 0.7407816983E+00 0.6450205727E+00 -0.1875924741E+00 - 0.4578282678E+00 0.3986446373E+00 -0.7946544723E+00 - 0.2896363881E+00 0.4642047353E+00 -0.8370332887E+00 - 0.2416233762E+00 0.5973022429E+00 -0.7647536693E+00 - 0.3374614086E+00 0.6885240056E+00 -0.6419147073E+00 - 0.5002616026E+00 0.6201442072E+00 -0.6042842801E+00 - 0.5606053708E+00 0.4881357060E+00 -0.6689134105E+00 - 0.3697775916E+00 0.9268447143E+00 -0.6498775488E-01 - 0.2212103404E+00 0.9617702164E+00 -0.1614436008E+00 - 0.7231343441E-01 0.9952624573E+00 -0.6498775488E-01 - 0.6444507322E-01 0.9935131919E+00 0.9369295664E-01 - 0.2201714030E+00 0.9572531624E+00 
0.1875924741E+00 - 0.3760910397E+00 0.9218335857E+00 0.9369295664E-01 - 0.2201714030E+00 0.9572531624E+00 0.1875924741E+00 - 0.2086229985E+00 0.9070434324E+00 0.3657166338E+00 - 0.3354353023E+00 0.8374568530E+00 0.4314500867E+00 - 0.4896180912E+00 0.8007944387E+00 0.3449672329E+00 - 0.4952825170E+00 0.8515739633E+00 0.1718051609E+00 - 0.3760910397E+00 0.9218335857E+00 0.9369295664E-01 - -0.9462727051E-02 0.4120773247E+00 -0.9110997395E+00 - -0.1162510682E+00 0.2732462108E+00 -0.9548938147E+00 - -0.2903312257E+00 0.2925833968E+00 -0.9110997395E+00 - -0.3519823723E+00 0.4189077264E+00 -0.8370332887E+00 - -0.2376568648E+00 0.5586085251E+00 -0.7946544723E+00 - -0.5772327375E-01 0.5440986099E+00 -0.8370332887E+00 - -0.2376568648E+00 0.5586085251E+00 -0.7946544723E+00 - -0.2910080573E+00 0.6840096197E+00 -0.6689134105E+00 - -0.1790543045E+00 0.7763891195E+00 -0.6042842801E+00 - -0.2738693219E-02 0.7667711576E+00 -0.6419147073E+00 - 0.4361458581E-01 0.6428449216E+00 -0.7647536693E+00 - -0.5772327375E-01 0.5440986099E+00 -0.8370332887E+00 - -0.2376568648E+00 0.5586085251E+00 -0.7946544723E+00 - -0.3519823723E+00 0.4189077264E+00 -0.8370332887E+00 - -0.4934024609E+00 0.4143740303E+00 -0.7647536693E+00 - -0.5505439319E+00 0.5337104904E+00 -0.6419147073E+00 - -0.4352028525E+00 0.6674121560E+00 -0.6042842801E+00 - -0.2910080573E+00 0.6840096197E+00 -0.6689134105E+00 - -0.4770204756E+00 0.7657037865E+00 -0.4314500867E+00 - -0.6223910433E+00 0.7025859360E+00 -0.3449672329E+00 - -0.6368676903E+00 0.7515866762E+00 -0.1718051609E+00 - -0.5318352427E+00 0.8416489200E+00 -0.9369295664E-01 - -0.3845368849E+00 0.9038475800E+00 -0.1875924741E+00 - -0.3643672015E+00 0.8564390733E+00 -0.3657166338E+00 - -0.3845368849E+00 0.9038475800E+00 -0.1875924741E+00 - -0.2375761442E+00 0.9668398035E+00 -0.9369295664E-01 - -0.9985064359E-01 0.9800575675E+00 -0.1718051609E+00 - -0.7458580462E-01 0.9356466032E+00 -0.3449672329E+00 - -0.2208719276E+00 0.8746807500E+00 -0.4314500867E+00 - 
-0.3643672015E+00 0.8564390733E+00 -0.3657166338E+00 - -0.5318352427E+00 0.8416489200E+00 -0.9369295664E-01 - -0.5264977972E+00 0.8476890121E+00 0.6498775489E-01 - -0.3863514246E+00 0.9081126255E+00 0.1614436008E+00 - -0.2456292985E+00 0.9671829400E+00 0.6498775489E-01 - -0.2375761442E+00 0.9668398035E+00 -0.9369295664E-01 - -0.3845368849E+00 0.9038475800E+00 -0.1875924741E+00 - 0.6444507322E-01 0.9935131919E+00 0.9369295664E-01 - -0.7346522053E-01 0.9823878298E+00 0.1718051609E+00 - -0.9055527954E-01 0.9342362386E+00 0.3449672329E+00 - 0.6415172192E-01 0.8998529765E+00 0.4314500867E+00 - 0.2086229985E+00 0.9070434324E+00 0.3657166338E+00 - 0.2201714030E+00 0.9572531624E+00 0.1875924741E+00 - -0.3114920992E+00 -0.7333574715E+00 -0.6042842801E+00 - -0.4484811290E+00 -0.6219406608E+00 -0.6419147073E+00 - -0.4131397056E+00 -0.4944364560E+00 -0.7647536693E+00 - -0.2731140292E+00 -0.4741139110E+00 -0.8370332887E+00 - -0.1360734104E+00 -0.5916149902E+00 -0.7946544723E+00 - -0.1666203030E+00 -0.7244256510E+00 -0.6689134105E+00 - -0.1360734104E+00 -0.5916149902E+00 -0.7946544723E+00 - 0.3853193725E-01 -0.5457935172E+00 -0.8370332887E+00 - 0.1556080320E+00 -0.6252503224E+00 -0.7647536693E+00 - 0.1316922418E+00 -0.7553824608E+00 -0.6419147073E+00 - -0.4020851882E-01 -0.7957535949E+00 -0.6042842801E+00 - -0.1666203030E+00 -0.7244256510E+00 -0.6689134105E+00 - -0.2731140292E+00 -0.4741139110E+00 -0.8370332887E+00 - -0.2345574672E+00 -0.3389396101E+00 -0.9110997395E+00 - -0.6656100312E-01 -0.2893914916E+00 -0.9548938147E+00 - 0.6290668992E-01 -0.4073573530E+00 -0.9110997395E+00 - 0.3853193725E-01 -0.5457935172E+00 -0.8370332887E+00 - -0.1360734104E+00 -0.5916149902E+00 -0.7946544723E+00 - -0.3948329683E+00 0.1183393081E+00 -0.9110997395E+00 - -0.2957961450E+00 -0.2612361319E-01 -0.9548938147E+00 - -0.3679806288E+00 -0.1857081622E+00 -0.9110997395E+00 - -0.5071734576E+00 -0.2053055222E+00 -0.8370332887E+00 - -0.6047082879E+00 -0.5340558244E-01 -0.7946544723E+00 - 
-0.5353060010E+00 0.1132376214E+00 -0.8370332887E+00 - -0.6047082879E+00 -0.5340558244E-01 -0.7946544723E+00 - -0.7404582412E+00 -0.6539451243E-01 -0.6689134105E+00 - -0.7937207543E+00 0.6962666910E-01 -0.6042842801E+00 - -0.7300890087E+00 0.2343406665E+00 -0.6419147073E+00 - -0.5979042035E+00 0.2401299416E+00 -0.7647536693E+00 - -0.5353060010E+00 0.1132376214E+00 -0.8370332887E+00 - -0.5071734576E+00 -0.2053055222E+00 -0.8370332887E+00 - -0.5465628671E+00 -0.3412050082E+00 -0.7647536693E+00 - -0.6777162709E+00 -0.3586727823E+00 -0.6419147073E+00 - -0.7692317574E+00 -0.2076608103E+00 -0.6042842801E+00 - -0.7404582412E+00 -0.6539451243E-01 -0.6689134105E+00 - -0.6047082879E+00 -0.5340558244E-01 -0.7946544723E+00 diff --git a/grid_gen/global_scvt/dx/voronoi.dx b/grid_gen/global_scvt/dx/voronoi.dx deleted file mode 100644 index 7b8186c89..000000000 --- a/grid_gen/global_scvt/dx/voronoi.dx +++ /dev/null @@ -1,25 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 960 -ascii data file vor.position.data - -object "edge list" class array type int rank 0 items 960 -ascii data file vor.edge.data -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 162 -ascii data file vor.loop.data -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 162 -ascii data file vor.face.data -attribute "ref" string "loops" - -object 0 class array type float rank 0 items 162 -data file vor.area.data -attribute "dep" string "faces" - -object "area" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 diff --git a/grid_gen/global_scvt/locs.dat b/grid_gen/global_scvt/locs.dat deleted file mode 100644 index 90f85cf58..000000000 --- a/grid_gen/global_scvt/locs.dat +++ /dev/null @@ -1,163 +0,0 @@ - 162 162 - 1 -0.8909593038 -0.0786862054 0.4472135955 - 2 -0.2004865378 -0.8716680264 0.4472135955 
- 3 0.0000000000 0.0000000000 1.0000000000 - 4 0.8909593038 0.0786862054 -0.4472135955 - 5 0.2004865378 0.8716680264 -0.4472135955 - 6 0.3501565946 -0.8230372770 -0.4472135955 - 7 0.7670518092 -0.4600342618 0.4472135955 - 8 -0.7670518092 0.4600342618 -0.4472135955 - 9 0.0000000000 0.0000000000 -1.0000000000 - 10 0.6745506270 0.5873512166 0.4472135955 - 11 -0.6745506270 -0.5873512166 -0.4472135955 - 12 -0.3501565946 0.8230372770 0.4472135955 - 13 -0.7295096215 0.4375185824 0.5257311121 - 14 -0.9745544804 0.2241507635 0.0000000000 - 15 -0.9201836497 -0.3914869741 0.0000000000 - 16 -0.6415357694 -0.5586042019 0.5257311121 - 17 -0.5236927392 -0.0462505911 0.8506508084 - 18 -0.5143339407 -0.8575899938 0.0000000000 - 19 0.0879738521 -0.9961227843 0.0000000000 - 20 0.3330187110 -0.7827549654 0.5257311121 - 21 -0.1178430302 -0.5123536108 0.8506508084 - 22 0.3964909105 0.3452363830 0.8506508084 - 23 -0.2058168823 0.4837691735 0.8506508084 - 24 0.4508617412 -0.2704013546 0.8506508084 - 25 0.9201836497 0.3914869741 0.0000000000 - 26 0.9745544804 -0.2241507635 0.0000000000 - 27 0.7295096215 -0.4375185824 -0.5257311121 - 28 0.5236927392 0.0462505911 -0.8506508084 - 29 0.6415357694 0.5586042019 -0.5257311121 - 30 0.5143339407 0.8575899938 0.0000000000 - 31 0.1178430302 0.5123536108 -0.8506508084 - 32 -0.3330187110 0.7827549654 -0.5257311121 - 33 -0.0879738521 0.9961227843 0.0000000000 - 34 -0.1906740282 -0.8290055565 -0.5257311121 - 35 0.2058168823 -0.4837691735 -0.8506508084 - 36 0.6566786235 -0.7541705281 0.0000000000 - 37 0.8473526517 0.0748350284 0.5257311121 - 38 -0.4508617412 0.2704013546 -0.8506508084 - 39 -0.8473526517 -0.0748350284 -0.5257311121 - 40 -0.6566786235 0.7541705281 0.0000000000 - 41 -0.3964909105 -0.3452363830 -0.8506508084 - 42 0.1906740282 0.8290055565 0.5257311121 - 43 -0.8434663484 0.1827424714 0.5051432551 - 44 -0.9690454298 0.0733970446 0.2357198099 - 45 -0.9411818011 -0.2421011948 0.2357198099 - 46 -0.7983820501 -0.3277444034 0.5051432551 - 47 
-0.7379905791 -0.0651765777 0.6716561016 - 48 -0.1660650217 -0.7220114194 0.6716561016 - 49 -0.4344438541 -0.7457136377 0.5051432551 - 50 -0.3692562437 -0.8989360365 0.2357198099 - 51 -0.0605892524 -0.9699304685 0.2357198099 - 52 0.0649898290 -0.8605850417 0.5051432551 - 53 0.2310548507 -0.1385736223 0.9630218103 - 54 0.2031912220 0.1769246171 0.9630218103 - 55 -0.1054757693 0.2479190491 0.9630218103 - 56 -0.2683788324 -0.0237022183 0.9630218103 - 57 -0.0603914710 -0.2625678257 0.9630218103 - 58 0.7983820501 0.3277444034 -0.5051432551 - 59 0.9411818011 0.2421011948 -0.2357198099 - 60 0.9690454298 -0.0733970446 -0.2357198099 - 61 0.8434663484 -0.1827424714 -0.5051432551 - 62 0.7379905791 0.0651765777 -0.6716561016 - 63 0.3692562437 0.8989360365 -0.2357198099 - 64 0.4344438541 0.7457136377 -0.5051432551 - 65 0.1660650217 0.7220114194 -0.6716561016 - 66 -0.0649898290 0.8605850417 -0.5051432551 - 67 0.0605892524 0.9699304685 -0.2357198099 - 68 0.0868470176 -0.8586546962 -0.5051432551 - 69 0.2900382396 -0.6817300791 -0.6716561016 - 70 0.5584170720 -0.6580278608 -0.5051432551 - 71 0.5210930903 -0.8203037014 -0.2357198099 - 72 0.2296467686 -0.9442979047 -0.2357198099 - 73 0.7408325207 -0.6289700689 0.2357198099 - 74 0.9037355838 -0.3573488014 0.2357198099 - 75 0.8385479733 -0.2041264026 0.5051432551 - 76 0.6353567514 -0.3810510197 0.6716561016 - 77 0.5749652804 -0.6436188454 0.5051432551 - 78 -0.5749652804 0.6436188454 -0.5051432551 - 79 -0.6353567514 0.3810510197 -0.6716561016 - 80 -0.8385479733 0.2041264026 -0.5051432551 - 81 -0.9037355838 0.3573488014 -0.2357198099 - 82 -0.7408325207 0.6289700689 -0.2357198099 - 83 0.0603914710 0.2625678257 -0.9630218103 - 84 0.2683788324 0.0237022183 -0.9630218103 - 85 0.1054757693 -0.2479190491 -0.9630218103 - 86 -0.2031912220 -0.1769246171 -0.9630218103 - 87 -0.2310548507 0.1385736223 -0.9630218103 - 88 0.4532613197 0.7344279869 0.5051432551 - 89 0.5587370890 0.4865089377 0.6716561016 - 90 0.7897919397 0.3479353154 0.5051432551 - 91 
0.8271159214 0.5102111560 0.2357198099 - 92 0.6191285600 0.7490767634 0.2357198099 - 93 -0.6191285600 -0.7490767634 -0.2357198099 - 94 -0.8271159214 -0.5102111560 -0.2357198099 - 95 -0.7897919397 -0.3479353154 -0.5051432551 - 96 -0.5587370890 -0.4865089377 -0.6716561016 - 97 -0.4532613197 -0.7344279869 -0.5051432551 - 98 -0.2296467686 0.9442979047 0.2357198099 - 99 -0.5210930903 0.8203037014 0.2357198099 - 100 -0.5584170720 0.6580278608 0.5051432551 - 101 -0.2900382396 0.6817300791 0.6716561016 - 102 -0.0868470176 0.8586546962 0.5051432551 - 103 -0.7389477008 0.6174931387 0.2695524424 - 104 -0.8927233180 0.3610907895 0.2695524424 - 105 -0.6485012287 0.2146202028 0.7303316540 - 106 -0.4947256115 0.4710225519 0.7303316540 - 107 -0.9607746082 0.0652198824 -0.2695524424 - 108 -0.9960073301 -0.0879636556 -0.0152247717 - 109 -0.8574905961 0.5142743275 0.0152247717 - 110 -0.7540831171 -0.6566025121 0.0152247717 - 111 -0.8156182708 -0.5119651523 0.2695524424 - 112 -0.9344721764 -0.2326010154 -0.2695524424 - 113 -0.4045138430 -0.5504400294 0.7303316540 - 114 -0.6008476888 -0.3249580433 0.7303316540 - 115 -0.6192844249 -0.7374471383 0.2695524424 - 116 -0.3951596413 0.2369943877 0.8875147988 - 117 -0.3475061014 -0.3025838584 0.8875147988 - 118 -0.3589234758 -0.8935968998 -0.2695524424 - 119 -0.2241247837 -0.9744415260 -0.0152247717 - 120 -0.0675510718 -0.9606135193 -0.2695524424 - 121 0.3914415994 -0.9200769971 0.0152247717 - 122 0.2348678876 -0.9339050038 0.2695524424 - 123 0.5099844947 -0.8168581860 0.2695524424 - 124 0.3984979249 -0.5548108497 0.7303316540 - 125 0.1233813177 -0.6718576676 0.7303316540 - 126 0.1803890593 -0.4240014966 0.8875147988 - 127 0.4589926712 0.0405365222 0.8875147988 - 128 0.6507991050 0.2075480669 0.7303316540 - 129 0.2950904458 0.6160659899 0.7303316540 - 130 0.1032840121 0.4490544452 0.8875147988 - 131 0.0037180419 0.6830826094 0.7303316540 - 132 0.6771015368 -0.0902728309 0.7303316540 - 133 0.9344721764 0.2326010154 0.2695524424 - 134 
0.9960073301 0.0879636556 0.0152247717 - 135 0.8156182708 0.5119651523 -0.2695524424 - 136 0.7540831171 0.6566025121 -0.0152247717 - 137 0.8574905961 -0.5142743275 -0.0152247717 - 138 0.8927233180 -0.3610907895 -0.2695524424 - 139 0.9607746082 -0.0652198824 0.2695524424 - 140 0.4947256115 -0.4710225519 -0.7303316540 - 141 0.6485012287 -0.2146202028 -0.7303316540 - 142 0.7389477008 -0.6174931387 -0.2695524424 - 143 0.3475061014 0.3025838584 -0.8875147988 - 144 0.6008476888 0.3249580433 -0.7303316540 - 145 0.3951596413 -0.2369943877 -0.8875147988 - 146 0.6192844249 0.7374471383 -0.2695524424 - 147 0.4045138430 0.5504400294 -0.7303316540 - 148 0.2241247837 0.9744415260 0.0152247717 - 149 0.3589234758 0.8935968998 0.2695524424 - 150 -0.1803890593 0.4240014966 -0.8875147988 - 151 -0.1233813177 0.6718576676 -0.7303316540 - 152 -0.3984979249 0.5548108497 -0.7303316540 - 153 -0.5099844947 0.8168581860 -0.2695524424 - 154 -0.2348678876 0.9339050038 -0.2695524424 - 155 -0.3914415994 0.9200769971 -0.0152247717 - 156 0.0675510718 0.9606135193 0.2695524424 - 157 -0.2950904458 -0.6160659899 -0.7303316540 - 158 -0.0037180419 -0.6830826094 -0.7303316540 - 159 -0.1032840121 -0.4490544452 -0.8875147988 - 160 -0.4589926712 -0.0405365222 -0.8875147988 - 161 -0.6771015368 0.0902728309 -0.7303316540 - 162 -0.6507991050 -0.2075480669 -0.7303316540 diff --git a/grid_gen/global_scvt/namelist.input b/grid_gen/global_scvt/namelist.input deleted file mode 100644 index 12b2e45ad..000000000 --- a/grid_gen/global_scvt/namelist.input +++ /dev/null @@ -1,9 +0,0 @@ -&domains - np = 162 - locs_as_xyz = .true. - n_scvt_iterations = 100 - eps = 0.000000001 - l2_conv = .true. - inf_conv = .false. 
- min_dx = 120000.0 -/ diff --git a/grid_gen/global_scvt/refine/grid_ref.f b/grid_gen/global_scvt/refine/grid_ref.f deleted file mode 100644 index 11851adff..000000000 --- a/grid_gen/global_scvt/refine/grid_ref.f +++ /dev/null @@ -1,58 +0,0 @@ - program Grid_Refine - include "scvt.m" - -C The file "nodes_s.dat" must exist and read it - open(15,file='locs.dat.out',status='unknown') - read(15,*) N - do node = 1,N - read(15,200) X(node),Y(node),Z(node) - enddo - close(15) - print *,"Number of Starting Generators = ", N - -C Generate the Delaunay triangles - CALL TRMESH (N,X,Y,Z,LIST,LPTR,LEND,LNEW,IWK,IWK(N+1),DS,IER) - - DO NODE = 1,N - LPL = LEND(NODE) - LP = LPL - K = 0 -10 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NEIGH(NODE,K) = ND - IF (LP .NE. LPL) GOTO 10 - NEISZ(NODE) = K - ENDDO - - K=N+1 - -C Add points into the middle of edges - DO I = 1,N - DO J = 1,NEISZ(I) - IF (NEIGH(I,J).GT.I) then - X(K) = 0.5*(X(I)+X(NEIGH(I,J))) - Y(K) = 0.5*(Y(I)+Y(NEIGH(I,J))) - Z(K) = 0.5*(Z(I)+Z(NEIGH(I,J))) - R = sqrt(X(K)**2+Y(K)**2+Z(K)**2) - X(K) = X(K)/R - Y(K) = Y(K)/R - Z(K) = Z(K)/R - K=K+1 - ENDIF - ENDDO - ENDDO - NG=K-1 - print *,"Number of Final Generators = ",NG - - open(16,file='locs.dat.out.refined',status='unknown') - write(16,100) NG - do node = 1,NG - write(16,200) X(node),Y(node),Z(node) - enddo - close(16) -100 format(I10) -!200 format(I10,3X,F16.10,3X,F16.10,3X,F16.10) -200 format(10x,f22.10,f23.10,f23.10) - - end program Grid_Refine diff --git a/grid_gen/global_scvt/refine/scvt.m b/grid_gen/global_scvt/refine/scvt.m deleted file mode 100644 index 5bb2fe462..000000000 --- a/grid_gen/global_scvt/refine/scvt.m +++ /dev/null @@ -1,36 +0,0 @@ - IMPLICIT REAL(A-H,O-Z) - INTEGER IER, IFLAG, K, KSUM, KT, LIN, LOUT, LP, LPL, - . LPLT, LPLV, LW, LWK, LNEW, N, N0, N1, N2, N3, - . NA, NB, NCOL, NMAX, NN, NROW, NT, NT6, NTMX, NV - INTEGER NEARND - LOGICAL INSIDE, NUMBR - REAL A, AL, AREA, DEL, ELAT, ELON, P(3), PLTSIZ, SC - . 
VLAT, VLON, VNRM - REAL V1(3), V2(3), V3(3), V4(3), V5(3), V6(3) - REAL SNW(3), ANGLES(3) - - PARAMETER (NMAX=1000000, NTMX=2*NMAX, NT6=6*NMAX, LWK=2*NMAX, - . NCOL=NMAX, NROW=16) - -C Array storage for the triangulation, work space, and nodal -C coordinates. - - INTEGER LIST(NT6), LPTR(NT6), LEND(NMAX), IWK(LWK), - . NEIGH(NMAX,20), NEISZ(NMAX), NTRI(6*NMAX,3), - . MARK(NMAX) - REAL DS(NMAX), RLAT(NMAX), RLON(NMAX), HV(NMAX), AV(NMAX) - REAL X(NMAX), Y(NMAX), Z(NMAX), X1(NMAX), Y1(NMAX), Z1(NMAX) - -C Array storage for the Voronoi diagram: adjacency array, -C boundary triangle list, triangle circumcenters, and -C circumradii. - - INTEGER LISTC(NT6), LBTRI(6,NCOL) - INTEGER VORTX(NMAX,20), VORSZ(NMAX) - REAL XC(NTMX), YC(NTMX), ZC(NTMX), RC(NTMX) - -C Array storage for the triangle list. - - INTEGER LTRI(NROW,NTMX) - INTEGER I1MACH(2) - diff --git a/grid_gen/global_scvt/refine/svtgen.f b/grid_gen/global_scvt/refine/svtgen.f deleted file mode 100644 index 3fda62cee..000000000 --- a/grid_gen/global_scvt/refine/svtgen.f +++ /dev/null @@ -1,6729 +0,0 @@ - SUBROUTINE ADDNOD (NST,K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER NST, K, LIST(*), LPTR(*), LEND(K), LNEW, IER - REAL X(K), Y(K), Z(K) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine adds node K to a triangulation of the -C convex hull of nodes 1,...,K-1, producing a triangulation -C of the convex hull of nodes 1,...,K. -C -C The algorithm consists of the following steps: node K -C is located relative to the triangulation (TRFIND), its -C index is added to the data structure (INTADD or BDYADD), -C and a sequence of swaps (SWPTST and SWAP) are applied to -C the arcs opposite K so that all arcs incident on node K -C and opposite node K are locally optimal (satisfy the cir- -C cumcircle test). 
Thus, if a Delaunay triangulation is -C input, a Delaunay triangulation will result. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to K. If NST < 1, the search is -C begun at node K-1. -C -C K = Nodal index (index for X, Y, Z, and LEND) of the -C new node to be added. K .GE. 4. -C -C X,Y,Z = Arrays of length .GE. K containing Car- -C tesian coordinates of the nodes. -C (X(I),Y(I),Z(I)) defines node I for -C I = 1,...,K. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure associated with -C the triangulation of nodes 1 -C to K-1. The array lengths are -C assumed to be large enough to -C add node K. Refer to Subrou- -C tine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node K as the -C last entry unless IER .NE. 0 -C and IER .NE. -3, in which case -C the arrays are not altered. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if K is outside its valid range -C on input. -C IER = -2 if all nodes (including K) are col- -C linear (lie on a common geodesic). -C IER = L if nodes L and K coincide for some -C L < K. -C -C Modules required by ADDNOD: BDYADD, COVSPH, INSERT, -C INTADD, JRAND, LSTPTR, -C STORE, SWAP, SWPTST, -C TRFIND -C -C Intrinsic function called by ADDNOD: ABS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER I1, I2, I3, IO1, IO2, IN1, IST, KK, KM1, L, - . LP, LPF, LPO1, LPO1S - LOGICAL SWPTST - REAL B1, B2, B3, P(3) -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND. -C I1,I2,I3 = Vertex indexes of a triangle containing K -C IN1 = Vertex opposite K: first neighbor of IO2 -C that precedes IO1. IN1,IO1,IO2 are in -C counterclockwise order. 
-C IO1,IO2 = Adjacent neighbors of K defining an arc to -C be tested for a swap -C IST = Index of node at which TRFIND begins its search -C KK = Local copy of K -C KM1 = K-1 -C L = Vertex index (I1, I2, or I3) returned in IER -C if node K coincides with a vertex -C LP = LIST pointer -C LPF = LIST pointer to the first neighbor of K -C LPO1 = LIST pointer to IO1 -C LPO1S = Saved value of LPO1 -C P = Cartesian coordinates of node K -C - KK = K - IF (KK .LT. 4) GO TO 3 -C -C Initialization: -C - KM1 = KK - 1 - IST = NST - IF (IST .LT. 1) IST = KM1 - P(1) = X(KK) - P(2) = Y(KK) - P(3) = Z(KK) -C -C Find a triangle (I1,I2,I3) containing K or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from node K. -C - CALL TRFIND (IST,P,KM1,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear or duplicate nodes. -C - IF (I1 .EQ. 0) GO TO 4 - IF (I3 .NE. 0) THEN - L = I1 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I2 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I3 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - CALL INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - ELSE - IF (I1 .NE. I2) THEN - CALL BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - ELSE - CALL COVSPH (KK,I1, LIST,LPTR,LEND,LNEW ) - ENDIF - ENDIF - IER = 0 -C -C Initialize variables for optimization of the -C triangulation. -C - LP = LEND(KK) - LPF = LPTR(LP) - IO2 = LIST(LPF) - LPO1 = LPTR(LPF) - IO1 = ABS(LIST(LPO1)) -C -C Begin loop: find the node opposite K. -C - 1 LP = LSTPTR(LEND(IO1),IO2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 2 - LP = LPTR(LP) - IN1 = ABS(LIST(LP)) -C -C Swap test: if a swap occurs, two new arcs are -C opposite K and must be tested. -C - LPO1S = LPO1 - IF ( .NOT. SWPTST(IN1,KK,IO1,IO2,X,Y,Z) ) GO TO 2 - CALL SWAP (IN1,KK,IO1,IO2, LIST,LPTR,LEND, LPO1) - IF (LPO1 .EQ. 0) THEN -C -C A swap is not possible because KK and IN1 are already -C adjacent. 
This error in SWPTST only occurs in the -C neutral case and when there are nearly duplicate -C nodes. -C - LPO1 = LPO1S - GO TO 2 - ENDIF - IO1 = IN1 - GO TO 1 -C -C No swap occurred. Test for termination and reset -C IO2 and IO1. -C - 2 IF (LPO1 .EQ. LPF .OR. LIST(LPO1) .LT. 0) RETURN - IO2 = IO1 - LPO1 = LPTR(LPO1) - IO1 = ABS(LIST(LPO1)) - GO TO 1 -C -C KK < 4. -C - 3 IER = -1 - RETURN -C -C All nodes are collinear. -C - 4 IER = -2 - RETURN -C -C Nodes L and K coincide. -C - 5 IER = L - RETURN - END - REAL FUNCTION AREAS (V1,V2,V3) - REAL V1(3), V2(3), V3(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 09/18/90 -C -C This function returns the area of a spherical triangle -C on the unit sphere. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of unit vectors (the -C three triangle vertices in any order). -C These vectors, if nonzero, are implicitly -C scaled to have length 1. -C -C Input parameters are not altered by this function. -C -C On output: -C -C AREAS = Area of the spherical triangle defined by -C V1, V2, and V3 in the range 0 to 2*PI (the -C area of a hemisphere). AREAS = 0 (or 2*PI) -C if and only if V1, V2, and V3 lie in (or -C close to) a plane containing the origin. -C -C Modules required by AREAS: None -C -C Intrinsic functions called by AREAS: ACOS, DBLE, REAL, -C SQRT -C -C*********************************************************** -C - DOUBLE PRECISION A1, A2, A3, CA1, CA2, CA3, DV1(3), - . DV2(3), DV3(3), S12, S23, S31, - . 
U12(3), U23(3), U31(3) - INTEGER I -C -C Local parameters: -C -C A1,A2,A3 = Interior angles of the spherical triangle -C CA1,CA2,CA3 = cos(A1), cos(A2), and cos(A3), respectively -C DV1,DV2,DV3 = Double Precision copies of V1, V2, and V3 -C I = DO-loop index and index for Uij -C S12,S23,S31 = Sum of squared components of U12, U23, U31 -C U12,U23,U31 = Unit normal vectors to the planes defined by -C pairs of triangle vertices -C - DO 1 I = 1,3 - DV1(I) = DBLE(V1(I)) - DV2(I) = DBLE(V2(I)) - DV3(I) = DBLE(V3(I)) - 1 CONTINUE -C -C Compute cross products Uij = Vi X Vj. -C - U12(1) = DV1(2)*DV2(3) - DV1(3)*DV2(2) - U12(2) = DV1(3)*DV2(1) - DV1(1)*DV2(3) - U12(3) = DV1(1)*DV2(2) - DV1(2)*DV2(1) -C - U23(1) = DV2(2)*DV3(3) - DV2(3)*DV3(2) - U23(2) = DV2(3)*DV3(1) - DV2(1)*DV3(3) - U23(3) = DV2(1)*DV3(2) - DV2(2)*DV3(1) -C - U31(1) = DV3(2)*DV1(3) - DV3(3)*DV1(2) - U31(2) = DV3(3)*DV1(1) - DV3(1)*DV1(3) - U31(3) = DV3(1)*DV1(2) - DV3(2)*DV1(1) -C -C Normalize Uij to unit vectors. -C - S12 = 0.D0 - S23 = 0.D0 - S31 = 0.D0 - DO 2 I = 1,3 - S12 = S12 + U12(I)*U12(I) - S23 = S23 + U23(I)*U23(I) - S31 = S31 + U31(I)*U31(I) - 2 CONTINUE -C -C Test for a degenerate triangle associated with collinear -C vertices. -C - IF (S12 .EQ. 0.D0 .OR. S23 .EQ. 0.D0 .OR. - . S31 .EQ. 0.D0) THEN - AREAS = 0. - RETURN - ENDIF - S12 = SQRT(S12) - S23 = SQRT(S23) - S31 = SQRT(S31) - DO 3 I = 1,3 - U12(I) = U12(I)/S12 - U23(I) = U23(I)/S23 - U31(I) = U31(I)/S31 - 3 CONTINUE -C -C Compute interior angles Ai as the dihedral angles between -C planes: -C CA1 = cos(A1) = - -C CA2 = cos(A2) = - -C CA3 = cos(A3) = - -C - CA1 = -U12(1)*U31(1)-U12(2)*U31(2)-U12(3)*U31(3) - CA2 = -U23(1)*U12(1)-U23(2)*U12(2)-U23(3)*U12(3) - CA3 = -U31(1)*U23(1)-U31(2)*U23(2)-U31(3)*U23(3) - IF (CA1 .LT. -1.D0) CA1 = -1.D0 - IF (CA1 .GT. 1.D0) CA1 = 1.D0 - IF (CA2 .LT. -1.D0) CA2 = -1.D0 - IF (CA2 .GT. 1.D0) CA2 = 1.D0 - IF (CA3 .LT. -1.D0) CA3 = -1.D0 - IF (CA3 .GT. 
1.D0) CA3 = 1.D0 - A1 = ACOS(CA1) - A2 = ACOS(CA2) - A3 = ACOS(CA3) -C -C Compute AREAS = A1 + A2 + A3 - PI. -C - AREAS = REAL(A1 + A2 + A3 - ACOS(-1.D0)) - IF (AREAS .LT. 0.) AREAS = 0. - RETURN - END - SUBROUTINE BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/11/96 -C -C This subroutine adds a boundary node to a triangulation -C of a set of KK-1 points on the unit sphere. The data -C structure is updated with the insertion of node KK, but no -C optimization is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of a node to be connected to the sequence -C of all visible boundary nodes. KK .GE. 1 and -C KK must not be equal to I1 or I2. -C -C I1 = First (rightmost as viewed from KK) boundary -C node in the triangulation that is visible from -C node KK (the line segment KK-I1 intersects no -C arcs. -C -C I2 = Last (leftmost) boundary node that is visible -C from node KK. I1 and I2 may be determined by -C Subroutine TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Nodes I1 and I2 must be in- -C cluded in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. Node -C KK is connected to I1, I2, and -C all boundary nodes in between. 
-C -C Module required by BDYADD: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, N1, N2, NEXT, NSAV -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C N1,N2 = Local copies of I1 and I2, respectively -C NEXT = Boundary node visible from K -C NSAV = Boundary node visible from K -C - K = KK - N1 = I1 - N2 = I2 -C -C Add K as the last neighbor of N1. -C - LP = LEND(N1) - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = -K - LPTR(LNEW) = LSAV - LEND(N1) = LNEW - LNEW = LNEW + 1 - NEXT = -LIST(LP) - LIST(LP) = NEXT - NSAV = NEXT -C -C Loop on the remaining boundary nodes between N1 and N2, -C adding K as the first neighbor. -C - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - IF (NEXT .EQ. N2) GO TO 2 - NEXT = -LIST(LP) - LIST(LP) = NEXT - GO TO 1 -C -C Add the boundary nodes between N1 and N2 as neighbors -C of node K. -C - 2 LSAV = LNEW - LIST(LNEW) = N1 - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = NSAV -C - 3 IF (NEXT .EQ. N2) GO TO 4 - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - LP = LEND(NEXT) - NEXT = LIST(LP) - GO TO 3 -C - 4 LIST(LNEW) = -N2 - LPTR(LNEW) = LSAV - LEND(K) = LNEW - LNEW = LNEW + 1 - RETURN - END - SUBROUTINE BNODES (N,LIST,LPTR,LEND, NODES,NB,NA,NT) - INTEGER N, LIST(*), LPTR(*), LEND(N), NODES(*), NB, - . NA, NT -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/26/96 -C -C Given a triangulation of N nodes on the unit sphere -C created by Subroutine TRMESH, this subroutine returns an -C array containing the indexes (if any) of the counterclock- -C wise-ordered sequence of boundary nodes -- the nodes on -C the boundary of the convex hull of the set of nodes. (The -C boundary is empty if the nodes do not lie in a single -C hemisphere.) 
The numbers of boundary nodes, arcs, and -C triangles are also returned. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C The above parameters are not altered by this routine. -C -C NODES = Integer array of length at least NB -C (NB .LE. N). -C -C On output: -C -C NODES = Ordered sequence of boundary node indexes -C in the range 1 to N (in the first NB loca- -C tions). -C -C NB = Number of boundary nodes. -C -C NA,NT = Number of arcs and triangles, respectively, -C in the triangulation. -C -C Modules required by BNODES: None -C -C*********************************************************** -C - INTEGER K, LP, N0, NN, NST -C -C Local parameters: -C -C K = NODES index -C LP = LIST pointer -C N0 = Boundary node to be added to NODES -C NN = Local copy of N -C NST = First element of nodes (arbitrarily chosen to be -C the one with smallest index) -C - NN = N -C -C Search for a boundary node. -C - DO 1 NST = 1,NN - LP = LEND(NST) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation contains no boundary nodes. -C - NB = 0 - NA = 3*(NN-2) - NT = 2*(NN-2) - RETURN -C -C NST is the first boundary node encountered. Initialize -C for traversal of the boundary. -C - 2 NODES(1) = NST - K = 1 - N0 = NST -C -C Traverse the boundary in counterclockwise order. -C - 3 LP = LEND(N0) - LP = LPTR(LP) - N0 = LIST(LP) - IF (N0 .EQ. NST) GO TO 4 - K = K + 1 - NODES(K) = N0 - GO TO 3 -C -C Store the counts. -C - 4 NB = K - NT = 2*N - NB - 2 - NA = NT + N - 1 - RETURN - END - SUBROUTINE CIRCUM (V1,V2,V3, C,IER) - INTEGER IER - REAL V1(3), V2(3), V3(3), C(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 06/29/95 -C -C This subroutine returns the circumcenter of a spherical -C triangle on the unit sphere: the point on the sphere sur- -C face that is equally distant from the three triangle -C vertices and lies in the same hemisphere, where distance -C is taken to be arc-length on the sphere surface. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of the three triangle -C vertices (unit vectors) in CCW order. -C -C The above parameters are not altered by this routine. -C -C C = Array of length 3. -C -C On output: -C -C C = Cartesian coordinates of the circumcenter unless -C IER > 0, in which case C is not defined. C = -C (V2-V1) X (V3-V1) normalized to a unit vector. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if V1, V2, and V3 lie on a common -C line: (V2-V1) X (V3-V1) = 0. -C (The vertices are not tested for validity.) -C -C Modules required by CIRCUM: None -C -C Intrinsic function called by CIRCUM: SQRT -C -C*********************************************************** -C - INTEGER I - REAL CNORM, CU(3), E1(3), E2(3) -C -C Local parameters: -C -C CNORM = Norm of CU: used to compute C -C CU = Scalar multiple of C: E1 X E2 -C E1,E2 = Edges of the underlying planar triangle: -C V2-V1 and V3-V1, respectively -C I = DO-loop index -C - DO 1 I = 1,3 - E1(I) = V2(I) - V1(I) - E2(I) = V3(I) - V1(I) - 1 CONTINUE -C -C Compute CU = E1 X E2 and CNORM**2. -C - CU(1) = E1(2)*E2(3) - E1(3)*E2(2) - CU(2) = E1(3)*E2(1) - E1(1)*E2(3) - CU(3) = E1(1)*E2(2) - E1(2)*E2(1) - CNORM = CU(1)*CU(1) + CU(2)*CU(2) + CU(3)*CU(3) -C -C The vertices lie on a common line if and only if CU is -C the zero vector. -C - IF (CNORM .NE. 0.) THEN -C -C No error: compute C. -C - CNORM = SQRT(CNORM) - DO 2 I = 1,3 - C(I) = CU(I)/CNORM - 2 CONTINUE - IER = 0 - ELSE -C -C CU = 0. 
-C - IER = 1 - ENDIF - RETURN - END - SUBROUTINE COVSPH (KK,N0, LIST,LPTR,LEND,LNEW ) - INTEGER KK, N0, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine connects an exterior node KK to all -C boundary nodes of a triangulation of KK-1 points on the -C unit sphere, producing a triangulation that covers the -C sphere. The data structure is updated with the addition -C of node KK, but no optimization is performed. All boun- -C dary nodes must be visible from node KK. -C -C -C On input: -C -C KK = Index of the node to be connected to the set of -C all boundary nodes. KK .GE. 4. -C -C N0 = Index of a boundary node (in the range 1 to -C KK-1). N0 may be determined by Subroutine -C TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Node N0 must be included in -C the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK as the -C last entry. The updated -C triangulation contains no -C boundary nodes. -C -C Module required by COVSPH: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, NEXT, NST -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C NEXT = Boundary node visible from K -C NST = Local copy of N0 -C - K = KK - NST = N0 -C -C Traverse the boundary in clockwise order, inserting K as -C the first neighbor of each boundary node, and converting -C the boundary node to an interior node. -C - NEXT = NST - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - NEXT = -LIST(LP) - LIST(LP) = NEXT - IF (NEXT .NE. NST) GO TO 1 -C -C Traverse the boundary again, adding each node to K's -C adjacency list. 
-C - LSAV = LNEW - 2 LP = LEND(NEXT) - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = LIST(LP) - IF (NEXT .NE. NST) GO TO 2 -C - LPTR(LNEW-1) = LSAV - LEND(K) = LNEW - 1 - RETURN - END - SUBROUTINE CRLIST (N,NCOL,X,Y,Z,LIST,LEND, LPTR,LNEW, - . LTRI, LISTC,NB,XC,YC,ZC,RC,IER) - INTEGER N, NCOL, LIST(*), LEND(N), LPTR(*), LNEW, - . LTRI(6,NCOL), LISTC(*), NB, IER - REAL X(N), Y(N), Z(N), XC(*), YC(*), ZC(*), RC(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/05/98 -C -C Given a Delaunay triangulation of nodes on the surface -C of the unit sphere, this subroutine returns the set of -C triangle circumcenters corresponding to Voronoi vertices, -C along with the circumradii and a list of triangle indexes -C LISTC stored in one-to-one correspondence with LIST/LPTR -C entries. -C -C A triangle circumcenter is the point (unit vector) lying -C at the same angular distance from the three vertices and -C contained in the same hemisphere as the vertices. (Note -C that the negative of a circumcenter is also equidistant -C from the vertices.) If the triangulation covers the sur- -C face, the Voronoi vertices are the circumcenters of the -C triangles in the Delaunay triangulation. LPTR, LEND, and -C LNEW are not altered in this case. -C -C On the other hand, if the nodes are contained in a sin- -C gle hemisphere, the triangulation is implicitly extended -C to the entire surface by adding pseudo-arcs (of length -C greater than 180 degrees) between boundary nodes forming -C pseudo-triangles whose 'circumcenters' are included in the -C list. This extension to the triangulation actually con- -C sists of a triangulation of the set of boundary nodes in -C which the swap test is reversed (a non-empty circumcircle -C test). The negative circumcenters are stored as the -C pseudo-triangle 'circumcenters'. 
LISTC, LPTR, LEND, and -C LNEW contain a data structure corresponding to the ex- -C tended triangulation (Voronoi diagram), but LIST is not -C altered in this case. Thus, if it is necessary to retain -C the original (unextended) triangulation data structure, -C copies of LPTR and LNEW must be saved before calling this -C routine. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C Note that, if N = 3, there are only two Voronoi -C vertices separated by 180 degrees, and the -C Voronoi regions are not well defined. -C -C NCOL = Number of columns reserved for LTRI. This -C must be at least NB-2, where NB is the number -C of boundary nodes. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST = Integer array containing the set of adjacency -C lists. Refer to Subroutine TRMESH. -C -C LEND = Set of pointers to ends of adjacency lists. -C Refer to Subroutine TRMESH. -C -C The above parameters are not altered by this routine. -C -C LPTR = Array of pointers associated with LIST. Re- -C fer to Subroutine TRMESH. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). -C -C LTRI = Integer work space array dimensioned 6 by -C NCOL, or unused dummy parameter if NB = 0. -C -C LISTC = Integer array of length at least 3*NT, where -C NT = 2*N-4 is the number of triangles in the -C triangulation (after extending it to cover -C the entire surface if necessary). -C -C XC,YC,ZC,RC = Arrays of length NT = 2*N-4. -C -C On output: -C -C LPTR = Array of pointers associated with LISTC: -C updated for the addition of pseudo-triangles -C if the original triangulation contains -C boundary nodes (NB > 0). -C -C LNEW = Pointer to the first empty location in LISTC -C and LPTR (list length plus one). LNEW is not -C altered if NB = 0. 
-C -C LTRI = Triangle list whose first NB-2 columns con- -C tain the indexes of a clockwise-ordered -C sequence of vertices (first three rows) -C followed by the LTRI column indexes of the -C triangles opposite the vertices (or 0 -C denoting the exterior region) in the last -C three rows. This array is not generally of -C any use. -C -C LISTC = Array containing triangle indexes (indexes -C to XC, YC, ZC, and RC) stored in 1-1 corres- -C pondence with LIST/LPTR entries (or entries -C that would be stored in LIST for the -C extended triangulation): the index of tri- -C angle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C NB = Number of boundary nodes unless IER = 1. -C -C XC,YC,ZC = Arrays containing the Cartesian coordi- -C nates of the triangle circumcenters -C (Voronoi vertices). XC(I)**2 + YC(I)**2 -C + ZC(I)**2 = 1. The first NB-2 entries -C correspond to pseudo-triangles if NB > 0. -C -C RC = Array containing circumradii (the arc lengths -C or angles between the circumcenters and associ- -C ated triangle vertices) in 1-1 correspondence -C with circumcenters. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N < 3. -C IER = 2 if NCOL < NB-2. -C IER = 3 if a triangle is degenerate (has ver- -C tices lying on a common geodesic). -C -C Modules required by CRLIST: CIRCUM, LSTPTR, SWPTST -C -C Intrinsic functions called by CRLIST: ABS, ACOS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER I1, I2, I3, I4, IERR, KT, KT1, KT2, KT11, - . KT12, KT21, KT22, LP, LPL, LPN, N0, N1, N2, - . 
N3, N4, NM2, NN, NT - LOGICAL SWPTST - LOGICAL SWP - REAL C(3), T, V1(3), V2(3), V3(3) -C -C Local parameters: -C -C C = Circumcenter returned by Subroutine CIRCUM -C I1,I2,I3 = Permutation of (1,2,3): LTRI row indexes -C I4 = LTRI row index in the range 1 to 3 -C IERR = Error flag for calls to CIRCUM -C KT = Triangle index -C KT1,KT2 = Indexes of a pair of adjacent pseudo-triangles -C KT11,KT12 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT1 -C KT21,KT22 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT2 -C LP,LPN = LIST pointers -C LPL = LIST pointer of the last neighbor of N1 -C N0 = Index of the first boundary node (initial -C value of N1) in the loop on boundary nodes -C used to store the pseudo-triangle indexes -C in LISTC -C N1,N2,N3 = Nodal indexes defining a triangle (CCW order) -C or pseudo-triangle (clockwise order) -C N4 = Index of the node opposite N2 -> N1 -C NM2 = N-2 -C NN = Local copy of N -C NT = Number of pseudo-triangles: NB-2 -C SWP = Logical variable set to TRUE in each optimiza- -C tion loop (loop on pseudo-arcs) iff a swap -C is performed -C V1,V2,V3 = Vertices of triangle KT = (N1,N2,N3) sent to -C Subroutine CIRCUM -C - NN = N - NB = 0 - NT = 0 - IF (NN .LT. 3) GO TO 21 -C -C Search for a boundary node N1. -C - DO 1 N1 = 1,NN - LP = LEND(N1) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation already covers the sphere. -C - GO TO 9 -C -C There are NB .GE. 3 boundary nodes. Add NB-2 pseudo- -C triangles (N1,N2,N3) by connecting N3 to the NB-3 -C boundary nodes to which it is not already adjacent. -C -C Set N3 and N2 to the first and last neighbors, -C respectively, of N1. -C - 2 N2 = -LIST(LP) - LP = LPTR(LP) - N3 = LIST(LP) -C -C Loop on boundary arcs N1 -> N2 in clockwise order, -C storing triangles (N1,N2,N3) in column NT of LTRI -C along with the indexes of the triangles opposite -C the vertices. -C - 3 NT = NT + 1 - IF (NT .LE. 
NCOL) THEN - LTRI(1,NT) = N1 - LTRI(2,NT) = N2 - LTRI(3,NT) = N3 - LTRI(4,NT) = NT + 1 - LTRI(5,NT) = NT - 1 - LTRI(6,NT) = 0 - ENDIF - N1 = N2 - LP = LEND(N1) - N2 = -LIST(LP) - IF (N2 .NE. N3) GO TO 3 -C - NB = NT + 2 - IF (NCOL .LT. NT) GO TO 22 - LTRI(4,NT) = 0 - IF (NT .EQ. 1) GO TO 7 -C -C Optimize the exterior triangulation (set of pseudo- -C triangles) by applying swaps to the pseudo-arcs N1-N2 -C (pairs of adjacent pseudo-triangles KT1 and KT2 > KT1). -C The loop on pseudo-arcs is repeated until no swaps are -C performed. -C - 4 SWP = .FALSE. - DO 6 KT1 = 1,NT-1 - DO 5 I3 = 1,3 - KT2 = LTRI(I3+3,KT1) - IF (KT2 .LE. KT1) GO TO 5 -C -C The LTRI row indexes (I1,I2,I3) of triangle KT1 = -C (N1,N2,N3) are a cyclical permutation of (1,2,3). -C - IF (I3 .EQ. 1) THEN - I1 = 2 - I2 = 3 - ELSEIF (I3 .EQ. 2) THEN - I1 = 3 - I2 = 1 - ELSE - I1 = 1 - I2 = 2 - ENDIF - N1 = LTRI(I1,KT1) - N2 = LTRI(I2,KT1) - N3 = LTRI(I3,KT1) -C -C KT2 = (N2,N1,N4) for N4 = LTRI(I,KT2), where -C LTRI(I+3,KT2) = KT1. -C - IF (LTRI(4,KT2) .EQ. KT1) THEN - I4 = 1 - ELSEIF (LTRI(5,KT2) .EQ. KT1) THEN - I4 = 2 - ELSE - I4 = 3 - ENDIF - N4 = LTRI(I4,KT2) -C -C The empty circumcircle test is reversed for the pseudo- -C triangles. The reversal is implicit in the clockwise -C ordering of the vertices. -C - IF ( .NOT. SWPTST(N1,N2,N3,N4,X,Y,Z) ) GO TO 5 -C -C Swap arc N1-N2 for N3-N4. KTij is the triangle opposite -C Nj as a vertex of KTi. -C - SWP = .TRUE. - KT11 = LTRI(I1+3,KT1) - KT12 = LTRI(I2+3,KT1) - IF (I4 .EQ. 1) THEN - I2 = 2 - I1 = 3 - ELSEIF (I4 .EQ. 2) THEN - I2 = 3 - I1 = 1 - ELSE - I2 = 1 - I1 = 2 - ENDIF - KT21 = LTRI(I1+3,KT2) - KT22 = LTRI(I2+3,KT2) - LTRI(1,KT1) = N4 - LTRI(2,KT1) = N3 - LTRI(3,KT1) = N1 - LTRI(4,KT1) = KT12 - LTRI(5,KT1) = KT22 - LTRI(6,KT1) = KT2 - LTRI(1,KT2) = N3 - LTRI(2,KT2) = N4 - LTRI(3,KT2) = N2 - LTRI(4,KT2) = KT21 - LTRI(5,KT2) = KT11 - LTRI(6,KT2) = KT1 -C -C Correct the KT11 and KT22 entries that changed. -C - IF (KT11 .NE. 
0) THEN - I4 = 4 - IF (LTRI(4,KT11) .NE. KT1) THEN - I4 = 5 - IF (LTRI(5,KT11) .NE. KT1) I4 = 6 - ENDIF - LTRI(I4,KT11) = KT2 - ENDIF - IF (KT22 .NE. 0) THEN - I4 = 4 - IF (LTRI(4,KT22) .NE. KT2) THEN - I4 = 5 - IF (LTRI(5,KT22) .NE. KT2) I4 = 6 - ENDIF - LTRI(I4,KT22) = KT1 - ENDIF - 5 CONTINUE - 6 CONTINUE - IF (SWP) GO TO 4 -C -C Compute and store the negative circumcenters and radii of -C the pseudo-triangles in the first NT positions. -C - 7 DO 8 KT = 1,NT - N1 = LTRI(1,KT) - N2 = LTRI(2,KT) - N3 = LTRI(3,KT) - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the negative circumcenter and radius (computed -C from ). -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 1.0) T = 1.0 - RC(KT) = ACOS(T) - 8 CONTINUE -C -C Compute and store the circumcenters and radii of the -C actual triangles in positions KT = NT+1, NT+2, ... -C Also, store the triangle indexes KT in the appropriate -C LISTC positions. -C - 9 KT = NT -C -C Loop on nodes N1. -C - NM2 = NN - 2 - DO 12 N1 = 1,NM2 - LPL = LEND(N1) - LP = LPL - N3 = LIST(LP) -C -C Loop on adjacent neighbors N2,N3 of N1 for which N2 > N1 -C and N3 > N1. -C - 10 LP = LPTR(LP) - N2 = N3 - N3 = ABS(LIST(LP)) - IF (N2 .LE. N1 .OR. N3 .LE. N1) GO TO 11 - KT = KT + 1 -C -C Compute the circumcenter C of triangle KT = (N1,N2,N3). -C - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the circumcenter, radius and triangle index. -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 
1.0) T = 1.0 - RC(KT) = ACOS(T) -C -C Store KT in LISTC(LPN), where Abs(LIST(LPN)) is the -C index of N2 as a neighbor of N1, N3 as a neighbor -C of N2, and N1 as a neighbor of N3. -C - LPN = LSTPTR(LPL,N2,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N2),N3,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N3),N1,LIST,LPTR) - LISTC(LPN) = KT - 11 IF (LP .NE. LPL) GO TO 10 - 12 CONTINUE - IF (NT .EQ. 0) GO TO 20 -C -C Store the first NT triangle indexes in LISTC. -C -C Find a boundary triangle KT1 = (N1,N2,N3) with a -C boundary arc opposite N3. -C - KT1 = 0 - 13 KT1 = KT1 + 1 - IF (LTRI(4,KT1) .EQ. 0) THEN - I1 = 2 - I2 = 3 - I3 = 1 - GO TO 14 - ELSEIF (LTRI(5,KT1) .EQ. 0) THEN - I1 = 3 - I2 = 1 - I3 = 2 - GO TO 14 - ELSEIF (LTRI(6,KT1) .EQ. 0) THEN - I1 = 1 - I2 = 2 - I3 = 3 - GO TO 14 - ENDIF - GO TO 13 - 14 N1 = LTRI(I1,KT1) - N0 = N1 -C -C Loop on boundary nodes N1 in CCW order, storing the -C indexes of the clockwise-ordered sequence of triangles -C that contain N1. The first triangle overwrites the -C last neighbor position, and the remaining triangles, -C if any, are appended to N1's adjacency list. -C -C A pointer to the first neighbor of N1 is saved in LPN. -C - 15 LP = LEND(N1) - LPN = LPTR(LP) - LISTC(LP) = KT1 -C -C Loop on triangles KT2 containing N1. -C - 16 KT2 = LTRI(I2+3,KT1) - IF (KT2 .NE. 0) THEN -C -C Append KT2 to N1's triangle list. -C - LPTR(LP) = LNEW - LP = LNEW - LISTC(LP) = KT2 - LNEW = LNEW + 1 -C -C Set KT1 to KT2 and update (I1,I2,I3) such that -C LTRI(I1,KT1) = N1. -C - KT1 = KT2 - IF (LTRI(1,KT1) .EQ. N1) THEN - I1 = 1 - I2 = 2 - I3 = 3 - ELSEIF (LTRI(2,KT1) .EQ. N1) THEN - I1 = 2 - I2 = 3 - I3 = 1 - ELSE - I1 = 3 - I2 = 1 - I3 = 2 - ENDIF - GO TO 16 - ENDIF -C -C Store the saved first-triangle pointer in LPTR(LP), set -C N1 to the next boundary node, test for termination, -C and permute the indexes: the last triangle containing -C a boundary node is the first triangle containing the -C next boundary node. 
-C - LPTR(LP) = LPN - N1 = LTRI(I3,KT1) - IF (N1 .NE. N0) THEN - I4 = I3 - I3 = I2 - I2 = I1 - I1 = I4 - GO TO 15 - ENDIF -C -C No errors encountered. -C - 20 IER = 0 - RETURN -C -C N < 3. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for LTRI. -C - 22 IER = 2 - RETURN -C -C Error flag returned by CIRCUM: KT indexes a null triangle. -C - 23 IER = 3 - RETURN - END - SUBROUTINE DELARC (N,IO1,IO2, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER N, IO1, IO2, LIST(*), LPTR(*), LEND(N), LNEW, - . IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine deletes a boundary arc from a triangula- -C tion. It may be used to remove a null triangle from the -C convex hull boundary. Note, however, that if the union of -C triangles is rendered nonconvex, Subroutines DELNOD, EDGE, -C and TRFIND (and hence ADDNOD) may fail. Also, Function -C NEARND should not be called following an arc deletion. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 4. -C -C IO1,IO2 = Indexes (in the range 1 to N) of a pair of -C adjacent boundary nodes defining the arc -C to be removed. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of arc IO1-IO2 -C unless IER > 0. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N, IO1, or IO2 is outside its valid -C range, or IO1 = IO2. -C IER = 2 if IO1-IO2 is not a boundary arc. -C IER = 3 if the node opposite IO1-IO2 is al- -C ready a boundary node, and thus IO1 -C or IO2 has only two neighbors or a -C deletion would result in two triangu- -C lations sharing a single node. 
-C IER = 4 if one of the nodes is a neighbor of -C the other, but not vice versa, imply- -C ing an invalid triangulation data -C structure. -C -C Module required by DELARC: DELNB, LSTPTR -C -C Intrinsic function called by DELARC: ABS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER LP, LPH, LPL, N1, N2, N3 -C -C Local parameters: -C -C LP = LIST pointer -C LPH = LIST pointer or flag returned by DELNB -C LPL = Pointer to the last neighbor of N1, N2, or N3 -C N1,N2,N3 = Nodal indexes of a triangle such that N1->N2 -C is the directed boundary edge associated -C with IO1-IO2 -C - N1 = IO1 - N2 = IO2 -C -C Test for errors, and set N1->N2 to the directed boundary -C edge associated with IO1-IO2: (N1,N2,N3) is a triangle -C for some N3. -C - IF (N .LT. 4 .OR. N1 .LT. 1 .OR. N1 .GT. N .OR. - . N2 .LT. 1 .OR. N2 .GT. N .OR. N1 .EQ. N2) THEN - IER = 1 - RETURN - ENDIF -C - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - N1 = N2 - N2 = IO1 - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - IER = 2 - RETURN - ENDIF - ENDIF -C -C Set N3 to the node opposite N1->N2 (the second neighbor -C of N1), and test for error 3 (N3 already a boundary -C node). -C - LPL = LEND(N1) - LP = LPTR(LPL) - LP = LPTR(LP) - N3 = ABS(LIST(LP)) - LPL = LEND(N3) - IF (LIST(LPL) .LE. 0) THEN - IER = 3 - RETURN - ENDIF -C -C Delete N2 as a neighbor of N1, making N3 the first -C neighbor, and test for error 4 (N2 not a neighbor -C of N1). Note that previously computed pointers may -C no longer be valid following the call to DELNB. -C - CALL DELNB (N1,N2,N, LIST,LPTR,LEND,LNEW, LPH) - IF (LPH .LT. 0) THEN - IER = 4 - RETURN - ENDIF -C -C Delete N1 as a neighbor of N2, making N3 the new last -C neighbor. -C - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNEW, LPH) -C -C Make N3 a boundary node with first neighbor N2 and last -C neighbor N1. -C - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - LEND(N3) = LP - LIST(LP) = -N1 -C -C No errors encountered. 
-C - IER = 0 - RETURN - END - SUBROUTINE DELNB (N0,NB,N, LIST,LPTR,LEND,LNEW, LPH) - INTEGER N0, NB, N, LIST(*), LPTR(*), LEND(N), LNEW, - . LPH -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/29/98 -C -C This subroutine deletes a neighbor NB from the adjacency -C list of node N0 (but N0 is not deleted from the adjacency -C list of NB) and, if NB is a boundary node, makes N0 a -C boundary node. For pointer (LIST index) LPH to NB as a -C neighbor of N0, the empty LIST,LPTR location LPH is filled -C in with the values at LNEW-1, pointer LNEW-1 (in LPTR and -C possibly in LEND) is changed to LPH, and LNEW is decremen- -C ted. This requires a search of LEND and LPTR entailing an -C expected operation count of O(N). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N0,NB = Indexes, in the range 1 to N, of a pair of -C nodes such that NB is a neighbor of N0. -C (N0 need not be a neighbor of NB.) -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of NB from the ad- -C jacency list of N0 unless -C LPH < 0. -C -C LPH = List pointer to the hole (NB as a neighbor of -C N0) filled in by the values at LNEW-1 or error -C indicator: -C LPH > 0 if no errors were encountered. -C LPH = -1 if N0, NB, or N is outside its valid -C range. -C LPH = -2 if NB is not a neighbor of N0. 
-C -C Modules required by DELNB: None -C -C Intrinsic function called by DELNB: ABS -C -C*********************************************************** -C - INTEGER I, LNW, LP, LPB, LPL, LPP, NN -C -C Local parameters: -C -C I = DO-loop index -C LNW = LNEW-1 (output value of LNEW) -C LP = LIST pointer of the last neighbor of NB -C LPB = Pointer to NB as a neighbor of N0 -C LPL = Pointer to the last neighbor of N0 -C LPP = Pointer to the neighbor of N0 that precedes NB -C NN = Local copy of N -C - NN = N -C -C Test for error 1. -C - IF (N0 .LT. 1 .OR. N0 .GT. NN .OR. NB .LT. 1 .OR. - . NB .GT. NN .OR. NN .LT. 3) THEN - LPH = -1 - RETURN - ENDIF -C -C Find pointers to neighbors of N0: -C -C LPL points to the last neighbor, -C LPP points to the neighbor NP preceding NB, and -C LPB points to NB. -C - LPL = LEND(N0) - LPP = LPL - LPB = LPTR(LPP) - 1 IF (LIST(LPB) .EQ. NB) GO TO 2 - LPP = LPB - LPB = LPTR(LPP) - IF (LPB .NE. LPL) GO TO 1 -C -C Test for error 2 (NB not found). -C - IF (ABS(LIST(LPB)) .NE. NB) THEN - LPH = -2 - RETURN - ENDIF -C -C NB is the last neighbor of N0. Make NP the new last -C neighbor and, if NB is a boundary node, then make N0 -C a boundary node. -C - LEND(N0) = LPP - LP = LEND(NB) - IF (LIST(LP) .LT. 0) LIST(LPP) = -LIST(LPP) - GO TO 3 -C -C NB is not the last neighbor of N0. If NB is a boundary -C node and N0 is not, then make N0 a boundary node with -C last neighbor NP. -C - 2 LP = LEND(NB) - IF (LIST(LP) .LT. 0 .AND. LIST(LPL) .GT. 0) THEN - LEND(N0) = LPP - LIST(LPP) = -LIST(LPP) - ENDIF -C -C Update LPTR so that the neighbor following NB now fol- -C lows NP, and fill in the hole at location LPB. -C - 3 LPTR(LPP) = LPTR(LPB) - LNW = LNEW-1 - LIST(LPB) = LIST(LNW) - LPTR(LPB) = LPTR(LNW) - DO 4 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LPB - GO TO 5 - ENDIF - 4 CONTINUE -C - 5 DO 6 I = 1,LNW-1 - IF (LPTR(I) .EQ. LNW) THEN - LPTR(I) = LPB - ENDIF - 6 CONTINUE -C -C No errors encountered. 
-C - LNEW = LNW - LPH = LPB - RETURN - END - SUBROUTINE DELNOD (K, N,X,Y,Z,LIST,LPTR,LEND,LNEW,LWK, - . IWK, IER) - INTEGER K, N, LIST(*), LPTR(*), LEND(*), LNEW, LWK, - . IWK(2,*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine deletes node K (along with all arcs -C incident on node K) from a triangulation of N nodes on the -C unit sphere, and inserts arcs as necessary to produce a -C triangulation of the remaining N-1 nodes. If a Delaunay -C triangulation is input, a Delaunay triangulation will -C result, and thus, DELNOD reverses the effect of a call to -C Subroutine ADDNOD. -C -C -C On input: -C -C K = Index (for X, Y, and Z) of the node to be -C deleted. 1 .LE. K .LE. N. -C -C K is not altered by this routine. -C -C N = Number of nodes in the triangulation on input. -C N .GE. 4. Note that N will be decremented -C following the deletion. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes in the triangula- -C tion. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. -C -C LWK = Number of columns reserved for IWK. LWK must -C be at least NNB-3, where NNB is the number of -C neighbors of node K, including an extra -C pseudo-node if K is a boundary node. -C -C IWK = Integer work array dimensioned 2 by LWK (or -C array of length .GE. 2*LWK). -C -C On output: -C -C N = Number of nodes in the triangulation on output. -C The input value is decremented unless 1 .LE. IER -C .LE. 4. -C -C X,Y,Z = Updated arrays containing nodal coordinates -C (with elements K+1,...,N+1 shifted up one -C position, thus overwriting element K) unless -C 1 .LE. IER .LE. 4. -C -C LIST,LPTR,LEND,LNEW = Updated triangulation data -C structure reflecting the dele- -C tion unless 1 .LE. IER .LE. 4. 
-C Note that the data structure -C may have been altered if IER > -C 3. -C -C LWK = Number of IWK columns required unless IER = 1 -C or IER = 3. -C -C IWK = Indexes of the endpoints of the new arcs added -C unless LWK = 0 or 1 .LE. IER .LE. 4. (Arcs -C are associated with columns, or pairs of -C adjacent elements if IWK is declared as a -C singly-subscripted array.) -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if K or N is outside its valid range -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if the triangulation data structure is -C invalid on input. -C IER = 4 if K indexes an interior node with -C four or more neighbors, none of which -C can be swapped out due to collineari- -C ty, and K cannot therefore be deleted. -C IER = 5 if an error flag (other than IER = 1) -C was returned by OPTIM. An error -C message is written to the standard -C output unit in this case. -C IER = 6 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs may not be optimal. -C -C Note that the deletion may result in all remaining nodes -C being collinear. This situation is not flagged. -C -C Modules required by DELNOD: DELNB, LEFT, LSTPTR, NBCNT, -C OPTIM, SWAP, SWPTST -C -C Intrinsic function called by DELNOD: ABS -C -C*********************************************************** -C - INTEGER LSTPTR, NBCNT - INTEGER I, IERR, IWL, J, LNW, LP, LP21, LPF, LPH, LPL, - . LPL2, LPN, LWKL, N1, N2, NFRST, NIT, NL, NN, - . 
NNB, NR - LOGICAL LEFT - LOGICAL BDRY - REAL X1, X2, XL, XR, Y1, Y2, YL, YR, Z1, Z2, ZL, ZR -C -C Local parameters: -C -C BDRY = Logical variable with value TRUE iff N1 is a -C boundary node -C I,J = DO-loop indexes -C IERR = Error flag returned by OPTIM -C IWL = Number of IWK columns containing arcs -C LNW = Local copy of LNEW -C LP = LIST pointer -C LP21 = LIST pointer returned by SWAP -C LPF,LPL = Pointers to the first and last neighbors of N1 -C LPH = Pointer (or flag) returned by DELNB -C LPL2 = Pointer to the last neighbor of N2 -C LPN = Pointer to a neighbor of N1 -C LWKL = Input value of LWK -C N1 = Local copy of K -C N2 = Neighbor of N1 -C NFRST = First neighbor of N1: LIST(LPF) -C NIT = Number of iterations in OPTIM -C NR,NL = Neighbors of N1 preceding (to the right of) and -C following (to the left of) N2, respectively -C NN = Number of nodes in the triangulation -C NNB = Number of neighbors of N1 (including a pseudo- -C node representing the boundary if N1 is a -C boundary node) -C X1,Y1,Z1 = Coordinates of N1 -C X2,Y2,Z2 = Coordinates of N2 -C XL,YL,ZL = Coordinates of NL -C XR,YR,ZR = Coordinates of NR -C -C -C Set N1 to K and NNB to the number of neighbors of N1 (plus -C one if N1 is a boundary node), and test for errors. LPF -C and LPL are LIST indexes of the first and last neighbors -C of N1, IWL is the number of IWK columns containing arcs, -C and BDRY is TRUE iff N1 is a boundary node. -C - N1 = K - NN = N - IF (N1 .LT. 1 .OR. N1 .GT. NN .OR. NN .LT. 4 .OR. - . LWK .LT. 0) GO TO 21 - LPL = LEND(N1) - LPF = LPTR(LPL) - NNB = NBCNT(LPL,LPTR) - BDRY = LIST(LPL) .LT. 0 - IF (BDRY) NNB = NNB + 1 - IF (NNB .LT. 3) GO TO 23 - LWKL = LWK - LWK = NNB - 3 - IF (LWKL .LT. LWK) GO TO 22 - IWL = 0 - IF (NNB .EQ. 3) GO TO 3 -C -C Initialize for loop on arcs N1-N2 for neighbors N2 of N1, -C beginning with the second neighbor. NR and NL are the -C neighbors preceding and following N2, respectively, and -C LP indexes NL. 
The loop is exited when all possible -C swaps have been applied to arcs incident on N1. -C - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - NFRST = LIST(LPF) - NR = NFRST - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - LP = LPTR(LPF) - N2 = LIST(LP) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) - LP = LPTR(LP) -C -C Top of loop: set NL to the neighbor following N2. -C - 1 NL = ABS(LIST(LP)) - IF (NL .EQ. NFRST .AND. BDRY) GO TO 3 - XL = X(NL) - YL = Y(NL) - ZL = Z(NL) -C -C Test for a convex quadrilateral. To avoid an incorrect -C test caused by collinearity, use the fact that if N1 -C is a boundary node, then N1 LEFT NR->NL and if N2 is -C a boundary node, then N2 LEFT NL->NR. -C - LPL2 = LEND(N2) - IF ( .NOT. ((BDRY .OR. LEFT(XR,YR,ZR,XL,YL,ZL,X1,Y1, - . Z1)) .AND. (LIST(LPL2) .LT. 0 .OR. - . LEFT(XL,YL,ZL,XR,YR,ZR,X2,Y2,Z2))) ) THEN -C -C Nonconvex quadrilateral -- no swap is possible. -C - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF -C -C The quadrilateral defined by adjacent triangles -C (N1,N2,NL) and (N2,N1,NR) is convex. Swap in -C NL-NR and store it in IWK unless NL and NR are -C already adjacent, in which case the swap is not -C possible. Indexes larger than N1 must be decremented -C since N1 will be deleted from X, Y, and Z. -C - CALL SWAP (NL,NR,N1,N2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) THEN - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF - IWL = IWL + 1 - IF (NL .LE. N1) THEN - IWK(1,IWL) = NL - ELSE - IWK(1,IWL) = NL - 1 - ENDIF - IF (NR .LE. N1) THEN - IWK(2,IWL) = NR - ELSE - IWK(2,IWL) = NR - 1 - ENDIF -C -C Recompute the LIST indexes and NFRST, and decrement NNB. -C - LPL = LEND(N1) - NNB = NNB - 1 - IF (NNB .EQ. 3) GO TO 3 - LPF = LPTR(LPL) - NFRST = LIST(LPF) - LP = LSTPTR(LPL,NL,LIST,LPTR) - IF (NR .EQ. NFRST) GO TO 2 -C -C NR is not the first neighbor of N1. -C Back up and test N1-NR for a swap again: Set N2 to -C NR and NR to the previous neighbor of N1 -- the -C neighbor of NR which follows N1. LP21 points to NL -C as a neighbor of NR. 
-C - N2 = NR - X2 = XR - Y2 = YR - Z2 = ZR - LP21 = LPTR(LP21) - LP21 = LPTR(LP21) - NR = ABS(LIST(LP21)) - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - GO TO 1 -C -C Bottom of loop -- test for termination of loop. -C - 2 IF (N2 .EQ. NFRST) GO TO 3 - N2 = NL - X2 = XL - Y2 = YL - Z2 = ZL - LP = LPTR(LP) - GO TO 1 -C -C Delete N1 and all its incident arcs. If N1 is an interior -C node and either NNB > 3 or NNB = 3 and N2 LEFT NR->NL, -C then N1 must be separated from its neighbors by a plane -C containing the origin -- its removal reverses the effect -C of a call to COVSPH, and all its neighbors become -C boundary nodes. This is achieved by treating it as if -C it were a boundary node (setting BDRY to TRUE, changing -C a sign in LIST, and incrementing NNB). -C - 3 IF (.NOT. BDRY) THEN - IF (NNB .GT. 3) THEN - BDRY = .TRUE. - ELSE - LPF = LPTR(LPL) - NR = LIST(LPF) - LP = LPTR(LPF) - N2 = LIST(LP) - NL = LIST(LPL) - BDRY = LEFT(X(NR),Y(NR),Z(NR),X(NL),Y(NL),Z(NL), - . X(N2),Y(N2),Z(N2)) - ENDIF - IF (BDRY) THEN -C -C IF a boundary node already exists, then N1 and its -C neighbors cannot be converted to boundary nodes. -C (They must be collinear.) This is a problem if -C NNB > 3. -C - DO 4 I = 1,NN - IF (LIST(LEND(I)) .LT. 0) THEN - BDRY = .FALSE. - GO TO 5 - ENDIF - 4 CONTINUE - LIST(LPL) = -LIST(LPL) - NNB = NNB + 1 - ENDIF - ENDIF - 5 IF (.NOT. BDRY .AND. NNB .GT. 3) GO TO 24 -C -C Initialize for loop on neighbors. LPL points to the last -C neighbor of N1. LNEW is stored in local variable LNW. -C - LP = LPL - LNW = LNEW -C -C Loop on neighbors N2 of N1, beginning with the first. -C - 6 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNW, LPH) - IF (LPH .LT. 0) GO TO 23 -C -C LP and LPL may require alteration. -C - IF (LPL .EQ. LNW) LPL = LPH - IF (LP .EQ. LNW) LP = LPH - IF (LP .NE. LPL) GO TO 6 -C -C Delete N1 from X, Y, Z, and LEND, and remove its adjacency -C list from LIST and LPTR. 
LIST entries (nodal indexes) -C which are larger than N1 must be decremented. -C - NN = NN - 1 - IF (N1 .GT. NN) GO TO 9 - DO 7 I = N1,NN - X(I) = X(I+1) - Y(I) = Y(I+1) - Z(I) = Z(I+1) - LEND(I) = LEND(I+1) - 7 CONTINUE -C - DO 8 I = 1,LNW-1 - IF (LIST(I) .GT. N1) LIST(I) = LIST(I) - 1 - IF (LIST(I) .LT. -N1) LIST(I) = LIST(I) + 1 - 8 CONTINUE -C -C For LPN = first to last neighbors of N1, delete the -C preceding neighbor (indexed by LP). -C -C Each empty LIST,LPTR location LP is filled in with the -C values at LNW-1, and LNW is decremented. All pointers -C (including those in LPTR and LEND) with value LNW-1 -C must be changed to LP. -C -C LPL points to the last neighbor of N1. -C - 9 IF (BDRY) NNB = NNB - 1 - LPN = LPL - DO 13 J = 1,NNB - LNW = LNW - 1 - LP = LPN - LPN = LPTR(LP) - LIST(LP) = LIST(LNW) - LPTR(LP) = LPTR(LNW) - IF (LPTR(LPN) .EQ. LNW) LPTR(LPN) = LP - IF (LPN .EQ. LNW) LPN = LP - DO 10 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LP - GO TO 11 - ENDIF - 10 CONTINUE -C - 11 DO 12 I = LNW-1,1,-1 - IF (LPTR(I) .EQ. LNW) LPTR(I) = LP - 12 CONTINUE - 13 CONTINUE -C -C Update N and LNEW, and optimize the patch of triangles -C containing K (on input) by applying swaps to the arcs -C in IWK. -C - N = NN - LNEW = LNW - IF (IWL .GT. 0) THEN - NIT = 4*IWL - CALL OPTIM (X,Y,Z,IWL, LIST,LPTR,LEND,NIT,IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 25 - IF (IERR .EQ. 1) GO TO 26 - ENDIF -C -C Successful termination. -C - IER = 0 - RETURN -C -C Invalid input parameter. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 22 IER = 2 - RETURN -C -C Invalid triangulation data structure. NNB < 3 on input or -C N2 is a neighbor of N1 but N1 is not a neighbor of N2. -C - 23 IER = 3 - RETURN -C -C N1 is interior but NNB could not be reduced to 3. -C - 24 IER = 4 - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 25 IER = 5 - WRITE (*,100) NIT, IERR - 100 FORMAT (//5X,'*** Error in OPTIM (called from ', - . 
'DELNOD): NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 26 IER = 6 - RETURN - END - SUBROUTINE EDGE (IN1,IN2,X,Y,Z, LWK,IWK,LIST,LPTR, - . LEND, IER) - INTEGER IN1, IN2, LWK, IWK(2,*), LIST(*), LPTR(*), - . LEND(*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a triangulation of N nodes and a pair of nodal -C indexes IN1 and IN2, this routine swaps arcs as necessary -C to force IN1 and IN2 to be adjacent. Only arcs which -C intersect IN1-IN2 are swapped out. If a Delaunay triangu- -C lation is input, the resulting triangulation is as close -C as possible to a Delaunay triangulation in the sense that -C all arcs other than IN1-IN2 are locally optimal. -C -C A sequence of calls to EDGE may be used to force the -C presence of a set of edges defining the boundary of a non- -C convex and/or multiply connected region, or to introduce -C barriers into the triangulation. Note that Subroutine -C GETNP will not necessarily return closest nodes if the -C triangulation has been constrained by a call to EDGE. -C However, this is appropriate in some applications, such -C as triangle-based interpolation on a nonconvex domain. -C -C -C On input: -C -C IN1,IN2 = Indexes (of X, Y, and Z) in the range 1 to -C N defining a pair of nodes to be connected -C by an arc. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C The above parameters are not altered by this routine. -C -C LWK = Number of columns reserved for IWK. This must -C be at least NI -- the number of arcs that -C intersect IN1-IN2. (NI is bounded by N-3.) -C -C IWK = Integer work array of length at least 2*LWK. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. 
-C -C On output: -C -C LWK = Number of arcs which intersect IN1-IN2 (but -C not more than the input value of LWK) unless -C IER = 1 or IER = 3. LWK = 0 if and only if -C IN1 and IN2 were adjacent (or LWK=0) on input. -C -C IWK = Array containing the indexes of the endpoints -C of the new arcs other than IN1-IN2 unless -C IER > 0 or LWK = 0. New arcs to the left of -C IN1->IN2 are stored in the first K-1 columns -C (left portion of IWK), column K contains -C zeros, and new arcs to the right of IN1->IN2 -C occupy columns K+1,...,LWK. (K can be deter- -C mined by searching IWK for the zeros.) -C -C LIST,LPTR,LEND = Data structure updated if necessary -C to reflect the presence of an arc -C connecting IN1 and IN2 unless IER > -C 0. The data structure has been -C altered if IER >= 4. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if IN1 < 1, IN2 < 1, IN1 = IN2, -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if IN1 and IN2 could not be connected -C due to either an invalid data struc- -C ture or collinear nodes (and floating -C point error). -C IER = 4 if an error flag other than IER = 1 -C was returned by OPTIM. -C IER = 5 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs other than IN1-IN2 may not -C be optimal. -C -C An error message is written to the standard output unit -C in the case of IER = 3 or IER = 4. -C -C Modules required by EDGE: LEFT, LSTPTR, OPTIM, SWAP, -C SWPTST -C -C Intrinsic function called by EDGE: ABS -C -C*********************************************************** -C - LOGICAL LEFT - INTEGER I, IERR, IWC, IWCP1, IWEND, IWF, IWL, LFT, LP, - . LP21, LPL, N0, N1, N1FRST, N1LST, N2, NEXT, - . NIT, NL, NR - REAL DP12, DP1L, DP1R, DP2L, DP2R, X0, X1, X2, Y0, - . 
Y1, Y2, Z0, Z1, Z2 -C -C Local parameters: -C -C DPij = Dot product -C I = DO-loop index and column index for IWK -C IERR = Error flag returned by Subroutine OPTIM -C IWC = IWK index between IWF and IWL -- NL->NR is -C stored in IWK(1,IWC)->IWK(2,IWC) -C IWCP1 = IWC + 1 -C IWEND = Input or output value of LWK -C IWF = IWK (column) index of the first (leftmost) arc -C which intersects IN1->IN2 -C IWL = IWK (column) index of the last (rightmost) are -C which intersects IN1->IN2 -C LFT = Flag used to determine if a swap results in the -C new arc intersecting IN1-IN2 -- LFT = 0 iff -C N0 = IN1, LFT = -1 implies N0 LEFT IN1->IN2, -C and LFT = 1 implies N0 LEFT IN2->IN1 -C LP = List pointer (index for LIST and LPTR) -C LP21 = Unused parameter returned by SWAP -C LPL = Pointer to the last neighbor of IN1 or NL -C N0 = Neighbor of N1 or node opposite NR->NL -C N1,N2 = Local copies of IN1 and IN2 -C N1FRST = First neighbor of IN1 -C N1LST = (Signed) last neighbor of IN1 -C NEXT = Node opposite NL->NR -C NIT = Flag or number of iterations employed by OPTIM -C NL,NR = Endpoints of an arc which intersects IN1-IN2 -C with NL LEFT IN1->IN2 -C X0,Y0,Z0 = Coordinates of N0 -C X1,Y1,Z1 = Coordinates of IN1 -C X2,Y2,Z2 = Coordinates of IN2 -C -C -C Store IN1, IN2, and LWK in local variables and test for -C errors. -C - N1 = IN1 - N2 = IN2 - IWEND = LWK - IF (N1 .LT. 1 .OR. N2 .LT. 1 .OR. N1 .EQ. N2 .OR. - . IWEND .LT. 0) GO TO 31 -C -C Test for N2 as a neighbor of N1. LPL points to the last -C neighbor of N1. -C - LPL = LEND(N1) - N0 = ABS(LIST(LPL)) - LP = LPL - 1 IF (N0 .EQ. N2) GO TO 30 - LP = LPTR(LP) - N0 = LIST(LP) - IF (LP .NE. LPL) GO TO 1 -C -C Initialize parameters. -C - IWL = 0 - NIT = 0 -C -C Store the coordinates of N1 and N2. 
-C - 2 X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) -C -C Set NR and NL to adjacent neighbors of N1 such that -C NR LEFT N2->N1 and NL LEFT N1->N2, -C (NR Forward N1->N2 or NL Forward N1->N2), and -C (NR Forward N2->N1 or NL Forward N2->N1). -C -C Initialization: Set N1FRST and N1LST to the first and -C (signed) last neighbors of N1, respectively, and -C initialize NL to N1FRST. -C - LPL = LEND(N1) - N1LST = LIST(LPL) - LP = LPTR(LPL) - N1FRST = LIST(LP) - NL = N1FRST - IF (N1LST .LT. 0) GO TO 4 -C -C N1 is an interior node. Set NL to the first candidate -C for NR (NL LEFT N2->N1). -C - 3 IF (LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL),Z(NL))) GO TO 4 - LP = LPTR(LP) - NL = LIST(LP) - IF (NL .NE. N1FRST) GO TO 3 -C -C All neighbors of N1 are strictly left of N1->N2. -C - GO TO 5 -C -C NL = LIST(LP) LEFT N2->N1. Set NR to NL and NL to the -C following neighbor of N1. -C - 4 NR = NL - LP = LPTR(LP) - NL = ABS(LIST(LP)) - IF (LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NL),Y(NL),Z(NL)) ) THEN -C -C NL LEFT N1->N2 and NR LEFT N2->N1. The Forward tests -C are employed to avoid an error associated with -C collinear nodes. -C - DP12 = X1*X2 + Y1*Y2 + Z1*Z2 - DP1L = X1*X(NL) + Y1*Y(NL) + Z1*Z(NL) - DP2L = X2*X(NL) + Y2*Y(NL) + Z2*Z(NL) - DP1R = X1*X(NR) + Y1*Y(NR) + Z1*Z(NR) - DP2R = X2*X(NR) + Y2*Y(NR) + Z2*Z(NR) - IF ( (DP2L-DP12*DP1L .GE. 0. .OR. - . DP2R-DP12*DP1R .GE. 0.) .AND. - . (DP1L-DP12*DP2L .GE. 0. .OR. - . DP1R-DP12*DP2R .GE. 0.) ) GO TO 6 -C -C NL-NR does not intersect N1-N2. However, there is -C another candidate for the first arc if NL lies on -C the line N1-N2. -C - IF ( .NOT. LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL), - . Z(NL)) ) GO TO 5 - ENDIF -C -C Bottom of loop. -C - IF (NL .NE. N1FRST) GO TO 4 -C -C Either the triangulation is invalid or N1-N2 lies on the -C convex hull boundary and an edge NR->NL (opposite N1 and -C intersecting N1-N2) was not found due to floating point -C error. 
Try interchanging N1 and N2 -- NIT > 0 iff this -C has already been done. -C - 5 IF (NIT .GT. 0) GO TO 33 - NIT = 1 - N1 = N2 - N2 = IN1 - GO TO 2 -C -C Store the ordered sequence of intersecting edges NL->NR in -C IWK(1,IWL)->IWK(2,IWL). -C - 6 IWL = IWL + 1 - IF (IWL .GT. IWEND) GO TO 32 - IWK(1,IWL) = NL - IWK(2,IWL) = NR -C -C Set NEXT to the neighbor of NL which follows NR. -C - LPL = LEND(NL) - LP = LPTR(LPL) -C -C Find NR as a neighbor of NL. The search begins with -C the first neighbor. -C - 7 IF (LIST(LP) .EQ. NR) GO TO 8 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 7 -C -C NR must be the last neighbor, and NL->NR cannot be a -C boundary edge. -C - IF (LIST(LP) .NE. NR) GO TO 33 -C -C Set NEXT to the neighbor following NR, and test for -C termination of the store loop. -C - 8 LP = LPTR(LP) - NEXT = ABS(LIST(LP)) - IF (NEXT .EQ. N2) GO TO 9 -C -C Set NL or NR to NEXT. -C - IF ( LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NEXT),Y(NEXT),Z(NEXT)) ) - . THEN - NL = NEXT - ELSE - NR = NEXT - ENDIF - GO TO 6 -C -C IWL is the number of arcs which intersect N1-N2. -C Store LWK. -C - 9 LWK = IWL - IWEND = IWL -C -C Initialize for edge swapping loop -- all possible swaps -C are applied (even if the new arc again intersects -C N1-N2), arcs to the left of N1->N2 are stored in the -C left portion of IWK, and arcs to the right are stored in -C the right portion. IWF and IWL index the first and last -C intersecting arcs. -C - IWF = 1 -C -C Top of loop -- set N0 to N1 and NL->NR to the first edge. -C IWC points to the arc currently being processed. LFT -C .LE. 0 iff N0 LEFT N1->N2. -C - 10 LFT = 0 - N0 = N1 - X0 = X1 - Y0 = Y1 - Z0 = Z1 - NL = IWK(1,IWF) - NR = IWK(2,IWF) - IWC = IWF -C -C Set NEXT to the node opposite NL->NR unless IWC is the -C last arc. -C - 11 IF (IWC .EQ. IWL) GO TO 21 - IWCP1 = IWC + 1 - NEXT = IWK(1,IWCP1) - IF (NEXT .NE. NL) GO TO 16 - NEXT = IWK(2,IWCP1) -C -C NEXT RIGHT N1->N2 and IWC .LT. IWL. Test for a possible -C swap. -C - IF ( .NOT. 
LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 - IF (LFT .GE. 0) GO TO 12 - IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 -C -C Replace NL->NR with N0->NEXT. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = N0 - IWK(2,IWC) = NEXT - GO TO 15 -C -C Swap NL-NR for N0-NEXT, shift columns IWC+1,...,IWL to -C the left, and store N0-NEXT in the right portion of -C IWK. -C - 12 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 13 I = IWCP1,IWL - IWK(1,I-1) = IWK(1,I) - IWK(2,I-1) = IWK(2,I) - 13 CONTINUE - IWK(1,IWL) = N0 - IWK(2,IWL) = NEXT - IWL = IWL - 1 - NR = NEXT - GO TO 11 -C -C A swap is not possible. Set N0 to NR. -C - 14 N0 = NR - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = 1 -C -C Advance to the next arc. -C - 15 NR = NEXT - IWC = IWC + 1 - GO TO 11 -C -C NEXT LEFT N1->N2, NEXT .NE. N2, and IWC .LT. IWL. -C Test for a possible swap. -C - 16 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 - IF (LFT .LE. 0) GO TO 17 - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 -C -C Replace NL->NR with NEXT->N0. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = NEXT - IWK(2,IWC) = N0 - GO TO 20 -C -C Swap NL-NR for N0-NEXT, shift columns IWF,...,IWC-1 to -C the right, and store N0-NEXT in the left portion of -C IWK. -C - 17 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 18 I = IWC-1,IWF,-1 - IWK(1,I+1) = IWK(1,I) - IWK(2,I+1) = IWK(2,I) - 18 CONTINUE - IWK(1,IWF) = N0 - IWK(2,IWF) = NEXT - IWF = IWF + 1 - GO TO 20 -C -C A swap is not possible. Set N0 to NL. -C - 19 N0 = NL - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = -1 -C -C Advance to the next arc. -C - 20 NL = NEXT - IWC = IWC + 1 - GO TO 11 -C -C N2 is opposite NL->NR (IWC = IWL). -C - 21 IF (N0 .EQ. N1) GO TO 24 - IF (LFT .LT. 0) GO TO 22 -C -C N0 RIGHT N1->N2. Test for a possible swap. -C - IF ( .NOT. 
LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2 and store N0-N2 in the right -C portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWL) = N0 - IWK(2,IWL) = N2 - IWL = IWL - 1 - GO TO 10 -C -C N0 LEFT N1->N2. Test for a possible swap. -C - 22 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2, shift columns IWF,...,IWL-1 to the -C right, and store N0-N2 in the left portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - I = IWL - 23 IWK(1,I) = IWK(1,I-1) - IWK(2,I) = IWK(2,I-1) - I = I - 1 - IF (I .GT. IWF) GO TO 23 - IWK(1,IWF) = N0 - IWK(2,IWF) = N2 - IWF = IWF + 1 - GO TO 10 -C -C IWF = IWC = IWL. Swap out the last arc for N1-N2 and -C store zeros in IWK. -C - 24 CALL SWAP (N2,N1,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = 0 - IWK(2,IWC) = 0 -C -C Optimization procedure -- -C - IER = 0 - IF (IWC .GT. 1) THEN -C -C Optimize the set of new arcs to the left of IN1->IN2. -C - NIT = 4*(IWC-1) - CALL OPTIM (X,Y,Z,IWC-1, LIST,LPTR,LEND,NIT, - . IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) IER = 5 - ENDIF - IF (IWC .LT. IWEND) THEN -C -C Optimize the set of new arcs to the right of IN1->IN2. -C - NIT = 4*(IWEND-IWC) - CALL OPTIM (X,Y,Z,IWEND-IWC, LIST,LPTR,LEND,NIT, - . IWK(1,IWC+1), IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) GO TO 35 - ENDIF - IF (IER .EQ. 5) GO TO 35 -C -C Successful termination (IER = 0). -C - RETURN -C -C IN1 and IN2 were adjacent on input. -C - 30 IER = 0 - RETURN -C -C Invalid input parameter. -C - 31 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 32 IER = 2 - RETURN -C -C Invalid triangulation data structure or collinear nodes -C on convex hull boundary. -C - 33 IER = 3 - WRITE (*,130) IN1, IN2 - 130 FORMAT (//5X,'*** Error in EDGE: Invalid triangula', - . 'tion or null triangles on boundary'/ - . 
9X,'IN1 =',I4,', IN2=',I4/) - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 34 IER = 4 - WRITE (*,140) NIT, IERR - 140 FORMAT (//5X,'*** Error in OPTIM (called from EDGE):', - . ' NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 35 IER = 5 - RETURN - END - SUBROUTINE GETNP (X,Y,Z,LIST,LPTR,LEND,L, NPTS, DF, - . IER) - INTEGER LIST(*), LPTR(*), LEND(*), L, NPTS(L), IER - REAL X(*), Y(*), Z(*), DF -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a Delaunay triangulation of N nodes on the unit -C sphere and an array NPTS containing the indexes of L-1 -C nodes ordered by angular distance from NPTS(1), this sub- -C routine sets NPTS(L) to the index of the next node in the -C sequence -- the node, other than NPTS(1),...,NPTS(L-1), -C that is closest to NPTS(1). Thus, the ordered sequence -C of K closest nodes to N1 (including N1) may be determined -C by K-1 calls to GETNP with NPTS(1) = N1 and L = 2,3,...,K -C for K .GE. 2. -C -C The algorithm uses the property of a Delaunay triangula- -C tion that the K-th closest node to N1 is a neighbor of one -C of the K-1 closest nodes to N1. -C -C -C On input: -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Triangulation data structure. Re- -C fer to Subroutine TRMESH. -C -C L = Number of nodes in the sequence on output. 2 -C .LE. L .LE. N. -C -C The above parameters are not altered by this routine. -C -C NPTS = Array of length .GE. L containing the indexes -C of the L-1 closest nodes to NPTS(1) in the -C first L-1 locations. -C -C On output: -C -C NPTS = Array updated with the index of the L-th -C closest node to NPTS(1) in position L unless -C IER = 1. 
-C -C DF = Value of an increasing function (negative cos- -C ine) of the angular distance between NPTS(1) -C and NPTS(L) unless IER = 1. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if L < 2. -C -C Modules required by GETNP: None -C -C Intrinsic function called by GETNP: ABS -C -C*********************************************************** -C - INTEGER I, LM1, LP, LPL, N1, NB, NI, NP - REAL DNB, DNP, X1, Y1, Z1 -C -C Local parameters: -C -C DNB,DNP = Negative cosines of the angular distances from -C N1 to NB and to NP, respectively -C I = NPTS index and DO-loop index -C LM1 = L-1 -C LP = LIST pointer of a neighbor of NI -C LPL = Pointer to the last neighbor of NI -C N1 = NPTS(1) -C NB = Neighbor of NI and candidate for NP -C NI = NPTS(I) -C NP = Candidate for NPTS(L) -C X1,Y1,Z1 = Coordinates of N1 -C - LM1 = L - 1 - IF (LM1 .LT. 1) GO TO 6 - IER = 0 -C -C Store N1 = NPTS(1) and mark the elements of NPTS. -C - N1 = NPTS(1) - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - DO 1 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 1 CONTINUE -C -C Candidates for NP = NPTS(L) are the unmarked neighbors -C of nodes in NPTS. DNP is initially greater than -cos(PI) -C (the maximum distance). -C - DNP = 2. -C -C Loop on nodes NI in NPTS. -C - DO 4 I = 1,LM1 - NI = NPTS(I) - LPL = -LEND(NI) - LP = LPL -C -C Loop on neighbors NB of NI. -C - 2 NB = ABS(LIST(LP)) - IF (LEND(NB) .LT. 0) GO TO 3 -C -C NB is an unmarked neighbor of NI. Replace NP if NB is -C closer to N1. -C - DNB = -(X(NB)*X1 + Y(NB)*Y1 + Z(NB)*Z1) - IF (DNB .GE. DNP) GO TO 3 - NP = NB - DNP = DNB - 3 LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 2 - 4 CONTINUE - NPTS(L) = NP - DF = DNP -C -C Unmark the elements of NPTS. -C - DO 5 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 5 CONTINUE - RETURN -C -C L is outside its valid range. 
-C - 6 IER = 1 - RETURN - END - SUBROUTINE INSERT (K,LP, LIST,LPTR,LNEW ) - INTEGER K, LP, LIST(*), LPTR(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine inserts K as a neighbor of N1 following -C N2, where LP is the LIST pointer of N2 as a neighbor of -C N1. Note that, if N2 is the last neighbor of N1, K will -C become the first neighbor (even if N1 is a boundary node). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C K = Index of the node to be inserted. -C -C LP = LIST pointer of N2 as a neighbor of N1. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LNEW = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LNEW = Data structure updated with the -C addition of node K. -C -C Modules required by INSERT: None -C -C*********************************************************** -C - INTEGER LSAV -C - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = K - LPTR(LNEW) = LSAV - LNEW = LNEW + 1 - RETURN - END - LOGICAL FUNCTION INSIDE (P,LV,XV,YV,ZV,NV,LISTV, IER) - INTEGER LV, NV, LISTV(NV), IER - REAL P(3), XV(LV), YV(LV), ZV(LV) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 12/27/93 -C -C This function locates a point P relative to a polygonal -C region R on the surface of the unit sphere, returning -C INSIDE = TRUE if and only if P is contained in R. R is -C defined by a cyclically ordered sequence of vertices which -C form a positively-oriented simple closed curve. Adjacent -C vertices need not be distinct but the curve must not be -C self-intersecting. 
Also, while polygon edges are by defi- -C nition restricted to a single hemisphere, R is not so -C restricted. Its interior is the region to the left as the -C vertices are traversed in order. -C -C The algorithm consists of selecting a point Q in R and -C then finding all points at which the great circle defined -C by P and Q intersects the boundary of R. P lies inside R -C if and only if there is an even number of intersection -C points between Q and P. Q is taken to be a point immedi- -C ately to the left of a directed boundary edge -- the first -C one that results in no consistency-check failures. -C -C If P is close to the polygon boundary, the problem is -C ill-conditioned and the decision may be incorrect. Also, -C an incorrect decision may result from a poor choice of Q -C (if, for example, a boundary edge lies on the great cir- -C cle defined by P and Q). A more reliable result could be -C obtained by a sequence of calls to INSIDE with the ver- -C tices cyclically permuted before each call (to alter the -C choice of Q). -C -C -C On input: -C -C P = Array of length 3 containing the Cartesian -C coordinates of the point (unit vector) to be -C located. -C -C LV = Length of arrays XV, YV, and ZV. -C -C XV,YV,ZV = Arrays of length LV containing the Carte- -C sian coordinates of unit vectors (points -C on the unit sphere). These values are -C not tested for validity. -C -C NV = Number of vertices in the polygon. 3 .LE. NV -C .LE. LV. -C -C LISTV = Array of length NV containing the indexes -C (for XV, YV, and ZV) of a cyclically-ordered -C (and CCW-ordered) sequence of vertices that -C define R. The last vertex (indexed by -C LISTV(NV)) is followed by the first (indexed -C by LISTV(1)). LISTV entries must be in the -C range 1 to LV. -C -C Input parameters are not altered by this function. -C -C On output: -C -C INSIDE = TRUE if and only if P lies inside R unless -C IER .NE. 0, in which case the value is not -C altered. 
-C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LV or NV is outside its valid -C range. -C IER = 2 if a LISTV entry is outside its valid -C range. -C IER = 3 if the polygon boundary was found to -C be self-intersecting. This error will -C not necessarily be detected. -C IER = 4 if every choice of Q (one for each -C boundary edge) led to failure of some -C internal consistency check. The most -C likely cause of this error is invalid -C input: P = (0,0,0), a null or self- -C intersecting polygon, etc. -C -C Module required by INSIDE: INTRSC -C -C Intrinsic function called by INSIDE: SQRT -C -C*********************************************************** -C - INTEGER I1, I2, IERR, IMX, K, K0, N, NI - LOGICAL EVEN, LFT1, LFT2, PINR, QINR - REAL B(3), BP, BQ, CN(3), D, EPS, PN(3), Q(3), - . QN(3), QNRM, V1(3), V2(3), VN(3), VNRM -C -C Local parameters: -C -C B = Intersection point between the boundary and -C the great circle defined by P and Q -C BP,BQ = and , respectively, maximized over -C intersection points B that lie between P and -C Q (on the shorter arc) -- used to find the -C closest intersection points to P and Q -C CN = Q X P = normal to the plane of P and Q -C D = Dot product or -C EPS = Parameter used to define Q as the point whose -C orthogonal distance to (the midpoint of) -C boundary edge V1->V2 is approximately EPS/ -C (2*Cos(A/2)), where = Cos(A). 
-C EVEN = TRUE iff an even number of intersection points -C lie between P and Q (on the shorter arc) -C I1,I2 = Indexes (LISTV elements) of a pair of adjacent -C boundary vertices (endpoints of a boundary -C edge) -C IERR = Error flag for calls to INTRSC (not tested) -C IMX = Local copy of LV and maximum value of I1 and -C I2 -C K = DO-loop index and LISTV index -C K0 = LISTV index of the first endpoint of the -C boundary edge used to compute Q -C LFT1,LFT2 = Logical variables associated with I1 and I2 in -C the boundary traversal: TRUE iff the vertex -C is strictly to the left of Q->P ( > 0) -C N = Local copy of NV -C NI = Number of intersections (between the boundary -C curve and the great circle P-Q) encountered -C PINR = TRUE iff P is to the left of the directed -C boundary edge associated with the closest -C intersection point to P that lies between P -C and Q (a left-to-right intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C PN,QN = P X CN and CN X Q, respectively: used to -C locate intersections B relative to arc Q->P -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where V1->V2 is -C the boundary edge indexed by LISTV(K0) -> -C LISTV(K0+1) -C QINR = TRUE iff Q is to the left of the directed -C boundary edge associated with the closest -C intersection point to Q that lies between P -C and Q (a right-to-left intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C QNRM = Euclidean norm of V1+V2+EPS*VN/VNRM used to -C compute (normalize) Q -C V1,V2 = Vertices indexed by I1 and I2 in the boundary -C traversal -C VN = V1 X V2, where V1->V2 is the boundary edge -C indexed by LISTV(K0) -> LISTV(K0+1) -C VNRM = Euclidean norm of VN -C - DATA EPS/1.E-3/ -C -C Store local parameters, test for error 1, and initialize -C K0. -C - IMX = LV - N = NV - IF (N .LT. 3 .OR. N .GT. IMX) GO TO 11 - K0 = 0 - I1 = LISTV(1) - IF (I1 .LT. 1 .OR. I1 .GT. 
IMX) GO TO 12 -C -C Increment K0 and set Q to a point immediately to the left -C of the midpoint of edge V1->V2 = LISTV(K0)->LISTV(K0+1): -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where VN = V1 X V2. -C - 1 K0 = K0 + 1 - IF (K0 .GT. N) GO TO 14 - I1 = LISTV(K0) - IF (K0 .LT. N) THEN - I2 = LISTV(K0+1) - ELSE - I2 = LISTV(1) - ENDIF - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - VN(1) = YV(I1)*ZV(I2) - ZV(I1)*YV(I2) - VN(2) = ZV(I1)*XV(I2) - XV(I1)*ZV(I2) - VN(3) = XV(I1)*YV(I2) - YV(I1)*XV(I2) - VNRM = SQRT(VN(1)*VN(1) + VN(2)*VN(2) + VN(3)*VN(3)) - IF (VNRM .EQ. 0.) GO TO 1 - Q(1) = XV(I1) + XV(I2) + EPS*VN(1)/VNRM - Q(2) = YV(I1) + YV(I2) + EPS*VN(2)/VNRM - Q(3) = ZV(I1) + ZV(I2) + EPS*VN(3)/VNRM - QNRM = SQRT(Q(1)*Q(1) + Q(2)*Q(2) + Q(3)*Q(3)) - Q(1) = Q(1)/QNRM - Q(2) = Q(2)/QNRM - Q(3) = Q(3)/QNRM -C -C Compute CN = Q X P, PN = P X CN, and QN = CN X Q. -C - CN(1) = Q(2)*P(3) - Q(3)*P(2) - CN(2) = Q(3)*P(1) - Q(1)*P(3) - CN(3) = Q(1)*P(2) - Q(2)*P(1) - IF (CN(1) .EQ. 0. .AND. CN(2) .EQ. 0. .AND. - . CN(3) .EQ. 0.) GO TO 1 - PN(1) = P(2)*CN(3) - P(3)*CN(2) - PN(2) = P(3)*CN(1) - P(1)*CN(3) - PN(3) = P(1)*CN(2) - P(2)*CN(1) - QN(1) = CN(2)*Q(3) - CN(3)*Q(2) - QN(2) = CN(3)*Q(1) - CN(1)*Q(3) - QN(3) = CN(1)*Q(2) - CN(2)*Q(1) -C -C Initialize parameters for the boundary traversal. -C - NI = 0 - EVEN = .TRUE. - BP = -2. - BQ = -2. - PINR = .TRUE. - QINR = .TRUE. - I2 = LISTV(N) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. -C -C Loop on boundary arcs I1->I2. -C - DO 2 K = 1,N - I1 = I2 - LFT1 = LFT2 - I2 = LISTV(K) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. - IF (LFT1 .EQV. LFT2) GO TO 2 -C -C I1 and I2 are on opposite sides of Q->P. Compute the -C point of intersection B. 
-C - NI = NI + 1 - V1(1) = XV(I1) - V1(2) = YV(I1) - V1(3) = ZV(I1) - V2(1) = XV(I2) - V2(2) = YV(I2) - V2(3) = ZV(I2) - CALL INTRSC (V1,V2,CN, B,IERR) -C -C B is between Q and P (on the shorter arc) iff -C B Forward Q->P and B Forward P->Q iff -C > 0 and > 0. -C - IF (B(1)*QN(1) + B(2)*QN(2) + B(3)*QN(3) .GT. 0. - . .AND. - . B(1)*PN(1) + B(2)*PN(2) + B(3)*PN(3) .GT. 0.) - . THEN -C -C Update EVEN, BQ, QINR, BP, and PINR. -C - EVEN = .NOT. EVEN - D = B(1)*Q(1) + B(2)*Q(2) + B(3)*Q(3) - IF (D .GT. BQ) THEN - BQ = D - QINR = LFT2 - ENDIF - D = B(1)*P(1) + B(2)*P(2) + B(3)*P(3) - IF (D .GT. BP) THEN - BP = D - PINR = LFT1 - ENDIF - ENDIF - 2 CONTINUE -C -C Test for consistency: NI must be even and QINR must be -C TRUE. -C - IF (NI .NE. 2*(NI/2) .OR. .NOT. QINR) GO TO 1 -C -C Test for error 3: different values of PINR and EVEN. -C - IF (PINR .NEQV. EVEN) GO TO 13 -C -C No error encountered. -C - IER = 0 - INSIDE = EVEN - RETURN -C -C LV or NV is outside its valid range. -C - 11 IER = 1 - RETURN -C -C A LISTV entry is outside its valid range. -C - 12 IER = 2 - RETURN -C -C The polygon boundary is self-intersecting. -C - 13 IER = 3 - RETURN -C -C Consistency tests failed for all values of Q. -C - 14 IER = 4 - RETURN - END - SUBROUTINE INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, I3, LIST(*), LPTR(*), LEND(*), - . LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine adds an interior node to a triangulation -C of a set of points on the unit sphere. The data structure -C is updated with the insertion of node KK into the triangle -C whose vertices are I1, I2, and I3. No optimization of the -C triangulation is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of the node to be inserted. KK .GE. 
1 -C and KK must not be equal to I1, I2, or I3. -C -C I1,I2,I3 = Indexes of the counterclockwise-ordered -C sequence of vertices of a triangle which -C contains node KK. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. Triangle -C (I1,I2,I3) must be included -C in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. KK -C will be connected to nodes I1, -C I2, and I3. -C -C Modules required by INTADD: INSERT, LSTPTR -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER K, LP, N1, N2, N3 -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C N1,N2,N3 = Local copies of I1, I2, and I3 -C - K = KK -C -C Initialization. -C - N1 = I1 - N2 = I2 - N3 = I3 -C -C Add K as a neighbor of I1, I2, and I3. -C - LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N2),N3,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) -C -C Add I1, I2, and I3 as neighbors of K. -C - LIST(LNEW) = N1 - LIST(LNEW+1) = N2 - LIST(LNEW+2) = N3 - LPTR(LNEW) = LNEW + 1 - LPTR(LNEW+1) = LNEW + 2 - LPTR(LNEW+2) = LNEW - LEND(K) = LNEW + 2 - LNEW = LNEW + 3 - RETURN - END - SUBROUTINE INTRSC (P1,P2,CN, P,IER) - INTEGER IER - REAL P1(3), P2(3), CN(3), P(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/19/90 -C -C Given a great circle C and points P1 and P2 defining an -C arc A on the surface of the unit sphere, where A is the -C shorter of the two portions of the great circle C12 assoc- -C iated with P1 and P2, this subroutine returns the point -C of intersection P between C and C12 that is closer to A. 
-C Thus, if P1 and P2 lie in opposite hemispheres defined by -C C, P is the point of intersection of C with A. -C -C -C On input: -C -C P1,P2 = Arrays of length 3 containing the Cartesian -C coordinates of unit vectors. -C -C CN = Array of length 3 containing the Cartesian -C coordinates of a nonzero vector which defines C -C as the intersection of the plane whose normal -C is CN with the unit sphere. Thus, if C is to -C be the great circle defined by P and Q, CN -C should be P X Q. -C -C The above parameters are not altered by this routine. -C -C P = Array of length 3. -C -C On output: -C -C P = Point of intersection defined above unless IER -C .NE. 0, in which case P is not altered. -C -C IER = Error indicator. -C IER = 0 if no errors were encountered. -C IER = 1 if = . This occurs -C iff P1 = P2 or CN = 0 or there are -C two intersection points at the same -C distance from A. -C IER = 2 if P2 = -P1 and the definition of A is -C therefore ambiguous. -C -C Modules required by INTRSC: None -C -C Intrinsic function called by INTRSC: SQRT -C -C*********************************************************** -C - INTEGER I - REAL D1, D2, PP(3), PPN, T -C -C Local parameters: -C -C D1 = -C D2 = -C I = DO-loop index -C PP = P1 + T*(P2-P1) = Parametric representation of the -C line defined by P1 and P2 -C PPN = Norm of PP -C T = D1/(D1-D2) = Parameter value chosen so that PP lies -C in the plane of C -C - D1 = CN(1)*P1(1) + CN(2)*P1(2) + CN(3)*P1(3) - D2 = CN(1)*P2(1) + CN(2)*P2(2) + CN(3)*P2(3) -C - IF (D1 .EQ. D2) THEN - IER = 1 - RETURN - ENDIF -C -C Solve for T such that = 0 and compute PP and PPN. -C - T = D1/(D1-D2) - PPN = 0. - DO 1 I = 1,3 - PP(I) = P1(I) + T*(P2(I)-P1(I)) - PPN = PPN + PP(I)*PP(I) - 1 CONTINUE -C -C PPN = 0 iff PP = 0 iff P2 = -P1 (and T = .5). -C - IF (PPN .EQ. 0.) THEN - IER = 2 - RETURN - ENDIF - PPN = SQRT(PPN) -C -C Compute P = PP/PPN. 
-C - DO 2 I = 1,3 - P(I) = PP(I)/PPN - 2 CONTINUE - IER = 0 - RETURN - END - INTEGER FUNCTION JRAND (N, IX,IY,IZ ) - INTEGER N, IX, IY, IZ -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C This function returns a uniformly distributed pseudo- -C random integer in the range 1 to N. -C -C -C On input: -C -C N = Maximum value to be returned. -C -C N is not altered by this function. -C -C IX,IY,IZ = Integer seeds initialized to values in -C the range 1 to 30,000 before the first -C call to JRAND, and not altered between -C subsequent calls (unless a sequence of -C random numbers is to be repeated by -C reinitializing the seeds). -C -C On output: -C -C IX,IY,IZ = Updated integer seeds. -C -C JRAND = Random integer in the range 1 to N. -C -C Reference: B. A. Wichmann and I. D. Hill, "An Efficient -C and Portable Pseudo-random Number Generator", -C Applied Statistics, Vol. 31, No. 2, 1982, -C pp. 188-190. -C -C Modules required by JRAND: None -C -C Intrinsic functions called by JRAND: INT, MOD, REAL -C -C*********************************************************** -C - REAL U, X -C -C Local parameters: -C -C U = Pseudo-random number uniformly distributed in the -C interval (0,1). -C X = Pseudo-random number in the range 0 to 3 whose frac- -C tional part is U. -C - IX = MOD(171*IX,30269) - IY = MOD(172*IY,30307) - IZ = MOD(170*IZ,30323) - X = (REAL(IX)/30269.) + (REAL(IY)/30307.) + - . (REAL(IZ)/30323.) - U = X - INT(X) - JRAND = REAL(N)*U + 1. - RETURN - END - LOGICAL FUNCTION LEFT (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) - REAL X1, Y1, Z1, X2, Y2, Z2, X0, Y0, Z0 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function determines whether node N0 is in the -C (closed) left hemisphere defined by the plane containing -C N1, N2, and the origin, where left is defined relative to -C an observer at N1 facing N2. -C -C -C On input: -C -C X1,Y1,Z1 = Coordinates of N1. -C -C X2,Y2,Z2 = Coordinates of N2. -C -C X0,Y0,Z0 = Coordinates of N0. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LEFT = TRUE if and only if N0 is in the closed -C left hemisphere. -C -C Modules required by LEFT: None -C -C*********************************************************** -C -C LEFT = TRUE iff = det(N0,N1,N2) .GE. 0. -C - LEFT = X0*(Y1*Z2-Y2*Z1) - Y0*(X1*Z2-X2*Z1) + - . Z0*(X1*Y2-X2*Y1) .GE. 0. - RETURN - END - INTEGER FUNCTION LSTPTR (LPL,NB,LIST,LPTR) - INTEGER LPL, NB, LIST(*), LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the index (LIST pointer) of NB in -C the adjacency list for N0, where LPL = LEND(N0). -C -C This function is identical to the similarly named -C function in TRIPACK. -C -C -C On input: -C -C LPL = LEND(N0) -C -C NB = Index of the node whose pointer is to be re- -C turned. NB must be connected to N0. -C -C LIST,LPTR = Data structure defining the triangula- -C tion. Refer to Subroutine TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LSTPTR = Pointer such that LIST(LSTPTR) = NB or -C LIST(LSTPTR) = -NB, unless NB is not a -C neighbor of N0, in which case LSTPTR = LPL. -C -C Modules required by LSTPTR: None -C -C*********************************************************** -C - INTEGER LP, ND -C -C Local parameters: -C -C LP = LIST pointer -C ND = Nodal index -C - LP = LPTR(LPL) - 1 ND = LIST(LP) - IF (ND .EQ. NB) GO TO 2 - LP = LPTR(LP) - IF (LP .NE. 
LPL) GO TO 1 -C - 2 LSTPTR = LP - RETURN - END - INTEGER FUNCTION NBCNT (LPL,LPTR) - INTEGER LPL, LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the number of neighbors of a node -C N0 in a triangulation created by Subroutine TRMESH. -C -C This function is identical to the similarly named -C function in TRIPACK. -C -C -C On input: -C -C LPL = LIST pointer to the last neighbor of N0 -- -C LPL = LEND(N0). -C -C LPTR = Array of pointers associated with LIST. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NBCNT = Number of neighbors of N0. -C -C Modules required by NBCNT: None -C -C*********************************************************** -C - INTEGER K, LP -C -C Local parameters: -C -C K = Counter for computing the number of neighbors -C LP = LIST pointer -C - LP = LPL - K = 1 -C - 1 LP = LPTR(LP) - IF (LP .EQ. LPL) GO TO 2 - K = K + 1 - GO TO 1 -C - 2 NBCNT = K - RETURN - END - INTEGER FUNCTION NEARND (P,IST,N,X,Y,Z,LIST,LPTR, - . LEND, AL) - INTEGER IST, N, LIST(*), LPTR(*), LEND(N) - REAL P(3), X(N), Y(N), Z(N), AL -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a point P on the surface of the unit sphere and a -C Delaunay triangulation created by Subroutine TRMESH, this -C function returns the index of the nearest triangulation -C node to P. -C -C The algorithm consists of implicitly adding P to the -C triangulation, finding the nearest neighbor to P, and -C implicitly deleting P from the triangulation. Thus, it -C is based on the fact that, if P is a node in a Delaunay -C triangulation, the nearest node to P is a neighbor of P. 
-C -C -C On input: -C -C P = Array of length 3 containing the Cartesian coor- -C dinates of the point P to be located relative to -C the triangulation. It is assumed without a test -C that P(1)**2 + P(2)**2 + P(3)**2 = 1. -C -C IST = Index of a node at which TRFIND begins the -C search. Search time depends on the proximity -C of this node to P. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NEARND = Nodal index of the nearest node to P, or 0 -C if N < 3 or the triangulation data struc- -C ture is invalid. -C -C AL = Arc length (angular distance in radians) be- -C tween P and NEARND unless NEARND = 0. -C -C Note that the number of candidates for NEARND -C (neighbors of P) is limited to LMAX defined in -C the PARAMETER statement below. -C -C Modules required by NEARND: JRAND, LSTPTR, TRFIND, STORE -C -C Intrinsic functions called by NEARND: ABS, ACOS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER LMAX - PARAMETER (LMAX=25) - INTEGER I1, I2, I3, L, LISTP(LMAX), LP, LP1, LP2, - . LPL, LPTRP(LMAX), N1, N2, N3, NN, NR, NST - REAL B1, B2, B3, DS1, DSR, DX1, DX2, DX3, DY1, - . 
DY2, DY3, DZ1, DZ2, DZ3 -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND -C DS1 = (Negative cosine of the) distance from P to N1 -C DSR = (Negative cosine of the) distance from P to NR -C DX1,..DZ3 = Components of vectors used by the swap test -C I1,I2,I3 = Nodal indexes of a triangle containing P, or -C the rightmost (I1) and leftmost (I2) visible -C boundary nodes as viewed from P -C L = Length of LISTP/LPTRP and number of neighbors -C of P -C LMAX = Maximum value of L -C LISTP = Indexes of the neighbors of P -C LPTRP = Array of pointers in 1-1 correspondence with -C LISTP elements -C LP = LIST pointer to a neighbor of N1 and LISTP -C pointer -C LP1,LP2 = LISTP indexes (pointers) -C LPL = Pointer to the last neighbor of N1 -C N1 = Index of a node visible from P -C N2 = Index of an endpoint of an arc opposite P -C N3 = Index of the node opposite N1->N2 -C NN = Local copy of N -C NR = Index of a candidate for the nearest node to P -C NST = Index of the node at which TRFIND begins the -C search -C -C -C Store local parameters and test for N invalid. -C - NN = N - IF (NN .LT. 3) GO TO 6 - NST = IST - IF (NST .LT. 1 .OR. NST .GT. NN) NST = 1 -C -C Find a triangle (I1,I2,I3) containing P, or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from P. -C - CALL TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear nodes. -C - IF (I1 .EQ. 0) GO TO 6 -C -C Store the linked list of 'neighbors' of P in LISTP and -C LPTRP. I1 is the first neighbor, and 0 is stored as -C the last neighbor if P is not contained in a triangle. -C L is the length of LISTP and LPTRP, and is limited to -C LMAX. -C - IF (I3 .NE. 0) THEN - LISTP(1) = I1 - LPTRP(1) = 2 - LISTP(2) = I2 - LPTRP(2) = 3 - LISTP(3) = I3 - LPTRP(3) = 1 - L = 3 - ELSE - N1 = I1 - L = 1 - LP1 = 2 - LISTP(L) = N1 - LPTRP(L) = LP1 -C -C Loop on the ordered sequence of visible boundary nodes -C N1 from I1 to I2. 
-C - 1 LPL = LEND(N1) - N1 = -LIST(LPL) - L = LP1 - LP1 = L+1 - LISTP(L) = N1 - LPTRP(L) = LP1 - IF (N1 .NE. I2 .AND. LP1 .LT. LMAX) GO TO 1 - L = LP1 - LISTP(L) = 0 - LPTRP(L) = 1 - ENDIF -C -C Initialize variables for a loop on arcs N1-N2 opposite P -C in which new 'neighbors' are 'swapped' in. N1 follows -C N2 as a neighbor of P, and LP1 and LP2 are the LISTP -C indexes of N1 and N2. -C - LP2 = 1 - N2 = I1 - LP1 = LPTRP(1) - N1 = LISTP(LP1) -C -C Begin loop: find the node N3 opposite N1->N2. -C - 2 LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 3 - LP = LPTR(LP) - N3 = ABS(LIST(LP)) -C -C Swap test: Exit the loop if L = LMAX. -C - IF (L .EQ. LMAX) GO TO 4 - DX1 = X(N1) - P(1) - DY1 = Y(N1) - P(2) - DZ1 = Z(N1) - P(3) -C - DX2 = X(N2) - P(1) - DY2 = Y(N2) - P(2) - DZ2 = Z(N2) - P(3) -C - DX3 = X(N3) - P(1) - DY3 = Y(N3) - P(2) - DZ3 = Z(N3) - P(3) - IF ( DX3*(DY2*DZ1 - DY1*DZ2) - - . DY3*(DX2*DZ1 - DX1*DZ2) + - . DZ3*(DX2*DY1 - DX1*DY2) .LE. 0. ) GO TO 3 -C -C Swap: Insert N3 following N2 in the adjacency list for P. -C The two new arcs opposite P must be tested. -C - L = L+1 - LPTRP(LP2) = L - LISTP(L) = N3 - LPTRP(L) = LP1 - LP1 = L - N1 = N3 - GO TO 2 -C -C No swap: Advance to the next arc and test for termination -C on N1 = I1 (LP1 = 1) or N1 followed by 0. -C - 3 IF (LP1 .EQ. 1) GO TO 4 - LP2 = LP1 - N2 = N1 - LP1 = LPTRP(LP1) - N1 = LISTP(LP1) - IF (N1 .EQ. 0) GO TO 4 - GO TO 2 -C -C Set NR and DSR to the index of the nearest node to P and -C an increasing function (negative cosine) of its distance -C from P, respectively. -C - 4 NR = I1 - DSR = -(X(NR)*P(1) + Y(NR)*P(2) + Z(NR)*P(3)) - DO 5 LP = 2,L - N1 = LISTP(LP) - IF (N1 .EQ. 0) GO TO 5 - DS1 = -(X(N1)*P(1) + Y(N1)*P(2) + Z(N1)*P(3)) - IF (DS1 .LT. DSR) THEN - NR = N1 - DSR = DS1 - ENDIF - 5 CONTINUE - DSR = -DSR - IF (DSR .GT. 1.0) DSR = 1.0 - AL = ACOS(DSR) - NEARND = NR - RETURN -C -C Invalid input. 
-C - 6 NEARND = 0 - RETURN - END - SUBROUTINE OPTIM (X,Y,Z,NA, LIST,LPTR,LEND,NIT, - . IWK, IER) - INTEGER NA, LIST(*), LPTR(*), LEND(*), NIT, IWK(2,NA), - . IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a set of NA triangulation arcs, this subroutine -C optimizes the portion of the triangulation consisting of -C the quadrilaterals (pairs of adjacent triangles) which -C have the arcs as diagonals by applying the circumcircle -C test and appropriate swaps to the arcs. -C -C An iteration consists of applying the swap test and -C swaps to all NA arcs in the order in which they are -C stored. The iteration is repeated until no swap occurs -C or NIT iterations have been performed. The bound on the -C number of iterations may be necessary to prevent an -C infinite loop caused by cycling (reversing the effect of a -C previous swap) due to floating point inaccuracy when four -C or more nodes are nearly cocircular. -C -C -C On input: -C -C X,Y,Z = Arrays containing the nodal coordinates. -C -C NA = Number of arcs in the set. NA .GE. 0. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C NIT = Maximum number of iterations to be performed. -C NIT = 4*NA should be sufficient. NIT .GE. 1. -C -C IWK = Integer array dimensioned 2 by NA containing -C the nodal indexes of the arc endpoints (pairs -C of endpoints are stored in columns). -C -C On output: -C -C LIST,LPTR,LEND = Updated triangulation data struc- -C ture reflecting the swaps. -C -C NIT = Number of iterations performed. -C -C IWK = Endpoint indexes of the new set of arcs -C reflecting the swaps. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. 
-C IER = 1 if a swap occurred on the last of -C MAXIT iterations, where MAXIT is the -C value of NIT on input. The new set -C of arcs is not necessarily optimal -C in this case. -C IER = 2 if NA < 0 or NIT < 1 on input. -C IER = 3 if IWK(2,I) is not a neighbor of -C IWK(1,I) for some I in the range 1 -C to NA. A swap may have occurred in -C this case. -C IER = 4 if a zero pointer was returned by -C Subroutine SWAP. -C -C Modules required by OPTIM: LSTPTR, SWAP, SWPTST -C -C Intrinsic function called by OPTIM: ABS -C -C*********************************************************** -C - INTEGER I, IO1, IO2, ITER, LP, LP21, LPL, LPP, MAXIT, - . N1, N2, NNA - LOGICAL SWPTST - LOGICAL SWP -C -C Local parameters: -C -C I = Column index for IWK -C IO1,IO2 = Nodal indexes of the endpoints of an arc in IWK -C ITER = Iteration count -C LP = LIST pointer -C LP21 = Parameter returned by SWAP (not used) -C LPL = Pointer to the last neighbor of IO1 -C LPP = Pointer to the node preceding IO2 as a neighbor -C of IO1 -C MAXIT = Input value of NIT -C N1,N2 = Nodes opposite IO1->IO2 and IO2->IO1, -C respectively -C NNA = Local copy of NA -C SWP = Flag set to TRUE iff a swap occurs in the -C optimization loop -C - NNA = NA - MAXIT = NIT - IF (NNA .LT. 0 .OR. MAXIT .LT. 1) GO TO 7 -C -C Initialize iteration count ITER and test for NA = 0. -C - ITER = 0 - IF (NNA .EQ. 0) GO TO 5 -C -C Top of loop -- -C SWP = TRUE iff a swap occurred in the current iteration. -C - 1 IF (ITER .EQ. MAXIT) GO TO 6 - ITER = ITER + 1 - SWP = .FALSE. -C -C Inner loop on arcs IO1-IO2 -- -C - DO 4 I = 1,NNA - IO1 = IWK(1,I) - IO2 = IWK(2,I) -C -C Set N1 and N2 to the nodes opposite IO1->IO2 and -C IO2->IO1, respectively. Determine the following: -C -C LPL = pointer to the last neighbor of IO1, -C LP = pointer to IO2 as a neighbor of IO1, and -C LPP = pointer to the node N2 preceding IO2. -C - LPL = LEND(IO1) - LPP = LPL - LP = LPTR(LPP) - 2 IF (LIST(LP) .EQ. IO2) GO TO 3 - LPP = LP - LP = LPTR(LPP) - IF (LP .NE. 
LPL) GO TO 2 -C -C IO2 should be the last neighbor of IO1. Test for no -C arc and bypass the swap test if IO1 is a boundary -C node. -C - IF (ABS(LIST(LP)) .NE. IO2) GO TO 8 - IF (LIST(LP) .LT. 0) GO TO 4 -C -C Store N1 and N2, or bypass the swap test if IO1 is a -C boundary node and IO2 is its first neighbor. -C - 3 N2 = LIST(LPP) - IF (N2 .LT. 0) GO TO 4 - LP = LPTR(LP) - N1 = ABS(LIST(LP)) -C -C Test IO1-IO2 for a swap, and update IWK if necessary. -C - IF ( .NOT. SWPTST(N1,N2,IO1,IO2,X,Y,Z) ) GO TO 4 - CALL SWAP (N1,N2,IO1,IO2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) GO TO 9 - SWP = .TRUE. - IWK(1,I) = N1 - IWK(2,I) = N2 - 4 CONTINUE - IF (SWP) GO TO 1 -C -C Successful termination. -C - 5 NIT = ITER - IER = 0 - RETURN -C -C MAXIT iterations performed without convergence. -C - 6 NIT = MAXIT - IER = 1 - RETURN -C -C Invalid input parameter. -C - 7 NIT = 0 - IER = 2 - RETURN -C -C IO2 is not a neighbor of IO1. -C - 8 NIT = ITER - IER = 3 - RETURN -C -C Zero pointer returned by SWAP. -C - 9 NIT = ITER - IER = 4 - RETURN - END - SUBROUTINE SCOORD (PX,PY,PZ, PLAT,PLON,PNRM) - REAL PX, PY, PZ, PLAT, PLON, PNRM -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 08/27/90 -C -C This subroutine converts a point P from Cartesian coor- -C dinates to spherical coordinates. -C -C -C On input: -C -C PX,PY,PZ = Cartesian coordinates of P. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C PLAT = Latitude of P in the range -PI/2 to PI/2, or -C 0 if PNRM = 0. PLAT should be scaled by -C 180/PI to obtain the value in degrees. -C -C PLON = Longitude of P in the range -PI to PI, or 0 -C if P lies on the Z-axis. PLON should be -C scaled by 180/PI to obtain the value in -C degrees. -C -C PNRM = Magnitude (Euclidean norm) of P. 
-C -C Modules required by SCOORD: None -C -C Intrinsic functions called by SCOORD: ASIN, ATAN2, SQRT -C -C*********************************************************** -C - PNRM = SQRT(PX*PX + PY*PY + PZ*PZ) - IF (PX .NE. 0. .OR. PY .NE. 0.) THEN - PLON = ATAN2(PY,PX) - ELSE - PLON = 0. - ENDIF - IF (PNRM .NE. 0.) THEN - PLAT = ASIN(PZ/PNRM) - ELSE - PLAT = 0. - ENDIF - RETURN - END - REAL FUNCTION STORE (X) - REAL X -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 05/09/92 -C -C This function forces its argument X to be stored in a -C memory location, thus providing a means of determining -C floating point number characteristics (such as the machine -C precision) when it is necessary to avoid computation in -C high precision registers. -C -C -C On input: -C -C X = Value to be stored. -C -C X is not altered by this function. -C -C On output: -C -C STORE = Value of X after it has been stored and -C possibly truncated or rounded to the single -C precision word length. -C -C Modules required by STORE: None -C -C*********************************************************** -C - REAL Y - COMMON/STCOM/Y - Y = X - STORE = Y - RETURN - END - SUBROUTINE SWAP (IN1,IN2,IO1,IO2, LIST,LPTR, - . LEND, LP21) - INTEGER IN1, IN2, IO1, IO2, LIST(*), LPTR(*), LEND(*), - . LP21 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/22/98 -C -C Given a triangulation of a set of points on the unit -C sphere, this subroutine replaces a diagonal arc in a -C strictly convex quadrilateral (defined by a pair of adja- -C cent triangles) with the other diagonal. Equivalently, a -C pair of adjacent triangles is replaced by another pair -C having the same union. 
-C -C -C On input: -C -C IN1,IN2,IO1,IO2 = Nodal indexes of the vertices of -C the quadrilateral. IO1-IO2 is re- -C placed by IN1-IN2. (IO1,IO2,IN1) -C and (IO2,IO1,IN2) must be trian- -C gles on input. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LEND = Data structure updated with the -C swap -- triangles (IO1,IO2,IN1) and -C (IO2,IO1,IN2) are replaced by -C (IN1,IN2,IO2) and (IN2,IN1,IO1) -C unless LP21 = 0. -C -C LP21 = Index of IN1 as a neighbor of IN2 after the -C swap is performed unless IN1 and IN2 are -C adjacent on input, in which case LP21 = 0. -C -C Module required by SWAP: LSTPTR -C -C Intrinsic function called by SWAP: ABS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER LP, LPH, LPSAV -C -C Local parameters: -C -C LP,LPH,LPSAV = LIST pointers -C -C -C Test for IN1 and IN2 adjacent. -C - LP = LSTPTR(LEND(IN1),IN2,LIST,LPTR) - IF (ABS(LIST(LP)) .EQ. IN2) THEN - LP21 = 0 - RETURN - ENDIF -C -C Delete IO2 as a neighbor of IO1. -C - LP = LSTPTR(LEND(IO1),IN2,LIST,LPTR) - LPH = LPTR(LP) - LPTR(LP) = LPTR(LPH) -C -C If IO2 is the last neighbor of IO1, make IN2 the -C last neighbor. -C - IF (LEND(IO1) .EQ. LPH) LEND(IO1) = LP -C -C Insert IN2 as a neighbor of IN1 following IO1 -C using the hole created above. -C - LP = LSTPTR(LEND(IN1),IO1,LIST,LPTR) - LPSAV = LPTR(LP) - LPTR(LP) = LPH - LIST(LPH) = IN2 - LPTR(LPH) = LPSAV -C -C Delete IO1 as a neighbor of IO2. -C - LP = LSTPTR(LEND(IO2),IN1,LIST,LPTR) - LPH = LPTR(LP) - LPTR(LP) = LPTR(LPH) -C -C If IO1 is the last neighbor of IO2, make IN1 the -C last neighbor. -C - IF (LEND(IO2) .EQ. LPH) LEND(IO2) = LP -C -C Insert IN1 as a neighbor of IN2 following IO2. 
-C - LP = LSTPTR(LEND(IN2),IO2,LIST,LPTR) - LPSAV = LPTR(LP) - LPTR(LP) = LPH - LIST(LPH) = IN1 - LPTR(LPH) = LPSAV - LP21 = LPH - RETURN - END - LOGICAL FUNCTION SWPTST (N1,N2,N3,N4,X,Y,Z) - INTEGER N1, N2, N3, N4 - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 03/29/91 -C -C This function decides whether or not to replace a -C diagonal arc in a quadrilateral with the other diagonal. -C The decision will be to swap (SWPTST = TRUE) if and only -C if N4 lies above the plane (in the half-space not contain- -C ing the origin) defined by (N1,N2,N3), or equivalently, if -C the projection of N4 onto this plane is interior to the -C circumcircle of (N1,N2,N3). The decision will be for no -C swap if the quadrilateral is not strictly convex. -C -C -C On input: -C -C N1,N2,N3,N4 = Indexes of the four nodes defining the -C quadrilateral with N1 adjacent to N2, -C and (N1,N2,N3) in counterclockwise -C order. The arc connecting N1 to N2 -C should be replaced by an arc connec- -C ting N3 to N4 if SWPTST = TRUE. Refer -C to Subroutine SWAP. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. (X(I),Y(I),Z(I)) -C define node I for I = N1, N2, N3, and N4. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C SWPTST = TRUE if and only if the arc connecting N1 -C and N2 should be swapped for an arc con- -C necting N3 and N4. -C -C Modules required by SWPTST: None -C -C*********************************************************** -C - REAL DX1, DX2, DX3, DY1, DY2, DY3, DZ1, DZ2, DZ3, - . 
X4, Y4, Z4 -C -C Local parameters: -C -C DX1,DY1,DZ1 = Coordinates of N4->N1 -C DX2,DY2,DZ2 = Coordinates of N4->N2 -C DX3,DY3,DZ3 = Coordinates of N4->N3 -C X4,Y4,Z4 = Coordinates of N4 -C - X4 = X(N4) - Y4 = Y(N4) - Z4 = Z(N4) - DX1 = X(N1) - X4 - DX2 = X(N2) - X4 - DX3 = X(N3) - X4 - DY1 = Y(N1) - Y4 - DY2 = Y(N2) - Y4 - DY3 = Y(N3) - Y4 - DZ1 = Z(N1) - Z4 - DZ2 = Z(N2) - Z4 - DZ3 = Z(N3) - Z4 -C -C N4 lies above the plane of (N1,N2,N3) iff N3 lies above -C the plane of (N2,N1,N4) iff Det(N3-N4,N2-N4,N1-N4) = -C (N3-N4,N2-N4 X N1-N4) > 0. -C - SWPTST = DX3*(DY2*DZ1 - DY1*DZ2) - . -DY3*(DX2*DZ1 - DX1*DZ2) - . +DZ3*(DX2*DY1 - DX1*DY2) .GT. 0. - RETURN - END - SUBROUTINE TRANS (N,RLAT,RLON, X,Y,Z) - INTEGER N - REAL RLAT(N), RLON(N), X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 04/08/90 -C -C This subroutine transforms spherical coordinates into -C Cartesian coordinates on the unit sphere for input to -C Subroutine TRMESH. Storage for X and Y may coincide with -C storage for RLAT and RLON if the latter need not be saved. -C -C -C On input: -C -C N = Number of nodes (points on the unit sphere) -C whose coordinates are to be transformed. -C -C RLAT = Array of length N containing latitudinal -C coordinates of the nodes in radians. -C -C RLON = Array of length N containing longitudinal -C coordinates of the nodes in radians. -C -C The above parameters are not altered by this routine. -C -C X,Y,Z = Arrays of length at least N. -C -C On output: -C -C X,Y,Z = Cartesian coordinates in the range -1 to 1. -C X(I)**2 + Y(I)**2 + Z(I)**2 = 1 for I = 1 -C to N. 
-C -C Modules required by TRANS: None -C -C Intrinsic functions called by TRANS: COS, SIN -C -C*********************************************************** -C - INTEGER I, NN - REAL COSPHI, PHI, THETA -C -C Local parameters: -C -C COSPHI = cos(PHI) -C I = DO-loop index -C NN = Local copy of N -C PHI = Latitude -C THETA = Longitude -C - NN = N - DO 1 I = 1,NN - PHI = RLAT(I) - THETA = RLON(I) - COSPHI = COS(PHI) - X(I) = COSPHI*COS(THETA) - Y(I) = COSPHI*SIN(THETA) - Z(I) = SIN(PHI) - 1 CONTINUE - RETURN - END - SUBROUTINE TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1, - . B2,B3,I1,I2,I3) - INTEGER NST, N, LIST(*), LPTR(*), LEND(N), I1, I2, I3 - REAL P(3), X(N), Y(N), Z(N), B1, B2, B3 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine locates a point P relative to a triangu- -C lation created by Subroutine TRMESH. If P is contained in -C a triangle, the three vertex indexes and barycentric coor- -C dinates are returned. Otherwise, the indexes of the -C visible boundary nodes are returned. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to P. -C -C P = Array of length 3 containing the x, y, and z -C coordinates (in that order) of the point P to be -C located. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the triangulation nodes (unit -C vectors). (X(I),Y(I),Z(I)) defines node I -C for I = 1 to N. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C B1,B2,B3 = Unnormalized barycentric coordinates of -C the central projection of P onto the un- -C derlying planar triangle if P is in the -C convex hull of the nodes. 
These parame- -C ters are not altered if I1 = 0. -C -C I1,I2,I3 = Counterclockwise-ordered vertex indexes -C of a triangle containing P if P is con- -C tained in a triangle. If P is not in the -C convex hull of the nodes, I1 and I2 are -C the rightmost and leftmost (boundary) -C nodes that are visible from P, and -C I3 = 0. (If all boundary nodes are vis- -C ible from P, then I1 and I2 coincide.) -C I1 = I2 = I3 = 0 if P and all of the -C nodes are coplanar (lie on a common great -C circle. -C -C Modules required by TRFIND: JRAND, LSTPTR, STORE -C -C Intrinsic function called by TRFIND: ABS -C -C*********************************************************** -C - INTEGER JRAND, LSTPTR - INTEGER IX, IY, IZ, LP, N0, N1, N1S, N2, N2S, N3, N4, - . NEXT, NF, NL - REAL STORE - REAL DET, EPS, PTN1, PTN2, Q(3), S12, TOL, XP, YP, - . ZP - REAL X0, X1, X2, Y0, Y1, Y2, Z0, Z1, Z2 -C - SAVE IX, IY, IZ - DATA IX/1/, IY/2/, IZ/3/ -C -C Local parameters: -C -C EPS = Machine precision -C IX,IY,IZ = Integer seeds for JRAND -C LP = LIST pointer -C N0,N1,N2 = Nodes in counterclockwise order defining a -C cone (with vertex N0) containing P, or end- -C points of a boundary edge such that P Right -C N1->N2 -C N1S,N2S = Initially-determined values of N1 and N2 -C N3,N4 = Nodes opposite N1->N2 and N2->N1, respectively -C NEXT = Candidate for I1 or I2 when P is exterior -C NF,NL = First and last neighbors of N0, or first -C (rightmost) and last (leftmost) nodes -C visible from P when P is exterior to the -C triangulation -C PTN1 = Scalar product -C PTN2 = Scalar product -C Q = (N2 X N1) X N2 or N1 X (N2 X N1) -- used in -C the boundary traversal when P is exterior -C S12 = Scalar product -C TOL = Tolerance (multiple of EPS) defining an upper -C bound on the magnitude of a negative bary- -C centric coordinate (B1 or B2) for P in a -C triangle -- used to avoid an infinite number -C of restarts with 0 <= B3 < EPS and B1 < 0 or -C B2 < 0 but small in magnitude -C XP,YP,ZP = Local variables 
containing P(1), P(2), and P(3) -C X0,Y0,Z0 = Dummy arguments for DET -C X1,Y1,Z1 = Dummy arguments for DET -C X2,Y2,Z2 = Dummy arguments for DET -C -C Statement function: -C -C DET(X1,...,Z0) .GE. 0 if and only if (X0,Y0,Z0) is in the -C (closed) left hemisphere defined by -C the plane containing (0,0,0), -C (X1,Y1,Z1), and (X2,Y2,Z2), where -C left is defined relative to an ob- -C server at (X1,Y1,Z1) facing -C (X2,Y2,Z2). -C - DET (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) = X0*(Y1*Z2-Y2*Z1) - . - Y0*(X1*Z2-X2*Z1) + Z0*(X1*Y2-X2*Y1) -C -C Initialize variables. -C - XP = P(1) - YP = P(2) - ZP = P(3) - N0 = NST - IF (N0 .LT. 1 .OR. N0 .GT. N) - . N0 = JRAND(N, IX,IY,IZ ) -C -C Compute the relative machine precision EPS and TOL. -C - EPS = 1.E0 - 1 EPS = EPS/2.E0 - IF (STORE(EPS+1.E0) .GT. 1.E0) GO TO 1 - EPS = 2.E0*EPS - TOL = 100.E0*EPS -C -C Set NF and NL to the first and last neighbors of N0, and -C initialize N1 = NF. -C - 2 LP = LEND(N0) - NL = LIST(LP) - LP = LPTR(LP) - NF = LIST(LP) - N1 = NF -C -C Find a pair of adjacent neighbors N1,N2 of N0 that define -C a wedge containing P: P LEFT N0->N1 and P RIGHT N0->N2. -C - IF (NL .GT. 0) THEN -C -C N0 is an interior node. Find N1. -C - 3 IF ( DET(X(N0),Y(N0),Z(N0),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) .LT. 0. ) THEN - LP = LPTR(LP) - N1 = LIST(LP) - IF (N1 .EQ. NL) GO TO 6 - GO TO 3 - ENDIF - ELSE -C -C N0 is a boundary node. Test for P exterior. -C - NL = -NL - IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF), - . XP,YP,ZP) .LT. 0. ) THEN -C -C P is to the right of the boundary edge N0->NF. -C - N1 = N0 - N2 = NF - GO TO 9 - ENDIF - IF ( DET(X(NL),Y(NL),Z(NL),X(N0),Y(N0),Z(N0), - . XP,YP,ZP) .LT. 0. ) THEN -C -C P is to the right of the boundary edge NL->N0. -C - N1 = NL - N2 = N0 - GO TO 9 - ENDIF - ENDIF -C -C P is to the left of arcs N0->N1 and NL->N0. Set N2 to the -C next neighbor of N0 (following N1). -C - 4 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - IF ( DET(X(N0),Y(N0),Z(N0),X(N2),Y(N2),Z(N2), - . XP,YP,ZP) .LT. 0. 
) GO TO 7 - N1 = N2 - IF (N1 .NE. NL) GO TO 4 - IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF), - . XP,YP,ZP) .LT. 0. ) GO TO 6 -C -C P is left of or on arcs N0->NB for all neighbors NB -C of N0. Test for P = +/-N0. -C - IF (STORE(ABS(X(N0)*XP + Y(N0)*YP + Z(N0)*ZP)) - . .LT. 1.0-4.0*EPS) THEN -C -C All points are collinear iff P Left NB->N0 for all -C neighbors NB of N0. Search the neighbors of N0. -C Note: N1 = NL and LP points to NL. -C - 5 IF ( DET(X(N1),Y(N1),Z(N1),X(N0),Y(N0),Z(N0), - . XP,YP,ZP) .GE. 0. ) THEN - LP = LPTR(LP) - N1 = ABS(LIST(LP)) - IF (N1 .EQ. NL) GO TO 14 - GO TO 5 - ENDIF - ENDIF -C -C P is to the right of N1->N0, or P = +/-N0. Set N0 to N1 -C and start over. -C - N0 = N1 - GO TO 2 -C -C P is between arcs N0->N1 and N0->NF. -C - 6 N2 = NF -C -C P is contained in a wedge defined by geodesics N0-N1 and -C N0-N2, where N1 is adjacent to N2. Save N1 and N2 to -C test for cycling. -C - 7 N3 = N0 - N1S = N1 - N2S = N2 -C -C Top of edge-hopping loop: -C - 8 B3 = DET(X(N1),Y(N1),Z(N1),X(N2),Y(N2),Z(N2),XP,YP,ZP) - IF (B3 .LT. 0.) THEN -C -C Set N4 to the first neighbor of N2 following N1 (the -C node opposite N2->N1) unless N1->N2 is a boundary arc. -C - LP = LSTPTR(LEND(N2),N1,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 9 - LP = LPTR(LP) - N4 = ABS(LIST(LP)) -C -C Define a new arc N1->N2 which intersects the geodesic -C N0-P. -C - IF ( DET(X(N0),Y(N0),Z(N0),X(N4),Y(N4),Z(N4), - . XP,YP,ZP) .LT. 0. ) THEN - N3 = N2 - N2 = N4 - N1S = N1 - IF (N2 .NE. N2S .AND. N2 .NE. N0) GO TO 8 - ELSE - N3 = N1 - N1 = N4 - N2S = N2 - IF (N1 .NE. N1S .AND. N1 .NE. N0) GO TO 8 - ENDIF -C -C The starting node N0 or edge N1-N2 was encountered -C again, implying a cycle (infinite loop). Restart -C with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF -C -C P is in (N1,N2,N3) unless N0, N1, N2, and P are collinear -C or P is close to -N0. -C - IF (B3 .GE. EPS) THEN -C -C B3 .NE. 0. -C - B1 = DET(X(N2),Y(N2),Z(N2),X(N3),Y(N3),Z(N3), - . 
XP,YP,ZP) - B2 = DET(X(N3),Y(N3),Z(N3),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) - IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN -C -C Restart with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF - ELSE -C -C B3 = 0 and thus P lies on N1->N2. Compute -C B1 = Det(P,N2 X N1,N2) and B2 = Det(P,N1,N2 X N1). -C - B3 = 0. - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - PTN1 = XP*X(N1) + YP*Y(N1) + ZP*Z(N1) - PTN2 = XP*X(N2) + YP*Y(N2) + ZP*Z(N2) - B1 = PTN1 - S12*PTN2 - B2 = PTN2 - S12*PTN1 - IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN -C -C Restart with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF - ENDIF -C -C P is in (N1,N2,N3). -C - I1 = N1 - I2 = N2 - I3 = N3 - IF (B1 .LT. 0.0) B1 = 0.0 - IF (B2 .LT. 0.0) B2 = 0.0 - RETURN -C -C P Right N1->N2, where N1->N2 is a boundary edge. -C Save N1 and N2, and set NL = 0 to indicate that -C NL has not yet been found. -C - 9 N1S = N1 - N2S = N2 - NL = 0 -C -C Counterclockwise Boundary Traversal: -C - 10 LP = LEND(N2) - LP = LPTR(LP) - NEXT = LIST(LP) - IF ( DET(X(N2),Y(N2),Z(N2),X(NEXT),Y(NEXT),Z(NEXT), - . XP,YP,ZP) .GE. 0. ) THEN -C -C N2 is the rightmost visible node if P Forward N2->N1 -C or NEXT Forward N2->N1. Set Q to (N2 X N1) X N2. -C - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - Q(1) = X(N1) - S12*X(N2) - Q(2) = Y(N1) - S12*Y(N2) - Q(3) = Z(N1) - S12*Z(N2) - IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 11 - IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3) - . .GE. 0.) GO TO 11 -C -C N1, N2, NEXT, and P are nearly collinear, and N2 is -C the leftmost visible node. -C - NL = N2 - ENDIF -C -C Bottom of counterclockwise loop: -C - N1 = N2 - N2 = NEXT - IF (N2 .NE. N1S) GO TO 10 -C -C All boundary nodes are visible from P. -C - I1 = N1S - I2 = N1S - I3 = 0 - RETURN -C -C N2 is the rightmost visible node. -C - 11 NF = N2 - IF (NL .EQ. 0) THEN -C -C Restore initial values of N1 and N2, and begin the search -C for the leftmost visible node. 
-C - N2 = N2S - N1 = N1S -C -C Clockwise Boundary Traversal: -C - 12 LP = LEND(N1) - NEXT = -LIST(LP) - IF ( DET(X(NEXT),Y(NEXT),Z(NEXT),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) .GE. 0. ) THEN -C -C N1 is the leftmost visible node if P or NEXT is -C forward of N1->N2. Compute Q = N1 X (N2 X N1). -C - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - Q(1) = X(N2) - S12*X(N1) - Q(2) = Y(N2) - S12*Y(N1) - Q(3) = Z(N2) - S12*Z(N1) - IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 13 - IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3) - . .GE. 0.) GO TO 13 -C -C P, NEXT, N1, and N2 are nearly collinear and N1 is the -C rightmost visible node. -C - NF = N1 - ENDIF -C -C Bottom of clockwise loop: -C - N2 = N1 - N1 = NEXT - IF (N1 .NE. N1S) GO TO 12 -C -C All boundary nodes are visible from P. -C - I1 = N1 - I2 = N1 - I3 = 0 - RETURN -C -C N1 is the leftmost visible node. -C - 13 NL = N1 - ENDIF -C -C NF and NL have been found. -C - I1 = NF - I2 = NL - I3 = 0 - RETURN -C -C All points are collinear (coplanar). -C - 14 I1 = 0 - I2 = 0 - I3 = 0 - RETURN - END - SUBROUTINE TRLIST (N,LIST,LPTR,LEND,NROW, NT,LTRI,IER) - INTEGER N, LIST(*), LPTR(*), LEND(N), NROW, NT, - . LTRI(NROW,*), IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/20/96 -C -C This subroutine converts a triangulation data structure -C from the linked list created by Subroutine TRMESH to a -C triangle list. -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C LIST,LPTR,LEND = Linked list data structure defin- -C ing the triangulation. Refer to -C Subroutine TRMESH. -C -C NROW = Number of rows (entries per triangle) re- -C served for the triangle list LTRI. The value -C must be 6 if only the vertex indexes and -C neighboring triangle indexes are to be -C stored, or 9 if arc indexes are also to be -C assigned and stored. Refer to LTRI. 
-C -C The above parameters are not altered by this routine. -C -C LTRI = Integer array of length at least NROW*NT, -C where NT is at most 2N-4. (A sufficient -C length is 12N if NROW=6 or 18N if NROW=9.) -C -C On output: -C -C NT = Number of triangles in the triangulation unless -C IER .NE. 0, in which case NT = 0. NT = 2N-NB-2 -C if NB .GE. 3 or 2N-4 if NB = 0, where NB is the -C number of boundary nodes. -C -C LTRI = NROW by NT array whose J-th column contains -C the vertex nodal indexes (first three rows), -C neighboring triangle indexes (second three -C rows), and, if NROW = 9, arc indexes (last -C three rows) associated with triangle J for -C J = 1,...,NT. The vertices are ordered -C counterclockwise with the first vertex taken -C to be the one with smallest index. Thus, -C LTRI(2,J) and LTRI(3,J) are larger than -C LTRI(1,J) and index adjacent neighbors of -C node LTRI(1,J). For I = 1,2,3, LTRI(I+3,J) -C and LTRI(I+6,J) index the triangle and arc, -C respectively, which are opposite (not shared -C by) node LTRI(I,J), with LTRI(I+3,J) = 0 if -C LTRI(I+6,J) indexes a boundary arc. Vertex -C indexes range from 1 to N, triangle indexes -C from 0 to NT, and, if included, arc indexes -C from 1 to NA, where NA = 3N-NB-3 if NB .GE. 3 -C or 3N-6 if NB = 0. The triangles are or- -C dered on first (smallest) vertex indexes. -C -C IER = Error indicator. -C IER = 0 if no errors were encountered. -C IER = 1 if N or NROW is outside its valid -C range on input. -C IER = 2 if the triangulation data structure -C (LIST,LPTR,LEND) is invalid. Note, -C however, that these arrays are not -C completely tested for validity. -C -C Modules required by TRLIST: None -C -C Intrinsic function called by TRLIST: ABS -C -C*********************************************************** -C - INTEGER I, I1, I2, I3, ISV, J, KA, KN, KT, LP, LP2, - . 
LPL, LPLN1, N1, N2, N3, NM2 - LOGICAL ARCS -C -C Local parameters: -C -C ARCS = Logical variable with value TRUE iff are -C indexes are to be stored -C I,J = LTRI row indexes (1 to 3) associated with -C triangles KT and KN, respectively -C I1,I2,I3 = Nodal indexes of triangle KN -C ISV = Variable used to permute indexes I1,I2,I3 -C KA = Arc index and number of currently stored arcs -C KN = Index of the triangle that shares arc I1-I2 -C with KT -C KT = Triangle index and number of currently stored -C triangles -C LP = LIST pointer -C LP2 = Pointer to N2 as a neighbor of N1 -C LPL = Pointer to the last neighbor of I1 -C LPLN1 = Pointer to the last neighbor of N1 -C N1,N2,N3 = Nodal indexes of triangle KT -C NM2 = N-2 -C -C -C Test for invalid input parameters. -C - IF (N .LT. 3 .OR. (NROW .NE. 6 .AND. NROW .NE. 9)) - . GO TO 11 -C -C Initialize parameters for loop on triangles KT = (N1,N2, -C N3), where N1 < N2 and N1 < N3. -C -C ARCS = TRUE iff arc indexes are to be stored. -C KA,KT = Numbers of currently stored arcs and triangles. -C NM2 = Upper bound on candidates for N1. -C - ARCS = NROW .EQ. 9 - KA = 0 - KT = 0 - NM2 = N-2 -C -C Loop on nodes N1. -C - DO 9 N1 = 1,NM2 -C -C Loop on pairs of adjacent neighbors (N2,N3). LPLN1 points -C to the last neighbor of N1, and LP2 points to N2. -C - LPLN1 = LEND(N1) - LP2 = LPLN1 - 1 LP2 = LPTR(LP2) - N2 = LIST(LP2) - LP = LPTR(LP2) - N3 = ABS(LIST(LP)) - IF (N2 .LT. N1 .OR. N3 .LT. N1) GO TO 8 -C -C Add a new triangle KT = (N1,N2,N3). -C - KT = KT + 1 - LTRI(1,KT) = N1 - LTRI(2,KT) = N2 - LTRI(3,KT) = N3 -C -C Loop on triangle sides (I2,I1) with neighboring triangles -C KN = (I1,I2,I3). -C - DO 7 I = 1,3 - IF (I .EQ. 1) THEN - I1 = N3 - I2 = N2 - ELSEIF (I .EQ. 2) THEN - I1 = N1 - I2 = N3 - ELSE - I1 = N2 - I2 = N1 - ENDIF -C -C Set I3 to the neighbor of I1 that follows I2 unless -C I2->I1 is a boundary arc. -C - LPL = LEND(I1) - LP = LPTR(LPL) - 2 IF (LIST(LP) .EQ. I2) GO TO 3 - LP = LPTR(LP) - IF (LP .NE. 
LPL) GO TO 2 -C -C I2 is the last neighbor of I1 unless the data structure -C is invalid. Bypass the search for a neighboring -C triangle if I2->I1 is a boundary arc. -C - IF (ABS(LIST(LP)) .NE. I2) GO TO 12 - KN = 0 - IF (LIST(LP) .LT. 0) GO TO 6 -C -C I2->I1 is not a boundary arc, and LP points to I2 as -C a neighbor of I1. -C - 3 LP = LPTR(LP) - I3 = ABS(LIST(LP)) -C -C Find J such that LTRI(J,KN) = I3 (not used if KN > KT), -C and permute the vertex indexes of KN so that I1 is -C smallest. -C - IF (I1 .LT. I2 .AND. I1 .LT. I3) THEN - J = 3 - ELSEIF (I2 .LT. I3) THEN - J = 2 - ISV = I1 - I1 = I2 - I2 = I3 - I3 = ISV - ELSE - J = 1 - ISV = I1 - I1 = I3 - I3 = I2 - I2 = ISV - ENDIF -C -C Test for KN > KT (triangle index not yet assigned). -C - IF (I1 .GT. N1) GO TO 7 -C -C Find KN, if it exists, by searching the triangle list in -C reverse order. -C - DO 4 KN = KT-1,1,-1 - IF (LTRI(1,KN) .EQ. I1 .AND. LTRI(2,KN) .EQ. - . I2 .AND. LTRI(3,KN) .EQ. I3) GO TO 5 - 4 CONTINUE - GO TO 7 -C -C Store KT as a neighbor of KN. -C - 5 LTRI(J+3,KN) = KT -C -C Store KN as a neighbor of KT, and add a new arc KA. -C - 6 LTRI(I+3,KT) = KN - IF (ARCS) THEN - KA = KA + 1 - LTRI(I+6,KT) = KA - IF (KN .NE. 0) LTRI(J+6,KN) = KA - ENDIF - 7 CONTINUE -C -C Bottom of loop on triangles. -C - 8 IF (LP2 .NE. LPLN1) GO TO 1 - 9 CONTINUE -C -C No errors encountered. -C - NT = KT - IER = 0 - RETURN -C -C Invalid input parameter. -C - 11 NT = 0 - IER = 1 - RETURN -C -C Invalid triangulation data structure: I1 is a neighbor of -C I2, but I2 is not a neighbor of I1. -C - 12 NT = 0 - IER = 2 - RETURN - END - SUBROUTINE TRLPRT (N,X,Y,Z,IFLAG,NROW,NT,LTRI,LOUT) - INTEGER N, IFLAG, NROW, NT, LTRI(NROW,NT), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 07/02/98 -C -C This subroutine prints the triangle list created by Sub- -C routine TRLIST and, optionally, the nodal coordinates -C (either latitude and longitude or Cartesian coordinates) -C on logical unit LOUT. The numbers of boundary nodes, -C triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. -C 3 .LE. N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C NROW = Number of rows (entries per triangle) re- -C served for the triangle list LTRI. The value -C must be 6 if only the vertex indexes and -C neighboring triangle indexes are stored, or 9 -C if arc indexes are also stored. -C -C NT = Number of triangles in the triangulation. -C 1 .LE. NT .LE. 9999. -C -C LTRI = NROW by NT array whose J-th column contains -C the vertex nodal indexes (first three rows), -C neighboring triangle indexes (second three -C rows), and, if NROW = 9, arc indexes (last -C three rows) associated with triangle J for -C J = 1,...,NT. -C -C LOUT = Logical unit number for output. If LOUT is -C not in the range 0 to 99, output is written -C to unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The triangle list and nodal coordinates (as specified by -C IFLAG) are written to unit LOUT. 
-C -C Modules required by TRLPRT: None -C -C*********************************************************** -C - INTEGER I, K, LUN, NA, NB, NL, NLMAX, NMAX - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = DO-loop, nodal index, and row index for LTRI -C K = DO-loop and triangle index -C LUN = Logical unit number for output -C NA = Number of triangulation arcs -C NB = Number of boundary nodes -C NL = Number of lines printed on the current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Maximum value of N and NT (4-digit format) -C - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test for invalid input. -C -C open(18,file='nodes.dat',STATUS= 'UNKNOWN') -C do ig = 1,N -C read(18,*) x(ig),y(ig),z(ig) -C enddo -C close(18) - - WRITE (LUN,100) N - NL = 3 - IF (N .LT. 3 .OR. N .GT. NMAX .OR. - . (NROW .NE. 6 .AND. NROW .NE. 9) .OR. - . NT .LT. 1 .OR. NT .GT. NMAX) THEN -C -C Print an error message and exit. -C - WRITE (LUN,110) N, NROW, NT - RETURN - ENDIF - IF (IFLAG .EQ. 0) THEN -C -C Print X, Y, and Z. -C - WRITE (LUN,101) - NL = 6 - DO 1 I = 1,N - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,103) I, X(I), Y(I), Z(I) - NL = NL + 1 - 1 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude) and Y (latitude). -C - WRITE (LUN,102) - NL = 6 - DO 2 I = 1,N - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF -! WRITE (LUN,104) I, X(I), Y(I) - WRITE (LUN,103) I, X(I), Y(I), Z(I) - NL = NL + 1 - 2 CONTINUE - ENDIF -C -C Print the triangulation LTRI. -C - IF (NL .GT. NLMAX/2) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - IF (NROW .EQ. 6) THEN - WRITE (LUN,105) - ELSE - WRITE (LUN,106) - ENDIF - NL = NL + 5 - DO 3 K = 1,NT - IF (NL .GE. 
NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,107) K, (LTRI(I,K), I = 1,NROW) - NL = NL + 1 - 3 CONTINUE -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - NB = 2*N - NT - 2 - IF (NB .LT. 3) THEN - NB = 0 - NA = 3*N - 6 - ELSE - NA = NT + N - 1 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///18X,'STRIPACK (TRLIST) Output, N = ',I4) - 101 FORMAT (//8X,'Node',10X,'X(Node)',10X,'Y(Node)',10X, - . 'Z(Node)'//) -! 102 FORMAT (//16X,'Node',8X,'Longitude',9X,'Latitude'//) - 102 FORMAT (//8X,'Node',8X,'XXXXXXXXX',9X,'YYYYYYYY',9X,'ZZZZZZZZ'//) - - 103 FORMAT (8X,I4,3E17.6) - 104 FORMAT (16X,I4,2E17.6) - 105 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors'/ - . 4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X, - . 'KT2',4X,'KT3'/) - 106 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors', - . 14X,'Arcs'/ - . 4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X, - . 'KT2',4X,'KT3',4X,'KA1',4X,'KA2',4X,'KA3'/) - 107 FORMAT (2X,I4,2X,6(3X,I4),3(2X,I5)) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (//1X,10X,'*** Invalid Parameter: N =',I5, - . ', NROW =',I5,', NT =',I5,' ***') - END - SUBROUTINE TRMESH (N,X,Y,Z, LIST,LPTR,LEND,LNEW,NEAR, - . NEXT,DIST,IER) - INTEGER N, LIST(*), LPTR(*), LEND(N), LNEW, NEAR(N), - . NEXT(N), IER - REAL X(N), Y(N), Z(N), DIST(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine creates a Delaunay triangulation of a -C set of N arbitrarily distributed points, referred to as -C nodes, on the surface of the unit sphere. The Delaunay -C triangulation is defined as a set of (spherical) triangles -C with the following five properties: -C -C 1) The triangle vertices are nodes. -C 2) No triangle contains a node other than its vertices. 
-C 3) The interiors of the triangles are pairwise disjoint. -C 4) The union of triangles is the convex hull of the set -C of nodes (the smallest convex set that contains -C the nodes). If the nodes are not contained in a -C single hemisphere, their convex hull is the en- -C tire sphere and there are no boundary nodes. -C Otherwise, there are at least three boundary nodes. -C 5) The interior of the circumcircle of each triangle -C contains no node. -C -C The first four properties define a triangulation, and the -C last property results in a triangulation which is as close -C as possible to equiangular in a certain sense and which is -C uniquely defined unless four or more nodes lie in a common -C plane. This property makes the triangulation well-suited -C for solving closest-point problems and for triangle-based -C interpolation. -C -C Provided the nodes are randomly ordered, the algorithm -C has expected time complexity O(N*log(N)) for most nodal -C distributions. Note, however, that the complexity may be -C as high as O(N**2) if, for example, the nodes are ordered -C on increasing latitude. -C -C Spherical coordinates (latitude and longitude) may be -C converted to Cartesian coordinates by Subroutine TRANS. -C -C The following is a list of the software package modules -C which a user may wish to call directly: -C -C ADDNOD - Updates the triangulation by appending a new -C node. -C -C AREAS - Returns the area of a spherical triangle. -C -C BNODES - Returns an array containing the indexes of the -C boundary nodes (if any) in counterclockwise -C order. Counts of boundary nodes, triangles, -C and arcs are also returned. -C -C CIRCUM - Returns the circumcenter of a spherical trian- -C gle. -C -C CRLIST - Returns the set of triangle circumcenters -C (Voronoi vertices) and circumradii associated -C with a triangulation. -C -C DELARC - Deletes a boundary arc from a triangulation. -C -C DELNOD - Updates the triangulation with a nodal deletion. 
-C -C EDGE - Forces an arbitrary pair of nodes to be connec- -C ted by an arc in the triangulation. -C -C GETNP - Determines the ordered sequence of L closest -C nodes to a given node, along with the associ- -C ated distances. -C -C INSIDE - Locates a point relative to a polygon on the -C surface of the sphere. -C -C INTRSC - Returns the point of intersection between a -C pair of great circle arcs. -C -C JRAND - Generates a uniformly distributed pseudo-random -C integer. -C -C LEFT - Locates a point relative to a great circle. -C -C NEARND - Returns the index of the nearest node to an -C arbitrary point, along with its squared -C distance. -C -C SCOORD - Converts a point from Cartesian coordinates to -C spherical coordinates. -C -C STORE - Forces a value to be stored in main memory so -C that the precision of floating point numbers -C in memory locations rather than registers is -C computed. -C -C TRANS - Transforms spherical coordinates into Cartesian -C coordinates on the unit sphere for input to -C Subroutine TRMESH. -C -C TRLIST - Converts the triangulation data structure to a -C triangle list more suitable for use in a fin- -C ite element code. -C -C TRLPRT - Prints the triangle list created by Subroutine -C TRLIST. -C -C TRMESH - Creates a Delaunay triangulation of a set of -C nodes. -C -C TRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a triangulation plot. -C -C TRPRNT - Prints the triangulation data structure and, -C optionally, the nodal coordinates. -C -C VRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a Voronoi diagram plot. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of distinct nodes. (X(K),Y(K), -C Z(K)) is referred to as node K, and K is re- -C ferred to as a nodal index. It is required -C that X(K)**2 + Y(K)**2 + Z(K)**2 = 1 for all -C K. 
The first three nodes must not be col- -C linear (lie on a common great circle). -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR = Arrays of length at least 6N-12. -C -C LEND = Array of length at least N. -C -C NEAR,NEXT,DIST = Work space arrays of length at -C least N. The space is used to -C efficiently determine the nearest -C triangulation node to each un- -C processed node for use by ADDNOD. -C -C On output: -C -C LIST = Set of nodal indexes which, along with LPTR, -C LEND, and LNEW, define the triangulation as a -C set of N adjacency lists -- counterclockwise- -C ordered sequences of neighboring nodes such -C that the first and last neighbors of a bound- -C ary node are boundary nodes (the first neigh- -C bor of an interior node is arbitrary). In -C order to distinguish between interior and -C boundary nodes, the last neighbor of each -C boundary node is represented by the negative -C of its index. -C -C LPTR = Set of pointers (LIST indexes) in one-to-one -C correspondence with the elements of LIST. -C LIST(LPTR(I)) indexes the node which follows -C LIST(I) in cyclical counterclockwise order -C (the first neighbor follows the last neigh- -C bor). -C -C LEND = Set of pointers to adjacency lists. LEND(K) -C points to the last neighbor of node K for -C K = 1,...,N. Thus, LIST(LEND(K)) < 0 if and -C only if K is a boundary node. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). LIST, LPTR, -C LEND, and LNEW are not altered if IER < 0, -C and are incomplete if IER > 0. -C -C NEAR,NEXT,DIST = Garbage. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if N < 3 on input. -C IER = -2 if the first three nodes are -C collinear. -C IER = L if nodes L and M coincide for some -C M > L. The data structure represents -C a triangulation of nodes 1 to M-1 in -C this case. 
-C -C Modules required by TRMESH: ADDNOD, BDYADD, COVSPH, -C INSERT, INTADD, JRAND, -C LEFT, LSTPTR, STORE, SWAP, -C SWPTST, TRFIND -C -C Intrinsic function called by TRMESH: ABS -C -C*********************************************************** -C - INTEGER I, I0, J, K, LP, LPL, NEXTI, NN - LOGICAL LEFT - REAL D, D1, D2, D3 -C -C Local parameters: -C -C D = (Negative cosine of) distance from node K to -C node I -C D1,D2,D3 = Distances from node K to nodes 1, 2, and 3, -C respectively -C I,J = Nodal indexes -C I0 = Index of the node preceding I in a sequence of -C unprocessed nodes: I = NEXT(I0) -C K = Index of node to be added and DO-loop index: -C K > 3 -C LP = LIST index (pointer) of a neighbor of K -C LPL = Pointer to the last neighbor of K -C NEXTI = NEXT(I) -C NN = Local copy of N -C - NN = N - IF (NN .LT. 3) THEN - IER = -1 - RETURN - ENDIF -C -C Store the first triangle in the linked list. -C - IF ( .NOT. LEFT (X(1),Y(1),Z(1),X(2),Y(2),Z(2), - . X(3),Y(3),Z(3)) ) THEN -C -C The first triangle is (3,2,1) = (2,1,3) = (1,3,2). -C - LIST(1) = 3 - LPTR(1) = 2 - LIST(2) = -2 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 1 - LPTR(3) = 4 - LIST(4) = -3 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 2 - LPTR(5) = 6 - LIST(6) = -1 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSEIF ( .NOT. LEFT(X(2),Y(2),Z(2),X(1),Y(1),Z(1), - . X(3),Y(3),Z(3)) ) - . THEN -C -C The first triangle is (1,2,3): 3 Strictly Left 1->2, -C i.e., node 3 lies in the left hemisphere defined by -C arc 1->2. -C - LIST(1) = 2 - LPTR(1) = 2 - LIST(2) = -3 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 3 - LPTR(3) = 4 - LIST(4) = -1 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 1 - LPTR(5) = 6 - LIST(6) = -2 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSE -C -C The first three nodes are collinear. -C - IER = -2 - RETURN - ENDIF -C -C Initialize LNEW and test for N = 3. -C - LNEW = 7 - IF (NN .EQ. 
3) THEN - IER = 0 - RETURN - ENDIF -C -C A nearest-node data structure (NEAR, NEXT, and DIST) is -C used to obtain an expected-time (N*log(N)) incremental -C algorithm by enabling constant search time for locating -C each new node in the triangulation. -C -C For each unprocessed node K, NEAR(K) is the index of the -C triangulation node closest to K (used as the starting -C point for the search in Subroutine TRFIND) and DIST(K) -C is an increasing function of the arc length (angular -C distance) between nodes K and NEAR(K): -Cos(a) for arc -C length a. -C -C Since it is necessary to efficiently find the subset of -C unprocessed nodes associated with each triangulation -C node J (those that have J as their NEAR entries), the -C subsets are stored in NEAR and NEXT as follows: for -C each node J in the triangulation, I = NEAR(J) is the -C first unprocessed node in J's set (with I = 0 if the -C set is empty), L = NEXT(I) (if I > 0) is the second, -C NEXT(L) (if L > 0) is the third, etc. The nodes in each -C set are initially ordered by increasing indexes (which -C maximizes efficiency) but that ordering is not main- -C tained as the data structure is updated. -C -C Initialize the data structure for the single triangle. -C - NEAR(1) = 0 - NEAR(2) = 0 - NEAR(3) = 0 - DO 1 K = NN,4,-1 - D1 = -(X(K)*X(1) + Y(K)*Y(1) + Z(K)*Z(1)) - D2 = -(X(K)*X(2) + Y(K)*Y(2) + Z(K)*Z(2)) - D3 = -(X(K)*X(3) + Y(K)*Y(3) + Z(K)*Z(3)) - IF (D1 .LE. D2 .AND. D1 .LE. D3) THEN - NEAR(K) = 1 - DIST(K) = D1 - NEXT(K) = NEAR(1) - NEAR(1) = K - ELSEIF (D2 .LE. D1 .AND. D2 .LE. D3) THEN - NEAR(K) = 2 - DIST(K) = D2 - NEXT(K) = NEAR(2) - NEAR(2) = K - ELSE - NEAR(K) = 3 - DIST(K) = D3 - NEXT(K) = NEAR(3) - NEAR(3) = K - ENDIF - 1 CONTINUE -C -C Add the remaining nodes -C - DO 6 K = 4,NN - CALL ADDNOD (NEAR(K),K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - IF (IER .NE. 0) RETURN -C -C Remove K from the set of unprocessed nodes associated -C with NEAR(K). -C - I = NEAR(K) - IF (NEAR(I) .EQ. 
K) THEN - NEAR(I) = NEXT(K) - ELSE - I = NEAR(I) - 2 I0 = I - I = NEXT(I0) - IF (I .NE. K) GO TO 2 - NEXT(I0) = NEXT(K) - ENDIF - NEAR(K) = 0 -C -C Loop on neighbors J of node K. -C - LPL = LEND(K) - LP = LPL - 3 LP = LPTR(LP) - J = ABS(LIST(LP)) -C -C Loop on elements I in the sequence of unprocessed nodes -C associated with J: K is a candidate for replacing J -C as the nearest triangulation node to I. The next value -C of I in the sequence, NEXT(I), must be saved before I -C is moved because it is altered by adding I to K's set. -C - I = NEAR(J) - 4 IF (I .EQ. 0) GO TO 5 - NEXTI = NEXT(I) -C -C Test for the distance from I to K less than the distance -C from I to J. -C - D = -(X(I)*X(K) + Y(I)*Y(K) + Z(I)*Z(K)) - IF (D .LT. DIST(I)) THEN -C -C Replace J by K as the nearest triangulation node to I: -C update NEAR(I) and DIST(I), and remove I from J's set -C of unprocessed nodes and add it to K's set. -C - NEAR(I) = K - DIST(I) = D - IF (I .EQ. NEAR(J)) THEN - NEAR(J) = NEXTI - ELSE - NEXT(I0) = NEXTI - ENDIF - NEXT(I) = NEAR(K) - NEAR(K) = I - ELSE - I0 = I - ENDIF -C -C Bottom of loop on I. -C - I = NEXTI - GO TO 4 -C -C Bottom of loop on neighbors J. -C - 5 IF (LP .NE. LPL) GO TO 3 - 6 CONTINUE - RETURN - END - SUBROUTINE TRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . LIST,LPTR,LEND,TITLE,NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, LIST(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical display of a -C triangulation of a set of nodes on the unit sphere. The -C visible nodes are projected onto the plane that contains -C the origin and has normal defined by a user-specified eye- -C position. 
Projections of adjacent (visible) nodes are -C connected by line segments. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere nodes are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C triangulation is clipped. The projected window -C is a disk of radius r = Sin(A) centered at the -C origin, and only visible nodes whose projections -C are within distance r of the origin are included -C in the plot. Thus, if A = 90, the plot includes -C the entire hemisphere centered at E. 0 .LT. A -C .LE. 90. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. 
-C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted next to the nodes. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, or N is outside its -C valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C The values in the data statement below may be altered -C in order to modify various plotting options. -C -C Modules required by TRPLOT: None -C -C Intrinsic functions called by TRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, LP, LPL, N0, N1 - LOGICAL ANNOT - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . 
Y0, Y1, Z0, Z1 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node whose incident arcs are to be -C drawn -C N1 = Neighbor of N0 -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0,Z0 = Coordinates of N0 in the rotated coordinate -C system or label location (X0,Y0) -C X1,Y1,Z1 = Coordinates of N1 in the rotated coordinate -C system or intersection of edge N0-N1 with -C the equator (in the rotated coordinate -C system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. 
-C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Triangulation'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. -C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. -C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 1.0/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. 
-C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 3 N0 = 1,N - Z0 = EX*X(N0) + EY*Y(N0) + EZ*Z(N0) - IF (Z0 .LT. 0.) GO TO 3 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 3 - LPL = LEND(N0) - LP = LPL -C -C Loop on neighbors N1 of N0. LPL points to the last -C neighbor of N0. Copy the components of N1 into P. -C - 1 LP = LPTR(LP) - N1 = ABS(LIST(LP)) - X1 = R11*X(N1) + R12*Y(N1) - Y1 = R21*X(N1) + R22*Y(N1) + R23*Z(N1) - Z1 = EX*X(N1) + EY*Y(N1) + EZ*Z(N1) - IF (Z1 .LT. 0.) THEN -C -C N1 is a 'southern hemisphere' point. Move it to the -C intersection of edge N0-N1 with the equator so that -C the edge is clipped properly. Z1 is implicitly set -C to 0. -C - X1 = Z0*X1 - Z1*X0 - Y1 = Z0*Y1 - Z1*Y0 - T = SQRT(X1*X1+Y1*Y1) - X1 = X1/T - Y1 = Y1/T - ENDIF -C -C If node N1 is in the window and N1 < N0, bypass edge -C N0->N1 (since edge N1->N0 has already been drawn). -C - IF ( Z1 .GE. 0.0 .AND. X1*X1 + Y1*Y1 .LE. WRS - . .AND. N1 .LT. N0 ) GO TO 2 -C -C Add the edge to the path. -C - WRITE (LUN,180,ERR=13) X0, Y0, X1, Y1 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. 
-C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. -C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0) and draw the label N0. The first char- -C acter will will have its lower left corner about one -C character width to the right of the nodal position. -C - WRITE (LUN,210,ERR=13) X0, Y0 - 210 FORMAT (2F12.6,' moveto') - WRITE (LUN,220,ERR=13) N0 - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C -C Y0 = WR + 3.0*T -C WRITE (LUN,230,ERR=13) TITLE, Y0 -C 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, -C . ' moveto') -C WRITE (LUN,240,ERR=13) TITLE -C 240 FORMAT (A80/' show') -C IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C -C X0 = -WR -C Y0 = -WR - 50.0/SF -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,250,ERR=13) ELAT, ELON -C Y0 = Y0 - 2.0*T -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,260,ERR=13) A -C 250 FORMAT ('(Window center: ELAT = ',F7.2, -C . ', ELON = ',F8.2,') show') -C 260 FORMAT ('(Angular extent: A = ',F5.2,') show') -C ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. 
-C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, or N. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. -C - 13 IER = 3 - RETURN - END - SUBROUTINE TRPRNT (N,X,Y,Z,IFLAG,LIST,LPTR,LEND,LOUT) - INTEGER N, IFLAG, LIST(*), LPTR(*), LEND(N), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/25/98 -C -C This subroutine prints the triangulation adjacency lists -C created by Subroutine TRMESH and, optionally, the nodal -C coordinates (either latitude and longitude or Cartesian -C coordinates) on logical unit LOUT. The list of neighbors -C of a boundary node is followed by index 0. The numbers of -C boundary nodes, triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3 -C and N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. 
Refer to Subroutine -C TRMESH. -C -C LOUT = Logical unit for output. If LOUT is not in -C the range 0 to 99, output is written to -C logical unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The adjacency lists and nodal coordinates (as specified -C by IFLAG) are written to unit LOUT. -C -C Modules required by TRPRNT: None -C -C*********************************************************** -C - INTEGER I, INC, K, LP, LPL, LUN, NA, NABOR(400), NB, - . ND, NL, NLMAX, NMAX, NODE, NN, NT - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = NABOR index (1 to K) -C INC = Increment for NL associated with an adjacency list -C K = Counter and number of neighbors of NODE -C LP = LIST pointer of a neighbor of NODE -C LPL = Pointer to the last neighbor of NODE -C LUN = Logical unit for output (copy of LOUT) -C NA = Number of arcs in the triangulation -C NABOR = Array containing the adjacency list associated -C with NODE, with zero appended if NODE is a -C boundary node -C NB = Number of boundary nodes encountered -C ND = Index of a neighbor of NODE (or negative index) -C NL = Number of lines that have been printed on the -C current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Upper bound on N (allows 4-digit indexes) -C NODE = Index of a node and DO-loop index (1 to N) -C NN = Local copy of N -C NT = Number of triangles in the triangulation -C - NN = N - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test the range of N. -C - WRITE (LUN,100) NN - IF (NN .LT. 3 .OR. NN .GT. NMAX) THEN -C -C N is outside its valid range. -C - WRITE (LUN,110) - RETURN - ENDIF -C -C Initialize NL (the number of lines printed on the current -C page) and NB (the number of boundary nodes encountered). -C - NL = 6 - NB = 0 - IF (IFLAG .LT. 0) THEN -C -C Print LIST only. 
K is the number of neighbors of NODE -C that have been stored in NABOR. -C - WRITE (LUN,101) - DO 2 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 1 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 1 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. Correct the sign of the last -C neighbor, add 0 to the end of the list, and increment -C NB. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print the list of neighbors. -C - INC = (K-1)/14 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,104) NODE, (NABOR(I), I = 1,K) - IF (K .NE. 14) WRITE (LUN,107) - 2 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude), Y (latitude), and LIST. -C - WRITE (LUN,102) - DO 4 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 3 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 3 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, and NABOR. -C - INC = (K-1)/8 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,105) NODE, (NABOR(I), I = 1,K) -! WRITE (LUN,105) NODE, X(NODE), Y(NODE), -! . (NABOR(I), I = 1,K) - IF (K .NE. 8) WRITE (LUN,107) - 4 CONTINUE - ELSE -C -C Print X, Y, Z, and LIST. -C - WRITE (LUN,103) - DO 6 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 5 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 5 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, Z, and NABOR. -C - INC = (K-1)/5 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,106) NODE, X(NODE), Y(NODE), - . Z(NODE), (NABOR(I), I = 1,K) - IF (K .NE. 
5) WRITE (LUN,107) - 6 CONTINUE - ENDIF -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - IF (NB .NE. 0) THEN - NA = 3*NN - NB - 3 - NT = 2*NN - NB - 2 - ELSE - NA = 3*NN - 6 - NT = 2*NN - 4 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///15X,'STRIPACK Triangulation Data ', - . 'Structure, N = ',I5//) - 101 FORMAT (1X,'Node',31X,'Neighbors of Node'//) -! 102 FORMAT (1X,'Node',5X,'Longitude',6X,'Latitude', -! . 18X,'Neighbors of Node'//) - 102 FORMAT (1X,'Node', 8X,'Neighbors of Node'//) - 103 FORMAT (1X,'Node',5X,'X(Node)',8X,'Y(Node)',8X, - . 'Z(Node)',11X,'Neighbors of Node'//) - 104 FORMAT (1X,I4,4X,14I5/(1X,8X,14I5)) -! 105 FORMAT (1X,I4,2E15.6,4X,8I5/(1X,38X,8I5)) - 105 FORMAT (1X,I4,4X,8I5/(1X,38X,8I5)) - 106 FORMAT (1X,I4,3E15.6,4X,5I5/(1X,53X,5I5)) - 107 FORMAT (1X) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (1X,10X,'*** N is outside its valid', - . ' range ***') - END - SUBROUTINE VRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . NT,LISTC,LPTR,LEND,XC,YC,ZC,TITLE, - . NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, NT, LISTC(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N), - . XC(NT), YC(NT), ZC(NT) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical depiction of a -C Voronoi diagram of a set of nodes on the unit sphere. -C The visible vertices are projected onto the plane that -C contains the origin and has normal defined by a user- -C specified eye-position. Projections of adjacent (visible) -C Voronoi vertices are connected by line segments. 
-C -C The parameters defining the Voronoi diagram may be com- -C puted by Subroutine CRLIST. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere points are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C Voronoi diagram is clipped. The projected win- -C dow is a disk of radius r = Sin(A) centered at -C the origin, and only visible vertices whose -C projections are within distance r of the origin -C are included in the plot. Thus, if A = 90, the -C plot includes the entire hemisphere centered at -C E. 0 .LT. A .LE. 90. -C -C N = Number of nodes (Voronoi centers) and Voronoi -C regions. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). 
-C -C NT = Number of Voronoi region vertices (triangles, -C including those in the extended triangulation -C if the number of boundary nodes NB is nonzero): -C NT = 2*N-4. -C -C LISTC = Array of length 3*NT containing triangle -C indexes (indexes to XC, YC, and ZC) stored -C in 1-1 correspondence with LIST/LPTR entries -C (or entries that would be stored in LIST for -C the extended triangulation): the index of -C triangle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C LPTR = Array of length 3*NT = 6*N-12 containing a -C set of pointers (LISTC indexes) in one-to-one -C correspondence with the elements of LISTC. -C LISTC(LPTR(I)) indexes the triangle which -C follows LISTC(I) in cyclical counterclockwise -C order (the first neighbor follows the last -C neighbor). -C -C LEND = Array of length N containing a set of -C pointers to triangle lists. LP = LEND(K) -C points to a triangle (indexed by LISTC(LP)) -C containing node K for K = 1 to N. -C -C XC,YC,ZC = Arrays of length NT containing the -C Cartesian coordinates of the triangle -C circumcenters (Voronoi vertices). -C XC(I)**2 + YC(I)**2 + ZC(I)**2 = 1. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. -C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted at the Voronoi -C region centers. -C -C Input parameters are not altered by this routine. 
-C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, N, or NT is outside -C its valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C Modules required by VRPLOT: None -C -C Intrinsic functions called by VRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, KV1, KV2, LP, LPL, - . N0 - LOGICAL ANNOT, IN1, IN2 - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . X2, Y0, Y1, Y2, Z1, Z2 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IN1,IN2 = Logical variables with value TRUE iff the -C projections of vertices KV1 and KV2, respec- -C tively, are inside the window -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C KV1,KV2 = Endpoint indexes of a Voronoi edge -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) 
-C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0 = Projection plane coordinates of node N0 or -C label location -C X1,Y1,Z1 = Coordinates of vertex KV1 in the rotated -C coordinate system -C X2,Y2,Z2 = Coordinates of vertex KV2 in the rotated -C coordinate system or intersection of edge -C KV1-KV2 with the equator (in the rotated -C coordinate system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3 .OR. NT .NE. 2*N-4) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. -C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Voronoi diagram'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. 
-C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. -C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. -C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 1.0/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. -C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on nodes (Voronoi centers) N0. -C LPL indexes the last neighbor of N0. -C - DO 3 N0 = 1,N - LPL = LEND(N0) -C -C Set KV2 to the first (and last) vertex index and compute -C its coordinates (X2,Y2,Z2) in the rotated coordinate -C system. 
-C - KV2 = LISTC(LPL) - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) -C -C IN2 = TRUE iff KV2 is in the window. -C - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Loop on neighbors N1 of N0. For each triangulation edge -C N0-N1, KV1-KV2 is the corresponding Voronoi edge. -C - LP = LPL - 1 LP = LPTR(LP) - KV1 = KV2 - X1 = X2 - Y1 = Y2 - Z1 = Z2 - IN1 = IN2 - KV2 = LISTC(LP) -C -C Compute the new values of (X2,Y2,Z2) and IN2. -C - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Add edge KV1-KV2 to the path iff both endpoints are inside -C the window and KV2 > KV1, or KV1 is inside and KV2 is -C outside (so that the edge is drawn only once). -C - IF (.NOT. IN1 .OR. (IN2 .AND. KV2 .LE. KV1)) - . GO TO 2 - IF (Z2 .LT. 0.) THEN -C -C KV2 is a 'southern hemisphere' point. Move it to the -C intersection of edge KV1-KV2 with the equator so that -C the edge is clipped properly. Z2 is implicitly set -C to 0. -C - X2 = Z1*X2 - Z2*X1 - Y2 = Z1*Y2 - Z2*Y1 - T = SQRT(X2*X2+Y2*Y2) - X2 = X2/T - Y2 = Y2/T - ENDIF - WRITE (LUN,180,ERR=13) X1, Y1, X2, Y2 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. -C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. -C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . 
GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0), and draw the label N0 with the origin -C of the first character at (X0,Y0). -C - WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,220,ERR=13) - WRITE (LUN,220,ERR=13) N0 - 210 FORMAT (2F12.6,' moveto') -C 220 FORMAT ('(','.',') show') - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C -C Y0 = WR + 3.0*T -C WRITE (LUN,230,ERR=13) TITLE, Y0 -C 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, -C . ' moveto') -C WRITE (LUN,240,ERR=13) TITLE -C 240 FORMAT (A80/' show') -C IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C -C X0 = -WR -C Y0 = -WR - 50.0/SF -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,250,ERR=13) ELAT, ELON -C Y0 = Y0 - 2.0*T -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,260,ERR=13) A -C 250 FORMAT ('(Window center: ELAT = ',F7.2, -C . ', ELON = ',F8.2,') show') -C 260 FORMAT ('(Angular extent: A = ',F5.2,') show') -C ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. -C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, N, or NT. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. 
-C - 13 IER = 3 - RETURN - END diff --git a/grid_gen/global_scvt/runit.csh b/grid_gen/global_scvt/runit.csh deleted file mode 100644 index 80f1df522..000000000 --- a/grid_gen/global_scvt/runit.csh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/csh -rm -f locs.dat* -cp centroids.162.dat locs.dat - -make clean -make - -setenv NAME x1 - -foreach RES (162 642 2562 10242 40962) -#foreach RES (162 642 2562 10242 40962 163842) - -setenv RES 162 -echo "&domains" > namelist.input -echo " np = "$RES"" >> namelist.input -echo " locs_as_xyz = .true." >> namelist.input -echo " n_scvt_iterations = 10000" >> namelist.input -cat convergence >> namelist.input -echo "/" >> namelist.input -grid_gen -grid_ref -mv -f grid.nc grid.$NAME.$RES.nc -mv -f locs.dat.out locs.$NAME.$RES.dat -mv -f locs.dat.out.refined locs.dat - -end diff --git a/grid_gen/global_scvt/src/Makefile b/grid_gen/global_scvt/src/Makefile deleted file mode 100644 index 9f44d4d9e..000000000 --- a/grid_gen/global_scvt/src/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -.SUFFIXES: .F .f .o - -OBJS = STRIPACK.o module_grid_params.o module_grid_constants.o module_data_types.o module_sphere_utilities.o module_voronoi_utils.o module_grid_gen_utils.o module_scvt.o module_write_netcdf.o module_grid_meta.o grid_gen.o - -all: $(OBJS) - $(FC) $(PROMOTION) $(LDFLAGS) -o grid_gen $(OBJS) -L$(NETCDF)/lib -lnetcdff -lnetcdf - - -grid_gen.o: module_grid_params.o module_grid_constants.o module_data_types.o module_grid_gen_utils.o module_voronoi_utils.o STRIPACK.o module_scvt.o module_grid_meta.o - -module_grid_gen_utils.o: module_sphere_utilities.o - -module_scvt.o: module_data_types.o module_sphere_utilities.o module_voronoi_utils.o module_grid_constants.o module_grid_params.o - -module_write_netcdf.o: module_grid_params.o - -module_data_types.o: - -module_grid_meta.o: module_data_types.o module_grid_constants.o module_sphere_utilities.o module_write_netcdf.o - -module_sphere_utilities.o: module_data_types.o - -module_grid_constants.o: - 
-module_grid_params.o: - -module_voronoi_utils.o: module_grid_constants.o STRIPACK.o - - -.F.o: - cpp -C -P -traditional $(CPPFLAGS) $< > $*.f90 - $(FC) $(FFLAGS) $(PROMOTION) -c $*.f90 -I$(NETCDF)/include - rm -f $*.f90 - -.f.o: - $(FC) $(F77FLAGS) $(PROMOTION) -c $< - -clean: - rm -f *.o *.mod grid_gen diff --git a/grid_gen/global_scvt/src/STRIPACK.f b/grid_gen/global_scvt/src/STRIPACK.f deleted file mode 100644 index 968c818a5..000000000 --- a/grid_gen/global_scvt/src/STRIPACK.f +++ /dev/null @@ -1,6706 +0,0 @@ - MODULE STRIPACK - - CONTAINS - - SUBROUTINE ADDNOD (NST,K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER NST, K, LIST(*), LPTR(*), LEND(K), LNEW, IER - REAL X(K), Y(K), Z(K) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine adds node K to a triangulation of the -C convex hull of nodes 1,...,K-1, producing a triangulation -C of the convex hull of nodes 1,...,K. -C -C The algorithm consists of the following steps: node K -C is located relative to the triangulation (TRFIND), its -C index is added to the data structure (INTADD or BDYADD), -C and a sequence of swaps (SWPTST and SWAP) are applied to -C the arcs opposite K so that all arcs incident on node K -C and opposite node K are locally optimal (satisfy the cir- -C cumcircle test). Thus, if a Delaunay triangulation is -C input, a Delaunay triangulation will result. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to K. If NST < 1, the search is -C begun at node K-1. -C -C K = Nodal index (index for X, Y, Z, and LEND) of the -C new node to be added. K .GE. 4. -C -C X,Y,Z = Arrays of length .GE. K containing Car- -C tesian coordinates of the nodes. -C (X(I),Y(I),Z(I)) defines node I for -C I = 1,...,K. 
-C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure associated with -C the triangulation of nodes 1 -C to K-1. The array lengths are -C assumed to be large enough to -C add node K. Refer to Subrou- -C tine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node K as the -C last entry unless IER .NE. 0 -C and IER .NE. -3, in which case -C the arrays are not altered. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if K is outside its valid range -C on input. -C IER = -2 if all nodes (including K) are col- -C linear (lie on a common geodesic). -C IER = L if nodes L and K coincide for some -C L < K. -C -C Modules required by ADDNOD: BDYADD, COVSPH, INSERT, -C INTADD, JRAND, LSTPTR, -C STORE, SWAP, SWPTST, -C TRFIND -C -C Intrinsic function called by ADDNOD: ABS -C -C*********************************************************** -C - INTEGER I1, I2, I3, IO1, IO2, IN1, IST, KK, KM1, L, - . LP, LPF, LPO1, LPO1S - REAL B1, B2, B3, P(3) -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND. -C I1,I2,I3 = Vertex indexes of a triangle containing K -C IN1 = Vertex opposite K: first neighbor of IO2 -C that precedes IO1. IN1,IO1,IO2 are in -C counterclockwise order. -C IO1,IO2 = Adjacent neighbors of K defining an arc to -C be tested for a swap -C IST = Index of node at which TRFIND begins its search -C KK = Local copy of K -C KM1 = K-1 -C L = Vertex index (I1, I2, or I3) returned in IER -C if node K coincides with a vertex -C LP = LIST pointer -C LPF = LIST pointer to the first neighbor of K -C LPO1 = LIST pointer to IO1 -C LPO1S = Saved value of LPO1 -C P = Cartesian coordinates of node K -C - KK = K - IF (KK .LT. 4) GO TO 3 -C -C Initialization: -C - KM1 = KK - 1 - IST = NST - IF (IST .LT. 
1) IST = KM1 - P(1) = X(KK) - P(2) = Y(KK) - P(3) = Z(KK) -C -C Find a triangle (I1,I2,I3) containing K or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from node K. -C - CALL TRFIND (IST,P,KM1,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear or duplicate nodes. -C - IF (I1 .EQ. 0) GO TO 4 - IF (I3 .NE. 0) THEN - L = I1 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I2 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I3 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - CALL INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - ELSE - IF (I1 .NE. I2) THEN - CALL BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - ELSE - CALL COVSPH (KK,I1, LIST,LPTR,LEND,LNEW ) - ENDIF - ENDIF - IER = 0 -C -C Initialize variables for optimization of the -C triangulation. -C - LP = LEND(KK) - LPF = LPTR(LP) - IO2 = LIST(LPF) - LPO1 = LPTR(LPF) - IO1 = ABS(LIST(LPO1)) -C -C Begin loop: find the node opposite K. -C - 1 LP = LSTPTR(LEND(IO1),IO2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 2 - LP = LPTR(LP) - IN1 = ABS(LIST(LP)) -C -C Swap test: if a swap occurs, two new arcs are -C opposite K and must be tested. -C - LPO1S = LPO1 - IF ( .NOT. SWPTST(IN1,KK,IO1,IO2,X,Y,Z) ) GO TO 2 - CALL SWAP (IN1,KK,IO1,IO2, LIST,LPTR,LEND, LPO1) - IF (LPO1 .EQ. 0) THEN -C -C A swap is not possible because KK and IN1 are already -C adjacent. This error in SWPTST only occurs in the -C neutral case and when there are nearly duplicate -C nodes. -C - LPO1 = LPO1S - GO TO 2 - ENDIF - IO1 = IN1 - GO TO 1 -C -C No swap occurred. Test for termination and reset -C IO2 and IO1. -C - 2 IF (LPO1 .EQ. LPF .OR. LIST(LPO1) .LT. 0) RETURN - IO2 = IO1 - LPO1 = LPTR(LPO1) - IO1 = ABS(LIST(LPO1)) - GO TO 1 -C -C KK < 4. -C - 3 IER = -1 - RETURN -C -C All nodes are collinear. -C - 4 IER = -2 - RETURN -C -C Nodes L and K coincide. 
-C - 5 IER = L - RETURN - END SUBROUTINE - REAL FUNCTION AREAS (V1,V2,V3) - REAL V1(3), V2(3), V3(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 09/18/90 -C -C This function returns the area of a spherical triangle -C on the unit sphere. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of unit vectors (the -C three triangle vertices in any order). -C These vectors, if nonzero, are implicitly -C scaled to have length 1. -C -C Input parameters are not altered by this function. -C -C On output: -C -C AREAS = Area of the spherical triangle defined by -C V1, V2, and V3 in the range 0 to 2*PI (the -C area of a hemisphere). AREAS = 0 (or 2*PI) -C if and only if V1, V2, and V3 lie in (or -C close to) a plane containing the origin. -C -C Modules required by AREAS: None -C -C Intrinsic functions called by AREAS: ACOS, DBLE, REAL, -C SQRT -C -C*********************************************************** -C - DOUBLE PRECISION A1, A2, A3, CA1, CA2, CA3, DV1(3), - . DV2(3), DV3(3), S12, S23, S31, - . U12(3), U23(3), U31(3) - INTEGER I -C -C Local parameters: -C -C A1,A2,A3 = Interior angles of the spherical triangle -C CA1,CA2,CA3 = cos(A1), cos(A2), and cos(A3), respectively -C DV1,DV2,DV3 = Double Precision copies of V1, V2, and V3 -C I = DO-loop index and index for Uij -C S12,S23,S31 = Sum of squared components of U12, U23, U31 -C U12,U23,U31 = Unit normal vectors to the planes defined by -C pairs of triangle vertices -C - DO 1 I = 1,3 - DV1(I) = DBLE(V1(I)) - DV2(I) = DBLE(V2(I)) - DV3(I) = DBLE(V3(I)) - 1 CONTINUE -C -C Compute cross products Uij = Vi X Vj. 
-C - U12(1) = DV1(2)*DV2(3) - DV1(3)*DV2(2) - U12(2) = DV1(3)*DV2(1) - DV1(1)*DV2(3) - U12(3) = DV1(1)*DV2(2) - DV1(2)*DV2(1) -C - U23(1) = DV2(2)*DV3(3) - DV2(3)*DV3(2) - U23(2) = DV2(3)*DV3(1) - DV2(1)*DV3(3) - U23(3) = DV2(1)*DV3(2) - DV2(2)*DV3(1) -C - U31(1) = DV3(2)*DV1(3) - DV3(3)*DV1(2) - U31(2) = DV3(3)*DV1(1) - DV3(1)*DV1(3) - U31(3) = DV3(1)*DV1(2) - DV3(2)*DV1(1) -C -C Normalize Uij to unit vectors. -C - S12 = 0.D0 - S23 = 0.D0 - S31 = 0.D0 - DO 2 I = 1,3 - S12 = S12 + U12(I)*U12(I) - S23 = S23 + U23(I)*U23(I) - S31 = S31 + U31(I)*U31(I) - 2 CONTINUE -C -C Test for a degenerate triangle associated with collinear -C vertices. -C - IF (S12 .EQ. 0.D0 .OR. S23 .EQ. 0.D0 .OR. - . S31 .EQ. 0.D0) THEN - AREAS = 0. - RETURN - ENDIF - S12 = SQRT(S12) - S23 = SQRT(S23) - S31 = SQRT(S31) - DO 3 I = 1,3 - U12(I) = U12(I)/S12 - U23(I) = U23(I)/S23 - U31(I) = U31(I)/S31 - 3 CONTINUE -C -C Compute interior angles Ai as the dihedral angles between -C planes: -C CA1 = cos(A1) = - -C CA2 = cos(A2) = - -C CA3 = cos(A3) = - -C - CA1 = -U12(1)*U31(1)-U12(2)*U31(2)-U12(3)*U31(3) - CA2 = -U23(1)*U12(1)-U23(2)*U12(2)-U23(3)*U12(3) - CA3 = -U31(1)*U23(1)-U31(2)*U23(2)-U31(3)*U23(3) - IF (CA1 .LT. -1.D0) CA1 = -1.D0 - IF (CA1 .GT. 1.D0) CA1 = 1.D0 - IF (CA2 .LT. -1.D0) CA2 = -1.D0 - IF (CA2 .GT. 1.D0) CA2 = 1.D0 - IF (CA3 .LT. -1.D0) CA3 = -1.D0 - IF (CA3 .GT. 1.D0) CA3 = 1.D0 - A1 = ACOS(CA1) - A2 = ACOS(CA2) - A3 = ACOS(CA3) -C -C Compute AREAS = A1 + A2 + A3 - PI. -C - AREAS = REAL(A1 + A2 + A3 - ACOS(-1.D0)) - IF (AREAS .LT. 0.) AREAS = 0. - RETURN - END FUNCTION - SUBROUTINE BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/11/96 -C -C This subroutine adds a boundary node to a triangulation -C of a set of KK-1 points on the unit sphere. 
The data -C structure is updated with the insertion of node KK, but no -C optimization is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of a node to be connected to the sequence -C of all visible boundary nodes. KK .GE. 1 and -C KK must not be equal to I1 or I2. -C -C I1 = First (rightmost as viewed from KK) boundary -C node in the triangulation that is visible from -C node KK (the line segment KK-I1 intersects no -C arcs. -C -C I2 = Last (leftmost) boundary node that is visible -C from node KK. I1 and I2 may be determined by -C Subroutine TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Nodes I1 and I2 must be in- -C cluded in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. Node -C KK is connected to I1, I2, and -C all boundary nodes in between. -C -C Module required by BDYADD: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, N1, N2, NEXT, NSAV -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C N1,N2 = Local copies of I1 and I2, respectively -C NEXT = Boundary node visible from K -C NSAV = Boundary node visible from K -C - K = KK - N1 = I1 - N2 = I2 -C -C Add K as the last neighbor of N1. -C - LP = LEND(N1) - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = -K - LPTR(LNEW) = LSAV - LEND(N1) = LNEW - LNEW = LNEW + 1 - NEXT = -LIST(LP) - LIST(LP) = NEXT - NSAV = NEXT -C -C Loop on the remaining boundary nodes between N1 and N2, -C adding K as the first neighbor. -C - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - IF (NEXT .EQ. N2) GO TO 2 - NEXT = -LIST(LP) - LIST(LP) = NEXT - GO TO 1 -C -C Add the boundary nodes between N1 and N2 as neighbors -C of node K. 
-C - 2 LSAV = LNEW - LIST(LNEW) = N1 - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = NSAV -C - 3 IF (NEXT .EQ. N2) GO TO 4 - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - LP = LEND(NEXT) - NEXT = LIST(LP) - GO TO 3 -C - 4 LIST(LNEW) = -N2 - LPTR(LNEW) = LSAV - LEND(K) = LNEW - LNEW = LNEW + 1 - RETURN - END SUBROUTINE - SUBROUTINE BNODES (N,LIST,LPTR,LEND, NODES,NB,NA,NT) - INTEGER N, LIST(*), LPTR(*), LEND(N), NODES(*), NB, - . NA, NT -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/26/96 -C -C Given a triangulation of N nodes on the unit sphere -C created by Subroutine TRMESH, this subroutine returns an -C array containing the indexes (if any) of the counterclock- -C wise-ordered sequence of boundary nodes -- the nodes on -C the boundary of the convex hull of the set of nodes. (The -C boundary is empty if the nodes do not lie in a single -C hemisphere.) The numbers of boundary nodes, arcs, and -C triangles are also returned. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C The above parameters are not altered by this routine. -C -C NODES = Integer array of length at least NB -C (NB .LE. N). -C -C On output: -C -C NODES = Ordered sequence of boundary node indexes -C in the range 1 to N (in the first NB loca- -C tions). -C -C NB = Number of boundary nodes. -C -C NA,NT = Number of arcs and triangles, respectively, -C in the triangulation. 
-C -C Modules required by BNODES: None -C -C*********************************************************** -C - INTEGER K, LP, N0, NN, NST -C -C Local parameters: -C -C K = NODES index -C LP = LIST pointer -C N0 = Boundary node to be added to NODES -C NN = Local copy of N -C NST = First element of nodes (arbitrarily chosen to be -C the one with smallest index) -C - NN = N -C -C Search for a boundary node. -C - DO 1 NST = 1,NN - LP = LEND(NST) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation contains no boundary nodes. -C - NB = 0 - NA = 3*(NN-2) - NT = 2*(NN-2) - RETURN -C -C NST is the first boundary node encountered. Initialize -C for traversal of the boundary. -C - 2 NODES(1) = NST - K = 1 - N0 = NST -C -C Traverse the boundary in counterclockwise order. -C - 3 LP = LEND(N0) - LP = LPTR(LP) - N0 = LIST(LP) - IF (N0 .EQ. NST) GO TO 4 - K = K + 1 - NODES(K) = N0 - GO TO 3 -C -C Store the counts. -C - 4 NB = K - NT = 2*N - NB - 2 - NA = NT + N - 1 - RETURN - END SUBROUTINE - SUBROUTINE CIRCUM (V1,V2,V3, C,IER) - INTEGER IER - REAL V1(3), V2(3), V3(3), C(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/29/95 -C -C This subroutine returns the circumcenter of a spherical -C triangle on the unit sphere: the point on the sphere sur- -C face that is equally distant from the three triangle -C vertices and lies in the same hemisphere, where distance -C is taken to be arc-length on the sphere surface. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of the three triangle -C vertices (unit vectors) in CCW order. -C -C The above parameters are not altered by this routine. -C -C C = Array of length 3. -C -C On output: -C -C C = Cartesian coordinates of the circumcenter unless -C IER > 0, in which case C is not defined. C = -C (V2-V1) X (V3-V1) normalized to a unit vector. 
-C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if V1, V2, and V3 lie on a common -C line: (V2-V1) X (V3-V1) = 0. -C (The vertices are not tested for validity.) -C -C Modules required by CIRCUM: None -C -C Intrinsic function called by CIRCUM: SQRT -C -C*********************************************************** -C - INTEGER I - REAL CNORM, CU(3), E1(3), E2(3) -C -C Local parameters: -C -C CNORM = Norm of CU: used to compute C -C CU = Scalar multiple of C: E1 X E2 -C E1,E2 = Edges of the underlying planar triangle: -C V2-V1 and V3-V1, respectively -C I = DO-loop index -C - DO 1 I = 1,3 - E1(I) = V2(I) - V1(I) - E2(I) = V3(I) - V1(I) - 1 CONTINUE -C -C Compute CU = E1 X E2 and CNORM**2. -C - CU(1) = E1(2)*E2(3) - E1(3)*E2(2) - CU(2) = E1(3)*E2(1) - E1(1)*E2(3) - CU(3) = E1(1)*E2(2) - E1(2)*E2(1) - CNORM = CU(1)*CU(1) + CU(2)*CU(2) + CU(3)*CU(3) -C -C The vertices lie on a common line if and only if CU is -C the zero vector. -C - IF (CNORM .NE. 0.) THEN -C -C No error: compute C. -C - CNORM = SQRT(CNORM) - DO 2 I = 1,3 - C(I) = CU(I)/CNORM - 2 CONTINUE - IER = 0 - ELSE -C -C CU = 0. -C - IER = 1 - ENDIF - RETURN - END SUBROUTINE - SUBROUTINE COVSPH (KK,N0, LIST,LPTR,LEND,LNEW ) - INTEGER KK, N0, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine connects an exterior node KK to all -C boundary nodes of a triangulation of KK-1 points on the -C unit sphere, producing a triangulation that covers the -C sphere. The data structure is updated with the addition -C of node KK, but no optimization is performed. All boun- -C dary nodes must be visible from node KK. -C -C -C On input: -C -C KK = Index of the node to be connected to the set of -C all boundary nodes. KK .GE. 4. -C -C N0 = Index of a boundary node (in the range 1 to -C KK-1). 
N0 may be determined by Subroutine -C TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Node N0 must be included in -C the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK as the -C last entry. The updated -C triangulation contains no -C boundary nodes. -C -C Module required by COVSPH: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, NEXT, NST -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C NEXT = Boundary node visible from K -C NST = Local copy of N0 -C - K = KK - NST = N0 -C -C Traverse the boundary in clockwise order, inserting K as -C the first neighbor of each boundary node, and converting -C the boundary node to an interior node. -C - NEXT = NST - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - NEXT = -LIST(LP) - LIST(LP) = NEXT - IF (NEXT .NE. NST) GO TO 1 -C -C Traverse the boundary again, adding each node to K's -C adjacency list. -C - LSAV = LNEW - 2 LP = LEND(NEXT) - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = LIST(LP) - IF (NEXT .NE. NST) GO TO 2 -C - LPTR(LNEW-1) = LSAV - LEND(K) = LNEW - 1 - RETURN - END SUBROUTINE - SUBROUTINE CRLIST (N,NCOL,X,Y,Z,LIST,LEND, LPTR,LNEW, - . LTRI, LISTC,NB,XC,YC,ZC,RC,IER) - INTEGER N, NCOL, LIST(*), LEND(N), LPTR(*), LNEW, - . LTRI(6,NCOL), LISTC(*), NB, IER - REAL X(N), Y(N), Z(N), XC(*), YC(*), ZC(*), RC(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 07/05/98 -C -C Given a Delaunay triangulation of nodes on the surface -C of the unit sphere, this subroutine returns the set of -C triangle circumcenters corresponding to Voronoi vertices, -C along with the circumradii and a list of triangle indexes -C LISTC stored in one-to-one correspondence with LIST/LPTR -C entries. -C -C A triangle circumcenter is the point (unit vector) lying -C at the same angular distance from the three vertices and -C contained in the same hemisphere as the vertices. (Note -C that the negative of a circumcenter is also equidistant -C from the vertices.) If the triangulation covers the sur- -C face, the Voronoi vertices are the circumcenters of the -C triangles in the Delaunay triangulation. LPTR, LEND, and -C LNEW are not altered in this case. -C -C On the other hand, if the nodes are contained in a sin- -C gle hemisphere, the triangulation is implicitly extended -C to the entire surface by adding pseudo-arcs (of length -C greater than 180 degrees) between boundary nodes forming -C pseudo-triangles whose 'circumcenters' are included in the -C list. This extension to the triangulation actually con- -C sists of a triangulation of the set of boundary nodes in -C which the swap test is reversed (a non-empty circumcircle -C test). The negative circumcenters are stored as the -C pseudo-triangle 'circumcenters'. LISTC, LPTR, LEND, and -C LNEW contain a data structure corresponding to the ex- -C tended triangulation (Voronoi diagram), but LIST is not -C altered in this case. Thus, if it is necessary to retain -C the original (unextended) triangulation data structure, -C copies of LPTR and LNEW must be saved before calling this -C routine. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C Note that, if N = 3, there are only two Voronoi -C vertices separated by 180 degrees, and the -C Voronoi regions are not well defined. -C -C NCOL = Number of columns reserved for LTRI. 
This -C must be at least NB-2, where NB is the number -C of boundary nodes. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST = Integer array containing the set of adjacency -C lists. Refer to Subroutine TRMESH. -C -C LEND = Set of pointers to ends of adjacency lists. -C Refer to Subroutine TRMESH. -C -C The above parameters are not altered by this routine. -C -C LPTR = Array of pointers associated with LIST. Re- -C fer to Subroutine TRMESH. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). -C -C LTRI = Integer work space array dimensioned 6 by -C NCOL, or unused dummy parameter if NB = 0. -C -C LISTC = Integer array of length at least 3*NT, where -C NT = 2*N-4 is the number of triangles in the -C triangulation (after extending it to cover -C the entire surface if necessary). -C -C XC,YC,ZC,RC = Arrays of length NT = 2*N-4. -C -C On output: -C -C LPTR = Array of pointers associated with LISTC: -C updated for the addition of pseudo-triangles -C if the original triangulation contains -C boundary nodes (NB > 0). -C -C LNEW = Pointer to the first empty location in LISTC -C and LPTR (list length plus one). LNEW is not -C altered if NB = 0. -C -C LTRI = Triangle list whose first NB-2 columns con- -C tain the indexes of a clockwise-ordered -C sequence of vertices (first three rows) -C followed by the LTRI column indexes of the -C triangles opposite the vertices (or 0 -C denoting the exterior region) in the last -C three rows. This array is not generally of -C any use. 
-C -C LISTC = Array containing triangle indexes (indexes -C to XC, YC, ZC, and RC) stored in 1-1 corres- -C pondence with LIST/LPTR entries (or entries -C that would be stored in LIST for the -C extended triangulation): the index of tri- -C angle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C NB = Number of boundary nodes unless IER = 1. -C -C XC,YC,ZC = Arrays containing the Cartesian coordi- -C nates of the triangle circumcenters -C (Voronoi vertices). XC(I)**2 + YC(I)**2 -C + ZC(I)**2 = 1. The first NB-2 entries -C correspond to pseudo-triangles if NB > 0. -C -C RC = Array containing circumradii (the arc lengths -C or angles between the circumcenters and associ- -C ated triangle vertices) in 1-1 correspondence -C with circumcenters. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N < 3. -C IER = 2 if NCOL < NB-2. -C IER = 3 if a triangle is degenerate (has ver- -C tices lying on a common geodesic). -C -C Modules required by CRLIST: CIRCUM, LSTPTR, SWPTST -C -C Intrinsic functions called by CRLIST: ABS, ACOS -C -C*********************************************************** -C - INTEGER I1, I2, I3, I4, IERR, KT, KT1, KT2, KT11, - . KT12, KT21, KT22, LP, LPL, LPN, N0, N1, N2, - . 
N3, N4, NM2, NN, NT - LOGICAL SWP - REAL C(3), T, V1(3), V2(3), V3(3) -C -C Local parameters: -C -C C = Circumcenter returned by Subroutine CIRCUM -C I1,I2,I3 = Permutation of (1,2,3): LTRI row indexes -C I4 = LTRI row index in the range 1 to 3 -C IERR = Error flag for calls to CIRCUM -C KT = Triangle index -C KT1,KT2 = Indexes of a pair of adjacent pseudo-triangles -C KT11,KT12 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT1 -C KT21,KT22 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT2 -C LP,LPN = LIST pointers -C LPL = LIST pointer of the last neighbor of N1 -C N0 = Index of the first boundary node (initial -C value of N1) in the loop on boundary nodes -C used to store the pseudo-triangle indexes -C in LISTC -C N1,N2,N3 = Nodal indexes defining a triangle (CCW order) -C or pseudo-triangle (clockwise order) -C N4 = Index of the node opposite N2 -> N1 -C NM2 = N-2 -C NN = Local copy of N -C NT = Number of pseudo-triangles: NB-2 -C SWP = Logical variable set to TRUE in each optimiza- -C tion loop (loop on pseudo-arcs) iff a swap -C is performed -C V1,V2,V3 = Vertices of triangle KT = (N1,N2,N3) sent to -C Subroutine CIRCUM -C - NN = N - NB = 0 - NT = 0 - IF (NN .LT. 3) GO TO 21 -C -C Search for a boundary node N1. -C - DO 1 N1 = 1,NN - LP = LEND(N1) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation already covers the sphere. -C - GO TO 9 -C -C There are NB .GE. 3 boundary nodes. Add NB-2 pseudo- -C triangles (N1,N2,N3) by connecting N3 to the NB-3 -C boundary nodes to which it is not already adjacent. -C -C Set N3 and N2 to the first and last neighbors, -C respectively, of N1. -C - 2 N2 = -LIST(LP) - LP = LPTR(LP) - N3 = LIST(LP) -C -C Loop on boundary arcs N1 -> N2 in clockwise order, -C storing triangles (N1,N2,N3) in column NT of LTRI -C along with the indexes of the triangles opposite -C the vertices. -C - 3 NT = NT + 1 - IF (NT .LE. 
NCOL) THEN - LTRI(1,NT) = N1 - LTRI(2,NT) = N2 - LTRI(3,NT) = N3 - LTRI(4,NT) = NT + 1 - LTRI(5,NT) = NT - 1 - LTRI(6,NT) = 0 - ENDIF - N1 = N2 - LP = LEND(N1) - N2 = -LIST(LP) - IF (N2 .NE. N3) GO TO 3 -C - NB = NT + 2 - IF (NCOL .LT. NT) GO TO 22 - LTRI(4,NT) = 0 - IF (NT .EQ. 1) GO TO 7 -C -C Optimize the exterior triangulation (set of pseudo- -C triangles) by applying swaps to the pseudo-arcs N1-N2 -C (pairs of adjacent pseudo-triangles KT1 and KT2 > KT1). -C The loop on pseudo-arcs is repeated until no swaps are -C performed. -C - 4 SWP = .FALSE. - DO 6 KT1 = 1,NT-1 - DO 5 I3 = 1,3 - KT2 = LTRI(I3+3,KT1) - IF (KT2 .LE. KT1) GO TO 5 -C -C The LTRI row indexes (I1,I2,I3) of triangle KT1 = -C (N1,N2,N3) are a cyclical permutation of (1,2,3). -C - IF (I3 .EQ. 1) THEN - I1 = 2 - I2 = 3 - ELSEIF (I3 .EQ. 2) THEN - I1 = 3 - I2 = 1 - ELSE - I1 = 1 - I2 = 2 - ENDIF - N1 = LTRI(I1,KT1) - N2 = LTRI(I2,KT1) - N3 = LTRI(I3,KT1) -C -C KT2 = (N2,N1,N4) for N4 = LTRI(I,KT2), where -C LTRI(I+3,KT2) = KT1. -C - IF (LTRI(4,KT2) .EQ. KT1) THEN - I4 = 1 - ELSEIF (LTRI(5,KT2) .EQ. KT1) THEN - I4 = 2 - ELSE - I4 = 3 - ENDIF - N4 = LTRI(I4,KT2) -C -C The empty circumcircle test is reversed for the pseudo- -C triangles. The reversal is implicit in the clockwise -C ordering of the vertices. -C - IF ( .NOT. SWPTST(N1,N2,N3,N4,X,Y,Z) ) GO TO 5 -C -C Swap arc N1-N2 for N3-N4. KTij is the triangle opposite -C Nj as a vertex of KTi. -C - SWP = .TRUE. - KT11 = LTRI(I1+3,KT1) - KT12 = LTRI(I2+3,KT1) - IF (I4 .EQ. 1) THEN - I2 = 2 - I1 = 3 - ELSEIF (I4 .EQ. 2) THEN - I2 = 3 - I1 = 1 - ELSE - I2 = 1 - I1 = 2 - ENDIF - KT21 = LTRI(I1+3,KT2) - KT22 = LTRI(I2+3,KT2) - LTRI(1,KT1) = N4 - LTRI(2,KT1) = N3 - LTRI(3,KT1) = N1 - LTRI(4,KT1) = KT12 - LTRI(5,KT1) = KT22 - LTRI(6,KT1) = KT2 - LTRI(1,KT2) = N3 - LTRI(2,KT2) = N4 - LTRI(3,KT2) = N2 - LTRI(4,KT2) = KT21 - LTRI(5,KT2) = KT11 - LTRI(6,KT2) = KT1 -C -C Correct the KT11 and KT22 entries that changed. -C - IF (KT11 .NE. 
0) THEN - I4 = 4 - IF (LTRI(4,KT11) .NE. KT1) THEN - I4 = 5 - IF (LTRI(5,KT11) .NE. KT1) I4 = 6 - ENDIF - LTRI(I4,KT11) = KT2 - ENDIF - IF (KT22 .NE. 0) THEN - I4 = 4 - IF (LTRI(4,KT22) .NE. KT2) THEN - I4 = 5 - IF (LTRI(5,KT22) .NE. KT2) I4 = 6 - ENDIF - LTRI(I4,KT22) = KT1 - ENDIF - 5 CONTINUE - 6 CONTINUE - IF (SWP) GO TO 4 -C -C Compute and store the negative circumcenters and radii of -C the pseudo-triangles in the first NT positions. -C - 7 DO 8 KT = 1,NT - N1 = LTRI(1,KT) - N2 = LTRI(2,KT) - N3 = LTRI(3,KT) - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the negative circumcenter and radius (computed -C from ). -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 1.0) T = 1.0 - RC(KT) = ACOS(T) - 8 CONTINUE -C -C Compute and store the circumcenters and radii of the -C actual triangles in positions KT = NT+1, NT+2, ... -C Also, store the triangle indexes KT in the appropriate -C LISTC positions. -C - 9 KT = NT -C -C Loop on nodes N1. -C - NM2 = NN - 2 - DO 12 N1 = 1,NM2 - LPL = LEND(N1) - LP = LPL - N3 = LIST(LP) -C -C Loop on adjacent neighbors N2,N3 of N1 for which N2 > N1 -C and N3 > N1. -C - 10 LP = LPTR(LP) - N2 = N3 - N3 = ABS(LIST(LP)) - IF (N2 .LE. N1 .OR. N3 .LE. N1) GO TO 11 - KT = KT + 1 -C -C Compute the circumcenter C of triangle KT = (N1,N2,N3). -C - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the circumcenter, radius and triangle index. -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 
1.0) T = 1.0 - RC(KT) = ACOS(T) -C -C Store KT in LISTC(LPN), where Abs(LIST(LPN)) is the -C index of N2 as a neighbor of N1, N3 as a neighbor -C of N2, and N1 as a neighbor of N3. -C - LPN = LSTPTR(LPL,N2,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N2),N3,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N3),N1,LIST,LPTR) - LISTC(LPN) = KT - 11 IF (LP .NE. LPL) GO TO 10 - 12 CONTINUE - IF (NT .EQ. 0) GO TO 20 -C -C Store the first NT triangle indexes in LISTC. -C -C Find a boundary triangle KT1 = (N1,N2,N3) with a -C boundary arc opposite N3. -C - KT1 = 0 - 13 KT1 = KT1 + 1 - IF (LTRI(4,KT1) .EQ. 0) THEN - I1 = 2 - I2 = 3 - I3 = 1 - GO TO 14 - ELSEIF (LTRI(5,KT1) .EQ. 0) THEN - I1 = 3 - I2 = 1 - I3 = 2 - GO TO 14 - ELSEIF (LTRI(6,KT1) .EQ. 0) THEN - I1 = 1 - I2 = 2 - I3 = 3 - GO TO 14 - ENDIF - GO TO 13 - 14 N1 = LTRI(I1,KT1) - N0 = N1 -C -C Loop on boundary nodes N1 in CCW order, storing the -C indexes of the clockwise-ordered sequence of triangles -C that contain N1. The first triangle overwrites the -C last neighbor position, and the remaining triangles, -C if any, are appended to N1's adjacency list. -C -C A pointer to the first neighbor of N1 is saved in LPN. -C - 15 LP = LEND(N1) - LPN = LPTR(LP) - LISTC(LP) = KT1 -C -C Loop on triangles KT2 containing N1. -C - 16 KT2 = LTRI(I2+3,KT1) - IF (KT2 .NE. 0) THEN -C -C Append KT2 to N1's triangle list. -C - LPTR(LP) = LNEW - LP = LNEW - LISTC(LP) = KT2 - LNEW = LNEW + 1 -C -C Set KT1 to KT2 and update (I1,I2,I3) such that -C LTRI(I1,KT1) = N1. -C - KT1 = KT2 - IF (LTRI(1,KT1) .EQ. N1) THEN - I1 = 1 - I2 = 2 - I3 = 3 - ELSEIF (LTRI(2,KT1) .EQ. N1) THEN - I1 = 2 - I2 = 3 - I3 = 1 - ELSE - I1 = 3 - I2 = 1 - I3 = 2 - ENDIF - GO TO 16 - ENDIF -C -C Store the saved first-triangle pointer in LPTR(LP), set -C N1 to the next boundary node, test for termination, -C and permute the indexes: the last triangle containing -C a boundary node is the first triangle containing the -C next boundary node. 
-C - LPTR(LP) = LPN - N1 = LTRI(I3,KT1) - IF (N1 .NE. N0) THEN - I4 = I3 - I3 = I2 - I2 = I1 - I1 = I4 - GO TO 15 - ENDIF -C -C No errors encountered. -C - 20 IER = 0 - RETURN -C -C N < 3. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for LTRI. -C - 22 IER = 2 - RETURN -C -C Error flag returned by CIRCUM: KT indexes a null triangle. -C - 23 IER = 3 - RETURN - END SUBROUTINE - SUBROUTINE DELARC (N,IO1,IO2, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER N, IO1, IO2, LIST(*), LPTR(*), LEND(N), LNEW, - . IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine deletes a boundary arc from a triangula- -C tion. It may be used to remove a null triangle from the -C convex hull boundary. Note, however, that if the union of -C triangles is rendered nonconvex, Subroutines DELNOD, EDGE, -C and TRFIND (and hence ADDNOD) may fail. Also, Function -C NEARND should not be called following an arc deletion. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 4. -C -C IO1,IO2 = Indexes (in the range 1 to N) of a pair of -C adjacent boundary nodes defining the arc -C to be removed. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of arc IO1-IO2 -C unless IER > 0. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N, IO1, or IO2 is outside its valid -C range, or IO1 = IO2. -C IER = 2 if IO1-IO2 is not a boundary arc. 
-C IER = 3 if the node opposite IO1-IO2 is al- -C ready a boundary node, and thus IO1 -C or IO2 has only two neighbors or a -C deletion would result in two triangu- -C lations sharing a single node. -C IER = 4 if one of the nodes is a neighbor of -C the other, but not vice versa, imply- -C ing an invalid triangulation data -C structure. -C -C Module required by DELARC: DELNB, LSTPTR -C -C Intrinsic function called by DELARC: ABS -C -C*********************************************************** -C - INTEGER LP, LPH, LPL, N1, N2, N3 -C -C Local parameters: -C -C LP = LIST pointer -C LPH = LIST pointer or flag returned by DELNB -C LPL = Pointer to the last neighbor of N1, N2, or N3 -C N1,N2,N3 = Nodal indexes of a triangle such that N1->N2 -C is the directed boundary edge associated -C with IO1-IO2 -C - N1 = IO1 - N2 = IO2 -C -C Test for errors, and set N1->N2 to the directed boundary -C edge associated with IO1-IO2: (N1,N2,N3) is a triangle -C for some N3. -C - IF (N .LT. 4 .OR. N1 .LT. 1 .OR. N1 .GT. N .OR. - . N2 .LT. 1 .OR. N2 .GT. N .OR. N1 .EQ. N2) THEN - IER = 1 - RETURN - ENDIF -C - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - N1 = N2 - N2 = IO1 - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - IER = 2 - RETURN - ENDIF - ENDIF -C -C Set N3 to the node opposite N1->N2 (the second neighbor -C of N1), and test for error 3 (N3 already a boundary -C node). -C - LPL = LEND(N1) - LP = LPTR(LPL) - LP = LPTR(LP) - N3 = ABS(LIST(LP)) - LPL = LEND(N3) - IF (LIST(LPL) .LE. 0) THEN - IER = 3 - RETURN - ENDIF -C -C Delete N2 as a neighbor of N1, making N3 the first -C neighbor, and test for error 4 (N2 not a neighbor -C of N1). Note that previously computed pointers may -C no longer be valid following the call to DELNB. -C - CALL DELNB (N1,N2,N, LIST,LPTR,LEND,LNEW, LPH) - IF (LPH .LT. 0) THEN - IER = 4 - RETURN - ENDIF -C -C Delete N1 as a neighbor of N2, making N3 the new last -C neighbor. 
-C - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNEW, LPH) -C -C Make N3 a boundary node with first neighbor N2 and last -C neighbor N1. -C - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - LEND(N3) = LP - LIST(LP) = -N1 -C -C No errors encountered. -C - IER = 0 - RETURN - END SUBROUTINE - SUBROUTINE DELNB (N0,NB,N, LIST,LPTR,LEND,LNEW, LPH) - INTEGER N0, NB, N, LIST(*), LPTR(*), LEND(N), LNEW, - . LPH -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/29/98 -C -C This subroutine deletes a neighbor NB from the adjacency -C list of node N0 (but N0 is not deleted from the adjacency -C list of NB) and, if NB is a boundary node, makes N0 a -C boundary node. For pointer (LIST index) LPH to NB as a -C neighbor of N0, the empty LIST,LPTR location LPH is filled -C in with the values at LNEW-1, pointer LNEW-1 (in LPTR and -C possibly in LEND) is changed to LPH, and LNEW is decremen- -C ted. This requires a search of LEND and LPTR entailing an -C expected operation count of O(N). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N0,NB = Indexes, in the range 1 to N, of a pair of -C nodes such that NB is a neighbor of N0. -C (N0 need not be a neighbor of NB.) -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of NB from the ad- -C jacency list of N0 unless -C LPH < 0. -C -C LPH = List pointer to the hole (NB as a neighbor of -C N0) filled in by the values at LNEW-1 or error -C indicator: -C LPH > 0 if no errors were encountered. -C LPH = -1 if N0, NB, or N is outside its valid -C range. -C LPH = -2 if NB is not a neighbor of N0. 
-C -C Modules required by DELNB: None -C -C Intrinsic function called by DELNB: ABS -C -C*********************************************************** -C - INTEGER I, LNW, LP, LPB, LPL, LPP, NN -C -C Local parameters: -C -C I = DO-loop index -C LNW = LNEW-1 (output value of LNEW) -C LP = LIST pointer of the last neighbor of NB -C LPB = Pointer to NB as a neighbor of N0 -C LPL = Pointer to the last neighbor of N0 -C LPP = Pointer to the neighbor of N0 that precedes NB -C NN = Local copy of N -C - NN = N -C -C Test for error 1. -C - IF (N0 .LT. 1 .OR. N0 .GT. NN .OR. NB .LT. 1 .OR. - . NB .GT. NN .OR. NN .LT. 3) THEN - LPH = -1 - RETURN - ENDIF -C -C Find pointers to neighbors of N0: -C -C LPL points to the last neighbor, -C LPP points to the neighbor NP preceding NB, and -C LPB points to NB. -C - LPL = LEND(N0) - LPP = LPL - LPB = LPTR(LPP) - 1 IF (LIST(LPB) .EQ. NB) GO TO 2 - LPP = LPB - LPB = LPTR(LPP) - IF (LPB .NE. LPL) GO TO 1 -C -C Test for error 2 (NB not found). -C - IF (ABS(LIST(LPB)) .NE. NB) THEN - LPH = -2 - RETURN - ENDIF -C -C NB is the last neighbor of N0. Make NP the new last -C neighbor and, if NB is a boundary node, then make N0 -C a boundary node. -C - LEND(N0) = LPP - LP = LEND(NB) - IF (LIST(LP) .LT. 0) LIST(LPP) = -LIST(LPP) - GO TO 3 -C -C NB is not the last neighbor of N0. If NB is a boundary -C node and N0 is not, then make N0 a boundary node with -C last neighbor NP. -C - 2 LP = LEND(NB) - IF (LIST(LP) .LT. 0 .AND. LIST(LPL) .GT. 0) THEN - LEND(N0) = LPP - LIST(LPP) = -LIST(LPP) - ENDIF -C -C Update LPTR so that the neighbor following NB now fol- -C lows NP, and fill in the hole at location LPB. -C - 3 LPTR(LPP) = LPTR(LPB) - LNW = LNEW-1 - LIST(LPB) = LIST(LNW) - LPTR(LPB) = LPTR(LNW) - DO 4 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LPB - GO TO 5 - ENDIF - 4 CONTINUE -C - 5 DO 6 I = 1,LNW-1 - IF (LPTR(I) .EQ. LNW) THEN - LPTR(I) = LPB - ENDIF - 6 CONTINUE -C -C No errors encountered. 
-C - LNEW = LNW - LPH = LPB - RETURN - END SUBROUTINE - SUBROUTINE DELNOD (K, N,X,Y,Z,LIST,LPTR,LEND,LNEW,LWK, - . IWK, IER) - INTEGER K, N, LIST(*), LPTR(*), LEND(*), LNEW, LWK, - . IWK(2,*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine deletes node K (along with all arcs -C incident on node K) from a triangulation of N nodes on the -C unit sphere, and inserts arcs as necessary to produce a -C triangulation of the remaining N-1 nodes. If a Delaunay -C triangulation is input, a Delaunay triangulation will -C result, and thus, DELNOD reverses the effect of a call to -C Subroutine ADDNOD. -C -C -C On input: -C -C K = Index (for X, Y, and Z) of the node to be -C deleted. 1 .LE. K .LE. N. -C -C K is not altered by this routine. -C -C N = Number of nodes in the triangulation on input. -C N .GE. 4. Note that N will be decremented -C following the deletion. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes in the triangula- -C tion. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. -C -C LWK = Number of columns reserved for IWK. LWK must -C be at least NNB-3, where NNB is the number of -C neighbors of node K, including an extra -C pseudo-node if K is a boundary node. -C -C IWK = Integer work array dimensioned 2 by LWK (or -C array of length .GE. 2*LWK). -C -C On output: -C -C N = Number of nodes in the triangulation on output. -C The input value is decremented unless 1 .LE. IER -C .LE. 4. -C -C X,Y,Z = Updated arrays containing nodal coordinates -C (with elements K+1,...,N+1 shifted up one -C position, thus overwriting element K) unless -C 1 .LE. IER .LE. 4. -C -C LIST,LPTR,LEND,LNEW = Updated triangulation data -C structure reflecting the dele- -C tion unless 1 .LE. IER .LE. 4. 
-C Note that the data structure -C may have been altered if IER > -C 3. -C -C LWK = Number of IWK columns required unless IER = 1 -C or IER = 3. -C -C IWK = Indexes of the endpoints of the new arcs added -C unless LWK = 0 or 1 .LE. IER .LE. 4. (Arcs -C are associated with columns, or pairs of -C adjacent elements if IWK is declared as a -C singly-subscripted array.) -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if K or N is outside its valid range -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if the triangulation data structure is -C invalid on input. -C IER = 4 if K indexes an interior node with -C four or more neighbors, none of which -C can be swapped out due to collineari- -C ty, and K cannot therefore be deleted. -C IER = 5 if an error flag (other than IER = 1) -C was returned by OPTIM. An error -C message is written to the standard -C output unit in this case. -C IER = 6 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs may not be optimal. -C -C Note that the deletion may result in all remaining nodes -C being collinear. This situation is not flagged. -C -C Modules required by DELNOD: DELNB, LEFT, LSTPTR, NBCNT, -C OPTIM, SWAP, SWPTST -C -C Intrinsic function called by DELNOD: ABS -C -C*********************************************************** -C - INTEGER I, IERR, IWL, J, LNW, LP, LP21, LPF, LPH, LPL, - . LPL2, LPN, LWKL, N1, N2, NFRST, NIT, NL, NN, - . 
NNB, NR - LOGICAL BDRY - REAL X1, X2, XL, XR, Y1, Y2, YL, YR, Z1, Z2, ZL, ZR -C -C Local parameters: -C -C BDRY = Logical variable with value TRUE iff N1 is a -C boundary node -C I,J = DO-loop indexes -C IERR = Error flag returned by OPTIM -C IWL = Number of IWK columns containing arcs -C LNW = Local copy of LNEW -C LP = LIST pointer -C LP21 = LIST pointer returned by SWAP -C LPF,LPL = Pointers to the first and last neighbors of N1 -C LPH = Pointer (or flag) returned by DELNB -C LPL2 = Pointer to the last neighbor of N2 -C LPN = Pointer to a neighbor of N1 -C LWKL = Input value of LWK -C N1 = Local copy of K -C N2 = Neighbor of N1 -C NFRST = First neighbor of N1: LIST(LPF) -C NIT = Number of iterations in OPTIM -C NR,NL = Neighbors of N1 preceding (to the right of) and -C following (to the left of) N2, respectively -C NN = Number of nodes in the triangulation -C NNB = Number of neighbors of N1 (including a pseudo- -C node representing the boundary if N1 is a -C boundary node) -C X1,Y1,Z1 = Coordinates of N1 -C X2,Y2,Z2 = Coordinates of N2 -C XL,YL,ZL = Coordinates of NL -C XR,YR,ZR = Coordinates of NR -C -C -C Set N1 to K and NNB to the number of neighbors of N1 (plus -C one if N1 is a boundary node), and test for errors. LPF -C and LPL are LIST indexes of the first and last neighbors -C of N1, IWL is the number of IWK columns containing arcs, -C and BDRY is TRUE iff N1 is a boundary node. -C - N1 = K - NN = N - IF (N1 .LT. 1 .OR. N1 .GT. NN .OR. NN .LT. 4 .OR. - . LWK .LT. 0) GO TO 21 - LPL = LEND(N1) - LPF = LPTR(LPL) - NNB = NBCNT(LPL,LPTR) - BDRY = LIST(LPL) .LT. 0 - IF (BDRY) NNB = NNB + 1 - IF (NNB .LT. 3) GO TO 23 - LWKL = LWK - LWK = NNB - 3 - IF (LWKL .LT. LWK) GO TO 22 - IWL = 0 - IF (NNB .EQ. 3) GO TO 3 -C -C Initialize for loop on arcs N1-N2 for neighbors N2 of N1, -C beginning with the second neighbor. NR and NL are the -C neighbors preceding and following N2, respectively, and -C LP indexes NL. 
The loop is exited when all possible -C swaps have been applied to arcs incident on N1. -C - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - NFRST = LIST(LPF) - NR = NFRST - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - LP = LPTR(LPF) - N2 = LIST(LP) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) - LP = LPTR(LP) -C -C Top of loop: set NL to the neighbor following N2. -C - 1 NL = ABS(LIST(LP)) - IF (NL .EQ. NFRST .AND. BDRY) GO TO 3 - XL = X(NL) - YL = Y(NL) - ZL = Z(NL) -C -C Test for a convex quadrilateral. To avoid an incorrect -C test caused by collinearity, use the fact that if N1 -C is a boundary node, then N1 LEFT NR->NL and if N2 is -C a boundary node, then N2 LEFT NL->NR. -C - LPL2 = LEND(N2) - IF ( .NOT. ((BDRY .OR. LEFT(XR,YR,ZR,XL,YL,ZL,X1,Y1, - . Z1)) .AND. (LIST(LPL2) .LT. 0 .OR. - . LEFT(XL,YL,ZL,XR,YR,ZR,X2,Y2,Z2))) ) THEN -C -C Nonconvex quadrilateral -- no swap is possible. -C - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF -C -C The quadrilateral defined by adjacent triangles -C (N1,N2,NL) and (N2,N1,NR) is convex. Swap in -C NL-NR and store it in IWK unless NL and NR are -C already adjacent, in which case the swap is not -C possible. Indexes larger than N1 must be decremented -C since N1 will be deleted from X, Y, and Z. -C - CALL SWAP (NL,NR,N1,N2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) THEN - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF - IWL = IWL + 1 - IF (NL .LE. N1) THEN - IWK(1,IWL) = NL - ELSE - IWK(1,IWL) = NL - 1 - ENDIF - IF (NR .LE. N1) THEN - IWK(2,IWL) = NR - ELSE - IWK(2,IWL) = NR - 1 - ENDIF -C -C Recompute the LIST indexes and NFRST, and decrement NNB. -C - LPL = LEND(N1) - NNB = NNB - 1 - IF (NNB .EQ. 3) GO TO 3 - LPF = LPTR(LPL) - NFRST = LIST(LPF) - LP = LSTPTR(LPL,NL,LIST,LPTR) - IF (NR .EQ. NFRST) GO TO 2 -C -C NR is not the first neighbor of N1. -C Back up and test N1-NR for a swap again: Set N2 to -C NR and NR to the previous neighbor of N1 -- the -C neighbor of NR which follows N1. LP21 points to NL -C as a neighbor of NR. 
-C - N2 = NR - X2 = XR - Y2 = YR - Z2 = ZR - LP21 = LPTR(LP21) - LP21 = LPTR(LP21) - NR = ABS(LIST(LP21)) - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - GO TO 1 -C -C Bottom of loop -- test for termination of loop. -C - 2 IF (N2 .EQ. NFRST) GO TO 3 - N2 = NL - X2 = XL - Y2 = YL - Z2 = ZL - LP = LPTR(LP) - GO TO 1 -C -C Delete N1 and all its incident arcs. If N1 is an interior -C node and either NNB > 3 or NNB = 3 and N2 LEFT NR->NL, -C then N1 must be separated from its neighbors by a plane -C containing the origin -- its removal reverses the effect -C of a call to COVSPH, and all its neighbors become -C boundary nodes. This is achieved by treating it as if -C it were a boundary node (setting BDRY to TRUE, changing -C a sign in LIST, and incrementing NNB). -C - 3 IF (.NOT. BDRY) THEN - IF (NNB .GT. 3) THEN - BDRY = .TRUE. - ELSE - LPF = LPTR(LPL) - NR = LIST(LPF) - LP = LPTR(LPF) - N2 = LIST(LP) - NL = LIST(LPL) - BDRY = LEFT(X(NR),Y(NR),Z(NR),X(NL),Y(NL),Z(NL), - . X(N2),Y(N2),Z(N2)) - ENDIF - IF (BDRY) THEN -C -C IF a boundary node already exists, then N1 and its -C neighbors cannot be converted to boundary nodes. -C (They must be collinear.) This is a problem if -C NNB > 3. -C - DO 4 I = 1,NN - IF (LIST(LEND(I)) .LT. 0) THEN - BDRY = .FALSE. - GO TO 5 - ENDIF - 4 CONTINUE - LIST(LPL) = -LIST(LPL) - NNB = NNB + 1 - ENDIF - ENDIF - 5 IF (.NOT. BDRY .AND. NNB .GT. 3) GO TO 24 -C -C Initialize for loop on neighbors. LPL points to the last -C neighbor of N1. LNEW is stored in local variable LNW. -C - LP = LPL - LNW = LNEW -C -C Loop on neighbors N2 of N1, beginning with the first. -C - 6 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNW, LPH) - IF (LPH .LT. 0) GO TO 23 -C -C LP and LPL may require alteration. -C - IF (LPL .EQ. LNW) LPL = LPH - IF (LP .EQ. LNW) LP = LPH - IF (LP .NE. LPL) GO TO 6 -C -C Delete N1 from X, Y, Z, and LEND, and remove its adjacency -C list from LIST and LPTR. 
LIST entries (nodal indexes) -C which are larger than N1 must be decremented. -C - NN = NN - 1 - IF (N1 .GT. NN) GO TO 9 - DO 7 I = N1,NN - X(I) = X(I+1) - Y(I) = Y(I+1) - Z(I) = Z(I+1) - LEND(I) = LEND(I+1) - 7 CONTINUE -C - DO 8 I = 1,LNW-1 - IF (LIST(I) .GT. N1) LIST(I) = LIST(I) - 1 - IF (LIST(I) .LT. -N1) LIST(I) = LIST(I) + 1 - 8 CONTINUE -C -C For LPN = first to last neighbors of N1, delete the -C preceding neighbor (indexed by LP). -C -C Each empty LIST,LPTR location LP is filled in with the -C values at LNW-1, and LNW is decremented. All pointers -C (including those in LPTR and LEND) with value LNW-1 -C must be changed to LP. -C -C LPL points to the last neighbor of N1. -C - 9 IF (BDRY) NNB = NNB - 1 - LPN = LPL - DO 13 J = 1,NNB - LNW = LNW - 1 - LP = LPN - LPN = LPTR(LP) - LIST(LP) = LIST(LNW) - LPTR(LP) = LPTR(LNW) - IF (LPTR(LPN) .EQ. LNW) LPTR(LPN) = LP - IF (LPN .EQ. LNW) LPN = LP - DO 10 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LP - GO TO 11 - ENDIF - 10 CONTINUE -C - 11 DO 12 I = LNW-1,1,-1 - IF (LPTR(I) .EQ. LNW) LPTR(I) = LP - 12 CONTINUE - 13 CONTINUE -C -C Update N and LNEW, and optimize the patch of triangles -C containing K (on input) by applying swaps to the arcs -C in IWK. -C - N = NN - LNEW = LNW - IF (IWL .GT. 0) THEN - NIT = 4*IWL - CALL OPTIM (X,Y,Z,IWL, LIST,LPTR,LEND,NIT,IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 25 - IF (IERR .EQ. 1) GO TO 26 - ENDIF -C -C Successful termination. -C - IER = 0 - RETURN -C -C Invalid input parameter. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 22 IER = 2 - RETURN -C -C Invalid triangulation data structure. NNB < 3 on input or -C N2 is a neighbor of N1 but N1 is not a neighbor of N2. -C - 23 IER = 3 - RETURN -C -C N1 is interior but NNB could not be reduced to 3. -C - 24 IER = 4 - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 25 IER = 5 - WRITE (*,100) NIT, IERR - 100 FORMAT (//5X,'*** Error in OPTIM (called from ', - . 
'DELNOD): NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 26 IER = 6 - RETURN - END SUBROUTINE - SUBROUTINE EDGE (IN1,IN2,X,Y,Z, LWK,IWK,LIST,LPTR, - . LEND, IER) - INTEGER IN1, IN2, LWK, IWK(2,*), LIST(*), LPTR(*), - . LEND(*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a triangulation of N nodes and a pair of nodal -C indexes IN1 and IN2, this routine swaps arcs as necessary -C to force IN1 and IN2 to be adjacent. Only arcs which -C intersect IN1-IN2 are swapped out. If a Delaunay triangu- -C lation is input, the resulting triangulation is as close -C as possible to a Delaunay triangulation in the sense that -C all arcs other than IN1-IN2 are locally optimal. -C -C A sequence of calls to EDGE may be used to force the -C presence of a set of edges defining the boundary of a non- -C convex and/or multiply connected region, or to introduce -C barriers into the triangulation. Note that Subroutine -C GETNP will not necessarily return closest nodes if the -C triangulation has been constrained by a call to EDGE. -C However, this is appropriate in some applications, such -C as triangle-based interpolation on a nonconvex domain. -C -C -C On input: -C -C IN1,IN2 = Indexes (of X, Y, and Z) in the range 1 to -C N defining a pair of nodes to be connected -C by an arc. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C The above parameters are not altered by this routine. -C -C LWK = Number of columns reserved for IWK. This must -C be at least NI -- the number of arcs that -C intersect IN1-IN2. (NI is bounded by N-3.) -C -C IWK = Integer work array of length at least 2*LWK. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. 
-C -C On output: -C -C LWK = Number of arcs which intersect IN1-IN2 (but -C not more than the input value of LWK) unless -C IER = 1 or IER = 3. LWK = 0 if and only if -C IN1 and IN2 were adjacent (or LWK=0) on input. -C -C IWK = Array containing the indexes of the endpoints -C of the new arcs other than IN1-IN2 unless -C IER > 0 or LWK = 0. New arcs to the left of -C IN1->IN2 are stored in the first K-1 columns -C (left portion of IWK), column K contains -C zeros, and new arcs to the right of IN1->IN2 -C occupy columns K+1,...,LWK. (K can be deter- -C mined by searching IWK for the zeros.) -C -C LIST,LPTR,LEND = Data structure updated if necessary -C to reflect the presence of an arc -C connecting IN1 and IN2 unless IER > -C 0. The data structure has been -C altered if IER >= 4. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if IN1 < 1, IN2 < 1, IN1 = IN2, -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if IN1 and IN2 could not be connected -C due to either an invalid data struc- -C ture or collinear nodes (and floating -C point error). -C IER = 4 if an error flag other than IER = 1 -C was returned by OPTIM. -C IER = 5 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs other than IN1-IN2 may not -C be optimal. -C -C An error message is written to the standard output unit -C in the case of IER = 3 or IER = 4. -C -C Modules required by EDGE: LEFT, LSTPTR, OPTIM, SWAP, -C SWPTST -C -C Intrinsic function called by EDGE: ABS -C -C*********************************************************** -C - INTEGER I, IERR, IWC, IWCP1, IWEND, IWF, IWL, LFT, LP, - . LP21, LPL, N0, N1, N1FRST, N1LST, N2, NEXT, - . NIT, NL, NR - REAL DP12, DP1L, DP1R, DP2L, DP2R, X0, X1, X2, Y0, - . 
Y1, Y2, Z0, Z1, Z2 -C -C Local parameters: -C -C DPij = Dot product -C I = DO-loop index and column index for IWK -C IERR = Error flag returned by Subroutine OPTIM -C IWC = IWK index between IWF and IWL -- NL->NR is -C stored in IWK(1,IWC)->IWK(2,IWC) -C IWCP1 = IWC + 1 -C IWEND = Input or output value of LWK -C IWF = IWK (column) index of the first (leftmost) arc -C which intersects IN1->IN2 -C IWL = IWK (column) index of the last (rightmost) are -C which intersects IN1->IN2 -C LFT = Flag used to determine if a swap results in the -C new arc intersecting IN1-IN2 -- LFT = 0 iff -C N0 = IN1, LFT = -1 implies N0 LEFT IN1->IN2, -C and LFT = 1 implies N0 LEFT IN2->IN1 -C LP = List pointer (index for LIST and LPTR) -C LP21 = Unused parameter returned by SWAP -C LPL = Pointer to the last neighbor of IN1 or NL -C N0 = Neighbor of N1 or node opposite NR->NL -C N1,N2 = Local copies of IN1 and IN2 -C N1FRST = First neighbor of IN1 -C N1LST = (Signed) last neighbor of IN1 -C NEXT = Node opposite NL->NR -C NIT = Flag or number of iterations employed by OPTIM -C NL,NR = Endpoints of an arc which intersects IN1-IN2 -C with NL LEFT IN1->IN2 -C X0,Y0,Z0 = Coordinates of N0 -C X1,Y1,Z1 = Coordinates of IN1 -C X2,Y2,Z2 = Coordinates of IN2 -C -C -C Store IN1, IN2, and LWK in local variables and test for -C errors. -C - N1 = IN1 - N2 = IN2 - IWEND = LWK - IF (N1 .LT. 1 .OR. N2 .LT. 1 .OR. N1 .EQ. N2 .OR. - . IWEND .LT. 0) GO TO 31 -C -C Test for N2 as a neighbor of N1. LPL points to the last -C neighbor of N1. -C - LPL = LEND(N1) - N0 = ABS(LIST(LPL)) - LP = LPL - 1 IF (N0 .EQ. N2) GO TO 30 - LP = LPTR(LP) - N0 = LIST(LP) - IF (LP .NE. LPL) GO TO 1 -C -C Initialize parameters. -C - IWL = 0 - NIT = 0 -C -C Store the coordinates of N1 and N2. 
-C - 2 X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) -C -C Set NR and NL to adjacent neighbors of N1 such that -C NR LEFT N2->N1 and NL LEFT N1->N2, -C (NR Forward N1->N2 or NL Forward N1->N2), and -C (NR Forward N2->N1 or NL Forward N2->N1). -C -C Initialization: Set N1FRST and N1LST to the first and -C (signed) last neighbors of N1, respectively, and -C initialize NL to N1FRST. -C - LPL = LEND(N1) - N1LST = LIST(LPL) - LP = LPTR(LPL) - N1FRST = LIST(LP) - NL = N1FRST - IF (N1LST .LT. 0) GO TO 4 -C -C N1 is an interior node. Set NL to the first candidate -C for NR (NL LEFT N2->N1). -C - 3 IF (LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL),Z(NL))) GO TO 4 - LP = LPTR(LP) - NL = LIST(LP) - IF (NL .NE. N1FRST) GO TO 3 -C -C All neighbors of N1 are strictly left of N1->N2. -C - GO TO 5 -C -C NL = LIST(LP) LEFT N2->N1. Set NR to NL and NL to the -C following neighbor of N1. -C - 4 NR = NL - LP = LPTR(LP) - NL = ABS(LIST(LP)) - IF (LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NL),Y(NL),Z(NL)) ) THEN -C -C NL LEFT N1->N2 and NR LEFT N2->N1. The Forward tests -C are employed to avoid an error associated with -C collinear nodes. -C - DP12 = X1*X2 + Y1*Y2 + Z1*Z2 - DP1L = X1*X(NL) + Y1*Y(NL) + Z1*Z(NL) - DP2L = X2*X(NL) + Y2*Y(NL) + Z2*Z(NL) - DP1R = X1*X(NR) + Y1*Y(NR) + Z1*Z(NR) - DP2R = X2*X(NR) + Y2*Y(NR) + Z2*Z(NR) - IF ( (DP2L-DP12*DP1L .GE. 0. .OR. - . DP2R-DP12*DP1R .GE. 0.) .AND. - . (DP1L-DP12*DP2L .GE. 0. .OR. - . DP1R-DP12*DP2R .GE. 0.) ) GO TO 6 -C -C NL-NR does not intersect N1-N2. However, there is -C another candidate for the first arc if NL lies on -C the line N1-N2. -C - IF ( .NOT. LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL), - . Z(NL)) ) GO TO 5 - ENDIF -C -C Bottom of loop. -C - IF (NL .NE. N1FRST) GO TO 4 -C -C Either the triangulation is invalid or N1-N2 lies on the -C convex hull boundary and an edge NR->NL (opposite N1 and -C intersecting N1-N2) was not found due to floating point -C error. 
Try interchanging N1 and N2 -- NIT > 0 iff this -C has already been done. -C - 5 IF (NIT .GT. 0) GO TO 33 - NIT = 1 - N1 = N2 - N2 = IN1 - GO TO 2 -C -C Store the ordered sequence of intersecting edges NL->NR in -C IWK(1,IWL)->IWK(2,IWL). -C - 6 IWL = IWL + 1 - IF (IWL .GT. IWEND) GO TO 32 - IWK(1,IWL) = NL - IWK(2,IWL) = NR -C -C Set NEXT to the neighbor of NL which follows NR. -C - LPL = LEND(NL) - LP = LPTR(LPL) -C -C Find NR as a neighbor of NL. The search begins with -C the first neighbor. -C - 7 IF (LIST(LP) .EQ. NR) GO TO 8 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 7 -C -C NR must be the last neighbor, and NL->NR cannot be a -C boundary edge. -C - IF (LIST(LP) .NE. NR) GO TO 33 -C -C Set NEXT to the neighbor following NR, and test for -C termination of the store loop. -C - 8 LP = LPTR(LP) - NEXT = ABS(LIST(LP)) - IF (NEXT .EQ. N2) GO TO 9 -C -C Set NL or NR to NEXT. -C - IF ( LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NEXT),Y(NEXT),Z(NEXT)) ) - . THEN - NL = NEXT - ELSE - NR = NEXT - ENDIF - GO TO 6 -C -C IWL is the number of arcs which intersect N1-N2. -C Store LWK. -C - 9 LWK = IWL - IWEND = IWL -C -C Initialize for edge swapping loop -- all possible swaps -C are applied (even if the new arc again intersects -C N1-N2), arcs to the left of N1->N2 are stored in the -C left portion of IWK, and arcs to the right are stored in -C the right portion. IWF and IWL index the first and last -C intersecting arcs. -C - IWF = 1 -C -C Top of loop -- set N0 to N1 and NL->NR to the first edge. -C IWC points to the arc currently being processed. LFT -C .LE. 0 iff N0 LEFT N1->N2. -C - 10 LFT = 0 - N0 = N1 - X0 = X1 - Y0 = Y1 - Z0 = Z1 - NL = IWK(1,IWF) - NR = IWK(2,IWF) - IWC = IWF -C -C Set NEXT to the node opposite NL->NR unless IWC is the -C last arc. -C - 11 IF (IWC .EQ. IWL) GO TO 21 - IWCP1 = IWC + 1 - NEXT = IWK(1,IWCP1) - IF (NEXT .NE. NL) GO TO 16 - NEXT = IWK(2,IWCP1) -C -C NEXT RIGHT N1->N2 and IWC .LT. IWL. Test for a possible -C swap. -C - IF ( .NOT. 
LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 - IF (LFT .GE. 0) GO TO 12 - IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 -C -C Replace NL->NR with N0->NEXT. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = N0 - IWK(2,IWC) = NEXT - GO TO 15 -C -C Swap NL-NR for N0-NEXT, shift columns IWC+1,...,IWL to -C the left, and store N0-NEXT in the right portion of -C IWK. -C - 12 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 13 I = IWCP1,IWL - IWK(1,I-1) = IWK(1,I) - IWK(2,I-1) = IWK(2,I) - 13 CONTINUE - IWK(1,IWL) = N0 - IWK(2,IWL) = NEXT - IWL = IWL - 1 - NR = NEXT - GO TO 11 -C -C A swap is not possible. Set N0 to NR. -C - 14 N0 = NR - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = 1 -C -C Advance to the next arc. -C - 15 NR = NEXT - IWC = IWC + 1 - GO TO 11 -C -C NEXT LEFT N1->N2, NEXT .NE. N2, and IWC .LT. IWL. -C Test for a possible swap. -C - 16 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 - IF (LFT .LE. 0) GO TO 17 - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 -C -C Replace NL->NR with NEXT->N0. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = NEXT - IWK(2,IWC) = N0 - GO TO 20 -C -C Swap NL-NR for N0-NEXT, shift columns IWF,...,IWC-1 to -C the right, and store N0-NEXT in the left portion of -C IWK. -C - 17 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 18 I = IWC-1,IWF,-1 - IWK(1,I+1) = IWK(1,I) - IWK(2,I+1) = IWK(2,I) - 18 CONTINUE - IWK(1,IWF) = N0 - IWK(2,IWF) = NEXT - IWF = IWF + 1 - GO TO 20 -C -C A swap is not possible. Set N0 to NL. -C - 19 N0 = NL - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = -1 -C -C Advance to the next arc. -C - 20 NL = NEXT - IWC = IWC + 1 - GO TO 11 -C -C N2 is opposite NL->NR (IWC = IWL). -C - 21 IF (N0 .EQ. N1) GO TO 24 - IF (LFT .LT. 0) GO TO 22 -C -C N0 RIGHT N1->N2. Test for a possible swap. -C - IF ( .NOT. 
LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2 and store N0-N2 in the right -C portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWL) = N0 - IWK(2,IWL) = N2 - IWL = IWL - 1 - GO TO 10 -C -C N0 LEFT N1->N2. Test for a possible swap. -C - 22 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2, shift columns IWF,...,IWL-1 to the -C right, and store N0-N2 in the left portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - I = IWL - 23 IWK(1,I) = IWK(1,I-1) - IWK(2,I) = IWK(2,I-1) - I = I - 1 - IF (I .GT. IWF) GO TO 23 - IWK(1,IWF) = N0 - IWK(2,IWF) = N2 - IWF = IWF + 1 - GO TO 10 -C -C IWF = IWC = IWL. Swap out the last arc for N1-N2 and -C store zeros in IWK. -C - 24 CALL SWAP (N2,N1,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = 0 - IWK(2,IWC) = 0 -C -C Optimization procedure -- -C - IER = 0 - IF (IWC .GT. 1) THEN -C -C Optimize the set of new arcs to the left of IN1->IN2. -C - NIT = 4*(IWC-1) - CALL OPTIM (X,Y,Z,IWC-1, LIST,LPTR,LEND,NIT, - . IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) IER = 5 - ENDIF - IF (IWC .LT. IWEND) THEN -C -C Optimize the set of new arcs to the right of IN1->IN2. -C - NIT = 4*(IWEND-IWC) - CALL OPTIM (X,Y,Z,IWEND-IWC, LIST,LPTR,LEND,NIT, - . IWK(1,IWC+1), IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) GO TO 35 - ENDIF - IF (IER .EQ. 5) GO TO 35 -C -C Successful termination (IER = 0). -C - RETURN -C -C IN1 and IN2 were adjacent on input. -C - 30 IER = 0 - RETURN -C -C Invalid input parameter. -C - 31 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 32 IER = 2 - RETURN -C -C Invalid triangulation data structure or collinear nodes -C on convex hull boundary. -C - 33 IER = 3 - WRITE (*,130) IN1, IN2 - 130 FORMAT (//5X,'*** Error in EDGE: Invalid triangula', - . 'tion or null triangles on boundary'/ - . 
9X,'IN1 =',I4,', IN2=',I4/) - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 34 IER = 4 - WRITE (*,140) NIT, IERR - 140 FORMAT (//5X,'*** Error in OPTIM (called from EDGE):', - . ' NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 35 IER = 5 - RETURN - END SUBROUTINE - SUBROUTINE GETNP (X,Y,Z,LIST,LPTR,LEND,L, NPTS, DF, - . IER) - INTEGER LIST(*), LPTR(*), LEND(*), L, NPTS(L), IER - REAL X(*), Y(*), Z(*), DF -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a Delaunay triangulation of N nodes on the unit -C sphere and an array NPTS containing the indexes of L-1 -C nodes ordered by angular distance from NPTS(1), this sub- -C routine sets NPTS(L) to the index of the next node in the -C sequence -- the node, other than NPTS(1),...,NPTS(L-1), -C that is closest to NPTS(1). Thus, the ordered sequence -C of K closest nodes to N1 (including N1) may be determined -C by K-1 calls to GETNP with NPTS(1) = N1 and L = 2,3,...,K -C for K .GE. 2. -C -C The algorithm uses the property of a Delaunay triangula- -C tion that the K-th closest node to N1 is a neighbor of one -C of the K-1 closest nodes to N1. -C -C -C On input: -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Triangulation data structure. Re- -C fer to Subroutine TRMESH. -C -C L = Number of nodes in the sequence on output. 2 -C .LE. L .LE. N. -C -C The above parameters are not altered by this routine. -C -C NPTS = Array of length .GE. L containing the indexes -C of the L-1 closest nodes to NPTS(1) in the -C first L-1 locations. -C -C On output: -C -C NPTS = Array updated with the index of the L-th -C closest node to NPTS(1) in position L unless -C IER = 1. 
-C -C DF = Value of an increasing function (negative cos- -C ine) of the angular distance between NPTS(1) -C and NPTS(L) unless IER = 1. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if L < 2. -C -C Modules required by GETNP: None -C -C Intrinsic function called by GETNP: ABS -C -C*********************************************************** -C - INTEGER I, LM1, LP, LPL, N1, NB, NI, NP - REAL DNB, DNP, X1, Y1, Z1 -C -C Local parameters: -C -C DNB,DNP = Negative cosines of the angular distances from -C N1 to NB and to NP, respectively -C I = NPTS index and DO-loop index -C LM1 = L-1 -C LP = LIST pointer of a neighbor of NI -C LPL = Pointer to the last neighbor of NI -C N1 = NPTS(1) -C NB = Neighbor of NI and candidate for NP -C NI = NPTS(I) -C NP = Candidate for NPTS(L) -C X1,Y1,Z1 = Coordinates of N1 -C - LM1 = L - 1 - IF (LM1 .LT. 1) GO TO 6 - IER = 0 -C -C Store N1 = NPTS(1) and mark the elements of NPTS. -C - N1 = NPTS(1) - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - DO 1 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 1 CONTINUE -C -C Candidates for NP = NPTS(L) are the unmarked neighbors -C of nodes in NPTS. DNP is initially greater than -cos(PI) -C (the maximum distance). -C - DNP = 2. -C -C Loop on nodes NI in NPTS. -C - DO 4 I = 1,LM1 - NI = NPTS(I) - LPL = -LEND(NI) - LP = LPL -C -C Loop on neighbors NB of NI. -C - 2 NB = ABS(LIST(LP)) - IF (LEND(NB) .LT. 0) GO TO 3 -C -C NB is an unmarked neighbor of NI. Replace NP if NB is -C closer to N1. -C - DNB = -(X(NB)*X1 + Y(NB)*Y1 + Z(NB)*Z1) - IF (DNB .GE. DNP) GO TO 3 - NP = NB - DNP = DNB - 3 LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 2 - 4 CONTINUE - NPTS(L) = NP - DF = DNP -C -C Unmark the elements of NPTS. -C - DO 5 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 5 CONTINUE - RETURN -C -C L is outside its valid range. 
-C - 6 IER = 1 - RETURN - END SUBROUTINE - SUBROUTINE INSERT (K,LP, LIST,LPTR,LNEW ) - INTEGER K, LP, LIST(*), LPTR(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine inserts K as a neighbor of N1 following -C N2, where LP is the LIST pointer of N2 as a neighbor of -C N1. Note that, if N2 is the last neighbor of N1, K will -C become the first neighbor (even if N1 is a boundary node). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C K = Index of the node to be inserted. -C -C LP = LIST pointer of N2 as a neighbor of N1. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LNEW = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LNEW = Data structure updated with the -C addition of node K. -C -C Modules required by INSERT: None -C -C*********************************************************** -C - INTEGER LSAV -C - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = K - LPTR(LNEW) = LSAV - LNEW = LNEW + 1 - RETURN - END SUBROUTINE - LOGICAL FUNCTION INSIDE (P,LV,XV,YV,ZV,NV,LISTV, IER) - INTEGER LV, NV, LISTV(NV), IER - REAL P(3), XV(LV), YV(LV), ZV(LV) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 12/27/93 -C -C This function locates a point P relative to a polygonal -C region R on the surface of the unit sphere, returning -C INSIDE = TRUE if and only if P is contained in R. R is -C defined by a cyclically ordered sequence of vertices which -C form a positively-oriented simple closed curve. Adjacent -C vertices need not be distinct but the curve must not be -C self-intersecting. 
Also, while polygon edges are by defi- -C nition restricted to a single hemisphere, R is not so -C restricted. Its interior is the region to the left as the -C vertices are traversed in order. -C -C The algorithm consists of selecting a point Q in R and -C then finding all points at which the great circle defined -C by P and Q intersects the boundary of R. P lies inside R -C if and only if there is an even number of intersection -C points between Q and P. Q is taken to be a point immedi- -C ately to the left of a directed boundary edge -- the first -C one that results in no consistency-check failures. -C -C If P is close to the polygon boundary, the problem is -C ill-conditioned and the decision may be incorrect. Also, -C an incorrect decision may result from a poor choice of Q -C (if, for example, a boundary edge lies on the great cir- -C cle defined by P and Q). A more reliable result could be -C obtained by a sequence of calls to INSIDE with the ver- -C tices cyclically permuted before each call (to alter the -C choice of Q). -C -C -C On input: -C -C P = Array of length 3 containing the Cartesian -C coordinates of the point (unit vector) to be -C located. -C -C LV = Length of arrays XV, YV, and ZV. -C -C XV,YV,ZV = Arrays of length LV containing the Carte- -C sian coordinates of unit vectors (points -C on the unit sphere). These values are -C not tested for validity. -C -C NV = Number of vertices in the polygon. 3 .LE. NV -C .LE. LV. -C -C LISTV = Array of length NV containing the indexes -C (for XV, YV, and ZV) of a cyclically-ordered -C (and CCW-ordered) sequence of vertices that -C define R. The last vertex (indexed by -C LISTV(NV)) is followed by the first (indexed -C by LISTV(1)). LISTV entries must be in the -C range 1 to LV. -C -C Input parameters are not altered by this function. -C -C On output: -C -C INSIDE = TRUE if and only if P lies inside R unless -C IER .NE. 0, in which case the value is not -C altered. 
-C
-C       IER = Error indicator:
-C             IER = 0 if no errors were encountered.
-C             IER = 1 if LV or NV is outside its valid
-C                     range.
-C             IER = 2 if a LISTV entry is outside its valid
-C                     range.
-C             IER = 3 if the polygon boundary was found to
-C                     be self-intersecting.  This error will
-C                     not necessarily be detected.
-C             IER = 4 if every choice of Q (one for each
-C                     boundary edge) led to failure of some
-C                     internal consistency check.  The most
-C                     likely cause of this error is invalid
-C                     input:  P = (0,0,0), a null or self-
-C                     intersecting polygon, etc.
-C
-C Module required by INSIDE:  INTRSC
-C
-C Intrinsic function called by INSIDE:  SQRT
-C
-C***********************************************************
-C
-      INTEGER I1, I2, IERR, IMX, K, K0, N, NI
-      LOGICAL EVEN, LFT1, LFT2, PINR, QINR
-      REAL    B(3), BP, BQ, CN(3), D, EPS, PN(3), Q(3),
-     .        QN(3), QNRM, V1(3), V2(3), VN(3), VNRM
-C
-C Local parameters:
-C
-C B =         Intersection point between the boundary and
-C               the great circle defined by P and Q
-C BP,BQ =     <B,P> and <B,Q>, respectively, maximized over
-C               intersection points B that lie between P and
-C               Q (on the shorter arc) -- used to find the
-C               closest intersection points to P and Q
-C CN =        Q X P = normal to the plane of P and Q
-C D =         Dot product <B,P> or <B,Q>
-C EPS =       Parameter used to define Q as the point whose
-C               orthogonal distance to (the midpoint of)
-C               boundary edge V1->V2 is approximately EPS/
-C               (2*Cos(A/2)), where <V1,V2> = Cos(A).
-C EVEN = TRUE iff an even number of intersection points -C lie between P and Q (on the shorter arc) -C I1,I2 = Indexes (LISTV elements) of a pair of adjacent -C boundary vertices (endpoints of a boundary -C edge) -C IERR = Error flag for calls to INTRSC (not tested) -C IMX = Local copy of LV and maximum value of I1 and -C I2 -C K = DO-loop index and LISTV index -C K0 = LISTV index of the first endpoint of the -C boundary edge used to compute Q -C LFT1,LFT2 = Logical variables associated with I1 and I2 in -C the boundary traversal: TRUE iff the vertex -C is strictly to the left of Q->P ( > 0) -C N = Local copy of NV -C NI = Number of intersections (between the boundary -C curve and the great circle P-Q) encountered -C PINR = TRUE iff P is to the left of the directed -C boundary edge associated with the closest -C intersection point to P that lies between P -C and Q (a left-to-right intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C PN,QN = P X CN and CN X Q, respectively: used to -C locate intersections B relative to arc Q->P -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where V1->V2 is -C the boundary edge indexed by LISTV(K0) -> -C LISTV(K0+1) -C QINR = TRUE iff Q is to the left of the directed -C boundary edge associated with the closest -C intersection point to Q that lies between P -C and Q (a right-to-left intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C QNRM = Euclidean norm of V1+V2+EPS*VN/VNRM used to -C compute (normalize) Q -C V1,V2 = Vertices indexed by I1 and I2 in the boundary -C traversal -C VN = V1 X V2, where V1->V2 is the boundary edge -C indexed by LISTV(K0) -> LISTV(K0+1) -C VNRM = Euclidean norm of VN -C - DATA EPS/1.E-3/ -C -C Store local parameters, test for error 1, and initialize -C K0. -C - IMX = LV - N = NV - IF (N .LT. 3 .OR. N .GT. IMX) GO TO 11 - K0 = 0 - I1 = LISTV(1) - IF (I1 .LT. 1 .OR. I1 .GT. 
IMX) GO TO 12 -C -C Increment K0 and set Q to a point immediately to the left -C of the midpoint of edge V1->V2 = LISTV(K0)->LISTV(K0+1): -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where VN = V1 X V2. -C - 1 K0 = K0 + 1 - IF (K0 .GT. N) GO TO 14 - I1 = LISTV(K0) - IF (K0 .LT. N) THEN - I2 = LISTV(K0+1) - ELSE - I2 = LISTV(1) - ENDIF - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - VN(1) = YV(I1)*ZV(I2) - ZV(I1)*YV(I2) - VN(2) = ZV(I1)*XV(I2) - XV(I1)*ZV(I2) - VN(3) = XV(I1)*YV(I2) - YV(I1)*XV(I2) - VNRM = SQRT(VN(1)*VN(1) + VN(2)*VN(2) + VN(3)*VN(3)) - IF (VNRM .EQ. 0.) GO TO 1 - Q(1) = XV(I1) + XV(I2) + EPS*VN(1)/VNRM - Q(2) = YV(I1) + YV(I2) + EPS*VN(2)/VNRM - Q(3) = ZV(I1) + ZV(I2) + EPS*VN(3)/VNRM - QNRM = SQRT(Q(1)*Q(1) + Q(2)*Q(2) + Q(3)*Q(3)) - Q(1) = Q(1)/QNRM - Q(2) = Q(2)/QNRM - Q(3) = Q(3)/QNRM -C -C Compute CN = Q X P, PN = P X CN, and QN = CN X Q. -C - CN(1) = Q(2)*P(3) - Q(3)*P(2) - CN(2) = Q(3)*P(1) - Q(1)*P(3) - CN(3) = Q(1)*P(2) - Q(2)*P(1) - IF (CN(1) .EQ. 0. .AND. CN(2) .EQ. 0. .AND. - . CN(3) .EQ. 0.) GO TO 1 - PN(1) = P(2)*CN(3) - P(3)*CN(2) - PN(2) = P(3)*CN(1) - P(1)*CN(3) - PN(3) = P(1)*CN(2) - P(2)*CN(1) - QN(1) = CN(2)*Q(3) - CN(3)*Q(2) - QN(2) = CN(3)*Q(1) - CN(1)*Q(3) - QN(3) = CN(1)*Q(2) - CN(2)*Q(1) -C -C Initialize parameters for the boundary traversal. -C - NI = 0 - EVEN = .TRUE. - BP = -2. - BQ = -2. - PINR = .TRUE. - QINR = .TRUE. - I2 = LISTV(N) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. -C -C Loop on boundary arcs I1->I2. -C - DO 2 K = 1,N - I1 = I2 - LFT1 = LFT2 - I2 = LISTV(K) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. - IF (LFT1 .EQV. LFT2) GO TO 2 -C -C I1 and I2 are on opposite sides of Q->P. Compute the -C point of intersection B. 
-C
-        NI = NI + 1
-        V1(1) = XV(I1)
-        V1(2) = YV(I1)
-        V1(3) = ZV(I1)
-        V2(1) = XV(I2)
-        V2(2) = YV(I2)
-        V2(3) = ZV(I2)
-        CALL INTRSC (V1,V2,CN, B,IERR)
-C
-C   B is between Q and P (on the shorter arc) iff
-C     B Forward Q->P and B Forward P->Q iff
-C     <B,QN> > 0 and <B,PN> > 0.
-C
-        IF (B(1)*QN(1) + B(2)*QN(2) + B(3)*QN(3) .GT. 0.
-     .      .AND.
-     .      B(1)*PN(1) + B(2)*PN(2) + B(3)*PN(3) .GT. 0.)
-     .    THEN
-C
-C   Update EVEN, BQ, QINR, BP, and PINR.
-C
-          EVEN = .NOT. EVEN
-          D = B(1)*Q(1) + B(2)*Q(2) + B(3)*Q(3)
-          IF (D .GT. BQ) THEN
-            BQ = D
-            QINR = LFT2
-          ENDIF
-          D = B(1)*P(1) + B(2)*P(2) + B(3)*P(3)
-          IF (D .GT. BP) THEN
-            BP = D
-            PINR = LFT1
-          ENDIF
-        ENDIF
-    2   CONTINUE
-C
-C Test for consistency:  NI must be even and QINR must be
-C   TRUE.
-C
-      IF (NI .NE. 2*(NI/2) .OR. .NOT. QINR) GO TO 1
-C
-C Test for error 3:  different values of PINR and EVEN.
-C
-      IF (PINR .NEQV. EVEN) GO TO 13
-C
-C No error encountered.
-C
-      IER = 0
-      INSIDE = EVEN
-      RETURN
-C
-C LV or NV is outside its valid range.
-C
-   11 IER = 1
-      RETURN
-C
-C A LISTV entry is outside its valid range.
-C
-   12 IER = 2
-      RETURN
-C
-C The polygon boundary is self-intersecting.
-C
-   13 IER = 3
-      RETURN
-C
-C Consistency tests failed for all values of Q.
-C
-   14 IER = 4
-      RETURN
-      END FUNCTION
-      SUBROUTINE INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW )
-      INTEGER KK, I1, I2, I3, LIST(*), LPTR(*), LEND(*),
-     .        LNEW
-C
-C***********************************************************
-C
-C                                              From STRIPACK
-C                                            Robert J. Renka
-C                                  Dept. of Computer Science
-C                                       Univ. of North Texas
-C                                           renka@cs.unt.edu
-C                                                   07/17/96
-C
-C   This subroutine adds an interior node to a triangulation
-C of a set of points on the unit sphere.  The data structure
-C is updated with the insertion of node KK into the triangle
-C whose vertices are I1, I2, and I3.  No optimization of the
-C triangulation is performed.
-C
-C   This routine is identical to the similarly named routine
-C in TRIPACK.
-C
-C
-C On input:
-C
-C       KK = Index of the node to be inserted.  KK .GE.
1 -C and KK must not be equal to I1, I2, or I3. -C -C I1,I2,I3 = Indexes of the counterclockwise-ordered -C sequence of vertices of a triangle which -C contains node KK. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. Triangle -C (I1,I2,I3) must be included -C in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. KK -C will be connected to nodes I1, -C I2, and I3. -C -C Modules required by INTADD: INSERT, LSTPTR -C -C*********************************************************** -C - INTEGER K, LP, N1, N2, N3 -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C N1,N2,N3 = Local copies of I1, I2, and I3 -C - K = KK -C -C Initialization. -C - N1 = I1 - N2 = I2 - N3 = I3 -C -C Add K as a neighbor of I1, I2, and I3. -C - LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N2),N3,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) -C -C Add I1, I2, and I3 as neighbors of K. -C - LIST(LNEW) = N1 - LIST(LNEW+1) = N2 - LIST(LNEW+2) = N3 - LPTR(LNEW) = LNEW + 1 - LPTR(LNEW+1) = LNEW + 2 - LPTR(LNEW+2) = LNEW - LEND(K) = LNEW + 2 - LNEW = LNEW + 3 - RETURN - END SUBROUTINE - SUBROUTINE INTRSC (P1,P2,CN, P,IER) - INTEGER IER - REAL P1(3), P2(3), CN(3), P(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/19/90 -C -C Given a great circle C and points P1 and P2 defining an -C arc A on the surface of the unit sphere, where A is the -C shorter of the two portions of the great circle C12 assoc- -C iated with P1 and P2, this subroutine returns the point -C of intersection P between C and C12 that is closer to A. 
-C Thus, if P1 and P2 lie in opposite hemispheres defined by
-C C, P is the point of intersection of C with A.
-C
-C
-C On input:
-C
-C       P1,P2 = Arrays of length 3 containing the Cartesian
-C               coordinates of unit vectors.
-C
-C       CN = Array of length 3 containing the Cartesian
-C            coordinates of a nonzero vector which defines C
-C            as the intersection of the plane whose normal
-C            is CN with the unit sphere.  Thus, if C is to
-C            be the great circle defined by P and Q, CN
-C            should be P X Q.
-C
-C The above parameters are not altered by this routine.
-C
-C       P = Array of length 3.
-C
-C On output:
-C
-C       P = Point of intersection defined above unless IER
-C           .NE. 0, in which case P is not altered.
-C
-C       IER = Error indicator.
-C             IER = 0 if no errors were encountered.
-C             IER = 1 if <CN,P1> = <CN,P2>.  This occurs
-C                     iff P1 = P2 or CN = 0 or there are
-C                     two intersection points at the same
-C                     distance from A.
-C             IER = 2 if P2 = -P1 and the definition of A is
-C                     therefore ambiguous.
-C
-C Modules required by INTRSC:  None
-C
-C Intrinsic function called by INTRSC:  SQRT
-C
-C***********************************************************
-C
-      INTEGER I
-      REAL    D1, D2, PP(3), PPN, T
-C
-C Local parameters:
-C
-C D1 =  <CN,P1>
-C D2 =  <CN,P2>
-C I =   DO-loop index
-C PP =  P1 + T*(P2-P1) = Parametric representation of the
-C         line defined by P1 and P2
-C PPN = Norm of PP
-C T =   D1/(D1-D2) = Parameter value chosen so that PP lies
-C         in the plane of C
-C
-      D1 = CN(1)*P1(1) + CN(2)*P1(2) + CN(3)*P1(3)
-      D2 = CN(1)*P2(1) + CN(2)*P2(2) + CN(3)*P2(3)
-C
-      IF (D1 .EQ. D2) THEN
-        IER = 1
-        RETURN
-      ENDIF
-C
-C Solve for T such that <CN,PP> = 0 and compute PP and PPN.
-C
-      T = D1/(D1-D2)
-      PPN = 0.
-      DO 1 I = 1,3
-        PP(I) = P1(I) + T*(P2(I)-P1(I))
-        PPN = PPN + PP(I)*PP(I)
-    1   CONTINUE
-C
-C PPN = 0 iff PP = 0 iff P2 = -P1 (and T = .5).
-C
-      IF (PPN .EQ. 0.) THEN
-        IER = 2
-        RETURN
-      ENDIF
-      PPN = SQRT(PPN)
-C
-C Compute P = PP/PPN.
-C - DO 2 I = 1,3 - P(I) = PP(I)/PPN - 2 CONTINUE - IER = 0 - RETURN - END SUBROUTINE - INTEGER FUNCTION JRAND (N, IX,IY,IZ ) - INTEGER N, IX, IY, IZ -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C This function returns a uniformly distributed pseudo- -C random integer in the range 1 to N. -C -C -C On input: -C -C N = Maximum value to be returned. -C -C N is not altered by this function. -C -C IX,IY,IZ = Integer seeds initialized to values in -C the range 1 to 30,000 before the first -C call to JRAND, and not altered between -C subsequent calls (unless a sequence of -C random numbers is to be repeated by -C reinitializing the seeds). -C -C On output: -C -C IX,IY,IZ = Updated integer seeds. -C -C JRAND = Random integer in the range 1 to N. -C -C Reference: B. A. Wichmann and I. D. Hill, "An Efficient -C and Portable Pseudo-random Number Generator", -C Applied Statistics, Vol. 31, No. 2, 1982, -C pp. 188-190. -C -C Modules required by JRAND: None -C -C Intrinsic functions called by JRAND: INT, MOD, REAL -C -C*********************************************************** -C - REAL U, X -C -C Local parameters: -C -C U = Pseudo-random number uniformly distributed in the -C interval (0,1). -C X = Pseudo-random number in the range 0 to 3 whose frac- -C tional part is U. -C - IX = MOD(171*IX,30269) - IY = MOD(172*IY,30307) - IZ = MOD(170*IZ,30323) - X = (REAL(IX)/30269.) + (REAL(IY)/30307.) + - . (REAL(IZ)/30323.) - U = X - INT(X) - JRAND = REAL(N)*U + 1. - RETURN - END FUNCTION - LOGICAL FUNCTION LEFT (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) - REAL X1, Y1, Z1, X2, Y2, Z2, X0, Y0, Z0 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function determines whether node N0 is in the -C (closed) left hemisphere defined by the plane containing -C N1, N2, and the origin, where left is defined relative to -C an observer at N1 facing N2. -C -C -C On input: -C -C X1,Y1,Z1 = Coordinates of N1. -C -C X2,Y2,Z2 = Coordinates of N2. -C -C X0,Y0,Z0 = Coordinates of N0. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LEFT = TRUE if and only if N0 is in the closed -C left hemisphere. -C -C Modules required by LEFT: None -C -C*********************************************************** -C -C LEFT = TRUE iff = det(N0,N1,N2) .GE. 0. -C - LEFT = X0*(Y1*Z2-Y2*Z1) - Y0*(X1*Z2-X2*Z1) + - . Z0*(X1*Y2-X2*Y1) .GE. 0. - RETURN - END FUNCTION - INTEGER FUNCTION LSTPTR (LPL,NB,LIST,LPTR) - INTEGER LPL, NB, LIST(*), LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the index (LIST pointer) of NB in -C the adjacency list for N0, where LPL = LEND(N0). -C -C This function is identical to the similarly named -C function in TRIPACK. -C -C -C On input: -C -C LPL = LEND(N0) -C -C NB = Index of the node whose pointer is to be re- -C turned. NB must be connected to N0. -C -C LIST,LPTR = Data structure defining the triangula- -C tion. Refer to Subroutine TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LSTPTR = Pointer such that LIST(LSTPTR) = NB or -C LIST(LSTPTR) = -NB, unless NB is not a -C neighbor of N0, in which case LSTPTR = LPL. -C -C Modules required by LSTPTR: None -C -C*********************************************************** -C - INTEGER LP, ND -C -C Local parameters: -C -C LP = LIST pointer -C ND = Nodal index -C - LP = LPTR(LPL) - 1 ND = LIST(LP) - IF (ND .EQ. NB) GO TO 2 - LP = LPTR(LP) - IF (LP .NE. 
LPL) GO TO 1 -C - 2 LSTPTR = LP - RETURN - END FUNCTION - INTEGER FUNCTION NBCNT (LPL,LPTR) - INTEGER LPL, LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the number of neighbors of a node -C N0 in a triangulation created by Subroutine TRMESH. -C -C This function is identical to the similarly named -C function in TRIPACK. -C -C -C On input: -C -C LPL = LIST pointer to the last neighbor of N0 -- -C LPL = LEND(N0). -C -C LPTR = Array of pointers associated with LIST. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NBCNT = Number of neighbors of N0. -C -C Modules required by NBCNT: None -C -C*********************************************************** -C - INTEGER K, LP -C -C Local parameters: -C -C K = Counter for computing the number of neighbors -C LP = LIST pointer -C - LP = LPL - K = 1 -C - 1 LP = LPTR(LP) - IF (LP .EQ. LPL) GO TO 2 - K = K + 1 - GO TO 1 -C - 2 NBCNT = K - RETURN - END FUNCTION - INTEGER FUNCTION NEARND (P,IST,N,X,Y,Z,LIST,LPTR, - . LEND, AL) - INTEGER IST, N, LIST(*), LPTR(*), LEND(N) - REAL P(3), X(N), Y(N), Z(N), AL -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a point P on the surface of the unit sphere and a -C Delaunay triangulation created by Subroutine TRMESH, this -C function returns the index of the nearest triangulation -C node to P. -C -C The algorithm consists of implicitly adding P to the -C triangulation, finding the nearest neighbor to P, and -C implicitly deleting P from the triangulation. Thus, it -C is based on the fact that, if P is a node in a Delaunay -C triangulation, the nearest node to P is a neighbor of P. 
-C -C -C On input: -C -C P = Array of length 3 containing the Cartesian coor- -C dinates of the point P to be located relative to -C the triangulation. It is assumed without a test -C that P(1)**2 + P(2)**2 + P(3)**2 = 1. -C -C IST = Index of a node at which TRFIND begins the -C search. Search time depends on the proximity -C of this node to P. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NEARND = Nodal index of the nearest node to P, or 0 -C if N < 3 or the triangulation data struc- -C ture is invalid. -C -C AL = Arc length (angular distance in radians) be- -C tween P and NEARND unless NEARND = 0. -C -C Note that the number of candidates for NEARND -C (neighbors of P) is limited to LMAX defined in -C the PARAMETER statement below. -C -C Modules required by NEARND: JRAND, LSTPTR, TRFIND, STORE -C -C Intrinsic functions called by NEARND: ABS, ACOS -C -C*********************************************************** -C - INTEGER LMAX - PARAMETER (LMAX=25) - INTEGER I1, I2, I3, L, LISTP(LMAX), LP, LP1, LP2, - . LPL, LPTRP(LMAX), N1, N2, N3, NN, NR, NST - REAL B1, B2, B3, DS1, DSR, DX1, DX2, DX3, DY1, - . 
DY2, DY3, DZ1, DZ2, DZ3 -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND -C DS1 = (Negative cosine of the) distance from P to N1 -C DSR = (Negative cosine of the) distance from P to NR -C DX1,..DZ3 = Components of vectors used by the swap test -C I1,I2,I3 = Nodal indexes of a triangle containing P, or -C the rightmost (I1) and leftmost (I2) visible -C boundary nodes as viewed from P -C L = Length of LISTP/LPTRP and number of neighbors -C of P -C LMAX = Maximum value of L -C LISTP = Indexes of the neighbors of P -C LPTRP = Array of pointers in 1-1 correspondence with -C LISTP elements -C LP = LIST pointer to a neighbor of N1 and LISTP -C pointer -C LP1,LP2 = LISTP indexes (pointers) -C LPL = Pointer to the last neighbor of N1 -C N1 = Index of a node visible from P -C N2 = Index of an endpoint of an arc opposite P -C N3 = Index of the node opposite N1->N2 -C NN = Local copy of N -C NR = Index of a candidate for the nearest node to P -C NST = Index of the node at which TRFIND begins the -C search -C -C -C Store local parameters and test for N invalid. -C - NN = N - IF (NN .LT. 3) GO TO 6 - NST = IST - IF (NST .LT. 1 .OR. NST .GT. NN) NST = 1 -C -C Find a triangle (I1,I2,I3) containing P, or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from P. -C - CALL TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear nodes. -C - IF (I1 .EQ. 0) GO TO 6 -C -C Store the linked list of 'neighbors' of P in LISTP and -C LPTRP. I1 is the first neighbor, and 0 is stored as -C the last neighbor if P is not contained in a triangle. -C L is the length of LISTP and LPTRP, and is limited to -C LMAX. -C - IF (I3 .NE. 0) THEN - LISTP(1) = I1 - LPTRP(1) = 2 - LISTP(2) = I2 - LPTRP(2) = 3 - LISTP(3) = I3 - LPTRP(3) = 1 - L = 3 - ELSE - N1 = I1 - L = 1 - LP1 = 2 - LISTP(L) = N1 - LPTRP(L) = LP1 -C -C Loop on the ordered sequence of visible boundary nodes -C N1 from I1 to I2. 
-C - 1 LPL = LEND(N1) - N1 = -LIST(LPL) - L = LP1 - LP1 = L+1 - LISTP(L) = N1 - LPTRP(L) = LP1 - IF (N1 .NE. I2 .AND. LP1 .LT. LMAX) GO TO 1 - L = LP1 - LISTP(L) = 0 - LPTRP(L) = 1 - ENDIF -C -C Initialize variables for a loop on arcs N1-N2 opposite P -C in which new 'neighbors' are 'swapped' in. N1 follows -C N2 as a neighbor of P, and LP1 and LP2 are the LISTP -C indexes of N1 and N2. -C - LP2 = 1 - N2 = I1 - LP1 = LPTRP(1) - N1 = LISTP(LP1) -C -C Begin loop: find the node N3 opposite N1->N2. -C - 2 LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 3 - LP = LPTR(LP) - N3 = ABS(LIST(LP)) -C -C Swap test: Exit the loop if L = LMAX. -C - IF (L .EQ. LMAX) GO TO 4 - DX1 = X(N1) - P(1) - DY1 = Y(N1) - P(2) - DZ1 = Z(N1) - P(3) -C - DX2 = X(N2) - P(1) - DY2 = Y(N2) - P(2) - DZ2 = Z(N2) - P(3) -C - DX3 = X(N3) - P(1) - DY3 = Y(N3) - P(2) - DZ3 = Z(N3) - P(3) - IF ( DX3*(DY2*DZ1 - DY1*DZ2) - - . DY3*(DX2*DZ1 - DX1*DZ2) + - . DZ3*(DX2*DY1 - DX1*DY2) .LE. 0. ) GO TO 3 -C -C Swap: Insert N3 following N2 in the adjacency list for P. -C The two new arcs opposite P must be tested. -C - L = L+1 - LPTRP(LP2) = L - LISTP(L) = N3 - LPTRP(L) = LP1 - LP1 = L - N1 = N3 - GO TO 2 -C -C No swap: Advance to the next arc and test for termination -C on N1 = I1 (LP1 = 1) or N1 followed by 0. -C - 3 IF (LP1 .EQ. 1) GO TO 4 - LP2 = LP1 - N2 = N1 - LP1 = LPTRP(LP1) - N1 = LISTP(LP1) - IF (N1 .EQ. 0) GO TO 4 - GO TO 2 -C -C Set NR and DSR to the index of the nearest node to P and -C an increasing function (negative cosine) of its distance -C from P, respectively. -C - 4 NR = I1 - DSR = -(X(NR)*P(1) + Y(NR)*P(2) + Z(NR)*P(3)) - DO 5 LP = 2,L - N1 = LISTP(LP) - IF (N1 .EQ. 0) GO TO 5 - DS1 = -(X(N1)*P(1) + Y(N1)*P(2) + Z(N1)*P(3)) - IF (DS1 .LT. DSR) THEN - NR = N1 - DSR = DS1 - ENDIF - 5 CONTINUE - DSR = -DSR - IF (DSR .GT. 1.0) DSR = 1.0 - AL = ACOS(DSR) - NEARND = NR - RETURN -C -C Invalid input. 
-C - 6 NEARND = 0 - RETURN - END FUNCTION - SUBROUTINE OPTIM (X,Y,Z,NA, LIST,LPTR,LEND,NIT, - . IWK, IER) - INTEGER NA, LIST(*), LPTR(*), LEND(*), NIT, IWK(2,NA), - . IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a set of NA triangulation arcs, this subroutine -C optimizes the portion of the triangulation consisting of -C the quadrilaterals (pairs of adjacent triangles) which -C have the arcs as diagonals by applying the circumcircle -C test and appropriate swaps to the arcs. -C -C An iteration consists of applying the swap test and -C swaps to all NA arcs in the order in which they are -C stored. The iteration is repeated until no swap occurs -C or NIT iterations have been performed. The bound on the -C number of iterations may be necessary to prevent an -C infinite loop caused by cycling (reversing the effect of a -C previous swap) due to floating point inaccuracy when four -C or more nodes are nearly cocircular. -C -C -C On input: -C -C X,Y,Z = Arrays containing the nodal coordinates. -C -C NA = Number of arcs in the set. NA .GE. 0. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C NIT = Maximum number of iterations to be performed. -C NIT = 4*NA should be sufficient. NIT .GE. 1. -C -C IWK = Integer array dimensioned 2 by NA containing -C the nodal indexes of the arc endpoints (pairs -C of endpoints are stored in columns). -C -C On output: -C -C LIST,LPTR,LEND = Updated triangulation data struc- -C ture reflecting the swaps. -C -C NIT = Number of iterations performed. -C -C IWK = Endpoint indexes of the new set of arcs -C reflecting the swaps. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. 
-C IER = 1 if a swap occurred on the last of -C MAXIT iterations, where MAXIT is the -C value of NIT on input. The new set -C of arcs is not necessarily optimal -C in this case. -C IER = 2 if NA < 0 or NIT < 1 on input. -C IER = 3 if IWK(2,I) is not a neighbor of -C IWK(1,I) for some I in the range 1 -C to NA. A swap may have occurred in -C this case. -C IER = 4 if a zero pointer was returned by -C Subroutine SWAP. -C -C Modules required by OPTIM: LSTPTR, SWAP, SWPTST -C -C Intrinsic function called by OPTIM: ABS -C -C*********************************************************** -C - INTEGER I, IO1, IO2, ITER, LP, LP21, LPL, LPP, MAXIT, - . N1, N2, NNA - LOGICAL SWP -C -C Local parameters: -C -C I = Column index for IWK -C IO1,IO2 = Nodal indexes of the endpoints of an arc in IWK -C ITER = Iteration count -C LP = LIST pointer -C LP21 = Parameter returned by SWAP (not used) -C LPL = Pointer to the last neighbor of IO1 -C LPP = Pointer to the node preceding IO2 as a neighbor -C of IO1 -C MAXIT = Input value of NIT -C N1,N2 = Nodes opposite IO1->IO2 and IO2->IO1, -C respectively -C NNA = Local copy of NA -C SWP = Flag set to TRUE iff a swap occurs in the -C optimization loop -C - NNA = NA - MAXIT = NIT - IF (NNA .LT. 0 .OR. MAXIT .LT. 1) GO TO 7 -C -C Initialize iteration count ITER and test for NA = 0. -C - ITER = 0 - IF (NNA .EQ. 0) GO TO 5 -C -C Top of loop -- -C SWP = TRUE iff a swap occurred in the current iteration. -C - 1 IF (ITER .EQ. MAXIT) GO TO 6 - ITER = ITER + 1 - SWP = .FALSE. -C -C Inner loop on arcs IO1-IO2 -- -C - DO 4 I = 1,NNA - IO1 = IWK(1,I) - IO2 = IWK(2,I) -C -C Set N1 and N2 to the nodes opposite IO1->IO2 and -C IO2->IO1, respectively. Determine the following: -C -C LPL = pointer to the last neighbor of IO1, -C LP = pointer to IO2 as a neighbor of IO1, and -C LPP = pointer to the node N2 preceding IO2. -C - LPL = LEND(IO1) - LPP = LPL - LP = LPTR(LPP) - 2 IF (LIST(LP) .EQ. IO2) GO TO 3 - LPP = LP - LP = LPTR(LPP) - IF (LP .NE. 
LPL) GO TO 2 -C -C IO2 should be the last neighbor of IO1. Test for no -C arc and bypass the swap test if IO1 is a boundary -C node. -C - IF (ABS(LIST(LP)) .NE. IO2) GO TO 8 - IF (LIST(LP) .LT. 0) GO TO 4 -C -C Store N1 and N2, or bypass the swap test if IO1 is a -C boundary node and IO2 is its first neighbor. -C - 3 N2 = LIST(LPP) - IF (N2 .LT. 0) GO TO 4 - LP = LPTR(LP) - N1 = ABS(LIST(LP)) -C -C Test IO1-IO2 for a swap, and update IWK if necessary. -C - IF ( .NOT. SWPTST(N1,N2,IO1,IO2,X,Y,Z) ) GO TO 4 - CALL SWAP (N1,N2,IO1,IO2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) GO TO 9 - SWP = .TRUE. - IWK(1,I) = N1 - IWK(2,I) = N2 - 4 CONTINUE - IF (SWP) GO TO 1 -C -C Successful termination. -C - 5 NIT = ITER - IER = 0 - RETURN -C -C MAXIT iterations performed without convergence. -C - 6 NIT = MAXIT - IER = 1 - RETURN -C -C Invalid input parameter. -C - 7 NIT = 0 - IER = 2 - RETURN -C -C IO2 is not a neighbor of IO1. -C - 8 NIT = ITER - IER = 3 - RETURN -C -C Zero pointer returned by SWAP. -C - 9 NIT = ITER - IER = 4 - RETURN - END SUBROUTINE - SUBROUTINE SCOORD (PX,PY,PZ, PLAT,PLON,PNRM) - REAL PX, PY, PZ, PLAT, PLON, PNRM -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 08/27/90 -C -C This subroutine converts a point P from Cartesian coor- -C dinates to spherical coordinates. -C -C -C On input: -C -C PX,PY,PZ = Cartesian coordinates of P. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C PLAT = Latitude of P in the range -PI/2 to PI/2, or -C 0 if PNRM = 0. PLAT should be scaled by -C 180/PI to obtain the value in degrees. -C -C PLON = Longitude of P in the range -PI to PI, or 0 -C if P lies on the Z-axis. PLON should be -C scaled by 180/PI to obtain the value in -C degrees. -C -C PNRM = Magnitude (Euclidean norm) of P. 
C
C Modules required by SCOORD:  None
C
C Intrinsic functions called by SCOORD:  ASIN, ATAN2, SQRT
C
C***********************************************************
C
C (The guard below avoids the undefined call ATAN2(0,0)
C  when P lies on the Z-axis.)
C
      PNRM = SQRT(PX*PX + PY*PY + PZ*PZ)
      IF (PX .NE. 0. .OR. PY .NE. 0.) THEN
        PLON = ATAN2(PY,PX)
      ELSE
        PLON = 0.
      ENDIF
      IF (PNRM .NE. 0.) THEN
        PLAT = ASIN(PZ/PNRM)
      ELSE
        PLAT = 0.
      ENDIF
      RETURN
      END SUBROUTINE
      REAL FUNCTION STORE (X)
      REAL X
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   05/09/92
C
C   This function forces its argument X to be stored in a
C memory location, thus providing a means of determining
C floating point number characteristics (such as the machine
C precision) when it is necessary to avoid computation in
C high precision registers.
C
C
C On input:
C
C       X = Value to be stored.
C
C X is not altered by this function.
C
C On output:
C
C       STORE = Value of X after it has been stored and
C               possibly truncated or rounded to the single
C               precision word length.
C
C Modules required by STORE:  None
C
C***********************************************************
C
C (The COMMON block is the mechanism that forces a round
C  trip through memory -- the compiler cannot keep Y in an
C  extended-precision register across the assignment.)
C
      REAL Y
      COMMON/STCOM/Y
      Y = X
      STORE = Y
      RETURN
      END FUNCTION
      SUBROUTINE SWAP (IN1,IN2,IO1,IO2, LIST,LPTR,
     .                 LEND, LP21)
      INTEGER IN1, IN2, IO1, IO2, LIST(*), LPTR(*), LEND(*),
     .        LP21
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   06/22/98
C
C   Given a triangulation of a set of points on the unit
C sphere, this subroutine replaces a diagonal arc in a
C strictly convex quadrilateral (defined by a pair of adja-
C cent triangles) with the other diagonal.  Equivalently, a
C pair of adjacent triangles is replaced by another pair
C having the same union.
C
C
C On input:
C
C       IN1,IN2,IO1,IO2 = Nodal indexes of the vertices of
C                         the quadrilateral.  IO1-IO2 is re-
C                         placed by IN1-IN2.  (IO1,IO2,IN1)
C                         and (IO2,IO1,IN2) must be trian-
C                         gles on input.
C
C The above parameters are not altered by this routine.
C
C       LIST,LPTR,LEND = Data structure defining the trian-
C                        gulation.  Refer to Subroutine
C                        TRMESH.
C
C On output:
C
C       LIST,LPTR,LEND = Data structure updated with the
C                        swap -- triangles (IO1,IO2,IN1) and
C                        (IO2,IO1,IN2) are replaced by
C                        (IN1,IN2,IO2) and (IN2,IN1,IO1)
C                        unless LP21 = 0.
C
C       LP21 = Index of IN1 as a neighbor of IN2 after the
C              swap is performed unless IN1 and IN2 are
C              adjacent on input, in which case LP21 = 0.
C
C Module required by SWAP:  LSTPTR
C
C Intrinsic function called by SWAP:  ABS
C
C***********************************************************
C
      INTEGER LP, LPH, LPSAV
C
C Local parameters:
C
C LP,LPH,LPSAV = LIST pointers
C
C
C Test for IN1 and IN2 adjacent.
C
      LP = LSTPTR(LEND(IN1),IN2,LIST,LPTR)
      IF (ABS(LIST(LP)) .EQ. IN2) THEN
        LP21 = 0
        RETURN
      ENDIF
C
C Delete IO2 as a neighbor of IO1.
C
C (The list entry following IN2 in IO1's adjacency list is
C  IO2; that entry, LPH, is unlinked here and reused as the
C  storage slot for IN2 below.)
C
      LP = LSTPTR(LEND(IO1),IN2,LIST,LPTR)
      LPH = LPTR(LP)
      LPTR(LP) = LPTR(LPH)
C
C If IO2 is the last neighbor of IO1, make IN2 the
C   last neighbor.
C
      IF (LEND(IO1) .EQ. LPH) LEND(IO1) = LP
C
C Insert IN2 as a neighbor of IN1 following IO1
C   using the hole created above.
C
      LP = LSTPTR(LEND(IN1),IO1,LIST,LPTR)
      LPSAV = LPTR(LP)
      LPTR(LP) = LPH
      LIST(LPH) = IN2
      LPTR(LPH) = LPSAV
C
C Delete IO1 as a neighbor of IO2.
C
      LP = LSTPTR(LEND(IO2),IN1,LIST,LPTR)
      LPH = LPTR(LP)
      LPTR(LP) = LPTR(LPH)
C
C If IO1 is the last neighbor of IO2, make IN1 the
C   last neighbor.
C
      IF (LEND(IO2) .EQ. LPH) LEND(IO2) = LP
C
C Insert IN1 as a neighbor of IN2 following IO2.
C
      LP = LSTPTR(LEND(IN2),IO2,LIST,LPTR)
      LPSAV = LPTR(LP)
      LPTR(LP) = LPH
      LIST(LPH) = IN1
      LPTR(LPH) = LPSAV
      LP21 = LPH
      RETURN
      END SUBROUTINE
      LOGICAL FUNCTION SWPTST (N1,N2,N3,N4,X,Y,Z)
      INTEGER N1, N2, N3, N4
      REAL X(*), Y(*), Z(*)
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   03/29/91
C
C   This function decides whether or not to replace a
C diagonal arc in a quadrilateral with the other diagonal.
C The decision will be to swap (SWPTST = TRUE) if and only
C if N4 lies above the plane (in the half-space not contain-
C ing the origin) defined by (N1,N2,N3), or equivalently, if
C the projection of N4 onto this plane is interior to the
C circumcircle of (N1,N2,N3).  The decision will be for no
C swap if the quadrilateral is not strictly convex.
C
C
C On input:
C
C       N1,N2,N3,N4 = Indexes of the four nodes defining the
C                     quadrilateral with N1 adjacent to N2,
C                     and (N1,N2,N3) in counterclockwise
C                     order.  The arc connecting N1 to N2
C                     should be replaced by an arc connec-
C                     ting N3 to N4 if SWPTST = TRUE.  Refer
C                     to Subroutine SWAP.
C
C       X,Y,Z = Arrays of length N containing the Cartesian
C               coordinates of the nodes.  (X(I),Y(I),Z(I))
C               define node I for I = N1, N2, N3, and N4.
C
C Input parameters are not altered by this routine.
C
C On output:
C
C       SWPTST = TRUE if and only if the arc connecting N1
C                and N2 should be swapped for an arc con-
C                necting N3 and N4.
C
C Modules required by SWPTST:  None
C
C***********************************************************
C
      REAL DX1, DX2, DX3, DY1, DY2, DY3, DZ1, DZ2, DZ3,
     .     X4, Y4, Z4
C
C Local parameters:
C
C DX1,DY1,DZ1 = Coordinates of N4->N1
C DX2,DY2,DZ2 = Coordinates of N4->N2
C DX3,DY3,DZ3 = Coordinates of N4->N3
C X4,Y4,Z4 =    Coordinates of N4
C
      X4 = X(N4)
      Y4 = Y(N4)
      Z4 = Z(N4)
      DX1 = X(N1) - X4
      DX2 = X(N2) - X4
      DX3 = X(N3) - X4
      DY1 = Y(N1) - Y4
      DY2 = Y(N2) - Y4
      DY3 = Y(N3) - Y4
      DZ1 = Z(N1) - Z4
      DZ2 = Z(N2) - Z4
      DZ3 = Z(N3) - Z4
C
C N4 lies above the plane of (N1,N2,N3) iff N3 lies above
C   the plane of (N2,N1,N4) iff Det(N3-N4,N2-N4,N1-N4) =
C   (N3-N4,N2-N4 X N1-N4) > 0.
C
      SWPTST = DX3*(DY2*DZ1 - DY1*DZ2)
     .        -DY3*(DX2*DZ1 - DX1*DZ2)
     .        +DZ3*(DX2*DY1 - DX1*DY2) .GT. 0.
      RETURN
      END FUNCTION
      SUBROUTINE TRANS (N,RLAT,RLON, X,Y,Z)
      INTEGER N
      REAL RLAT(N), RLON(N), X(N), Y(N), Z(N)
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   04/08/90
C
C   This subroutine transforms spherical coordinates into
C Cartesian coordinates on the unit sphere for input to
C Subroutine TRMESH.  Storage for X and Y may coincide with
C storage for RLAT and RLON if the latter need not be saved.
C
C
C On input:
C
C       N = Number of nodes (points on the unit sphere)
C           whose coordinates are to be transformed.
C
C       RLAT = Array of length N containing latitudinal
C              coordinates of the nodes in radians.
C
C       RLON = Array of length N containing longitudinal
C              coordinates of the nodes in radians.
C
C The above parameters are not altered by this routine.
C
C       X,Y,Z = Arrays of length at least N.
C
C On output:
C
C       X,Y,Z = Cartesian coordinates in the range -1 to 1.
C               X(I)**2 + Y(I)**2 + Z(I)**2 = 1 for I = 1
C               to N.
C
C Modules required by TRANS:  None
C
C Intrinsic functions called by TRANS:  COS, SIN
C
C***********************************************************
C
      INTEGER I, NN
      REAL COSPHI, PHI, THETA
C
C Local parameters:
C
C COSPHI = cos(PHI)
C I =      DO-loop index
C NN =     Local copy of N
C PHI =    Latitude
C THETA =  Longitude
C
      NN = N
      DO 1 I = 1,NN
        PHI = RLAT(I)
        THETA = RLON(I)
        COSPHI = COS(PHI)
        X(I) = COSPHI*COS(THETA)
        Y(I) = COSPHI*SIN(THETA)
        Z(I) = SIN(PHI)
    1   CONTINUE
      RETURN
      END SUBROUTINE
      SUBROUTINE TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1,
     .                   B2,B3,I1,I2,I3)
      INTEGER NST, N, LIST(*), LPTR(*), LEND(N), I1, I2, I3
      REAL P(3), X(N), Y(N), Z(N), B1, B2, B3
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   11/30/99
C
C   This subroutine locates a point P relative to a triangu-
C lation created by Subroutine TRMESH.  If P is contained in
C a triangle, the three vertex indexes and barycentric coor-
C dinates are returned.  Otherwise, the indexes of the
C visible boundary nodes are returned.
C
C
C On input:
C
C       NST = Index of a node at which TRFIND begins its
C             search.  Search time depends on the proximity
C             of this node to P.
C
C       P = Array of length 3 containing the x, y, and z
C           coordinates (in that order) of the point P to be
C           located.
C
C       N = Number of nodes in the triangulation.  N .GE. 3.
C
C       X,Y,Z = Arrays of length N containing the Cartesian
C               coordinates of the triangulation nodes (unit
C               vectors).  (X(I),Y(I),Z(I)) defines node I
C               for I = 1 to N.
C
C       LIST,LPTR,LEND = Data structure defining the trian-
C                        gulation.  Refer to Subroutine
C                        TRMESH.
C
C Input parameters are not altered by this routine.
C
C On output:
C
C       B1,B2,B3 = Unnormalized barycentric coordinates of
C                  the central projection of P onto the un-
C                  derlying planar triangle if P is in the
C                  convex hull of the nodes.  These parame-
C                  ters are not altered if I1 = 0.
C
C       I1,I2,I3 = Counterclockwise-ordered vertex indexes
C                  of a triangle containing P if P is con-
C                  tained in a triangle.  If P is not in the
C                  convex hull of the nodes, I1 and I2 are
C                  the rightmost and leftmost (boundary)
C                  nodes that are visible from P, and
C                  I3 = 0.  (If all boundary nodes are vis-
C                  ible from P, then I1 and I2 coincide.)
C                  I1 = I2 = I3 = 0 if P and all of the
C                  nodes are coplanar (lie on a common great
C                  circle).
C
C Modules required by TRFIND:  JRAND, LSTPTR, STORE
C
C Intrinsic function called by TRFIND:  ABS
C
C***********************************************************
C
      INTEGER IX, IY, IZ, LP, N0, N1, N1S, N2, N2S, N3, N4,
     .        NEXT, NF, NL
      REAL DET, EPS, PTN1, PTN2, Q(3), S12, TOL, XP, YP,
     .     ZP
      REAL X0, X1, X2, Y0, Y1, Y2, Z0, Z1, Z2
C
      SAVE IX, IY, IZ
      DATA IX/1/, IY/2/, IZ/3/
C
C Local parameters:
C
C EPS =      Machine precision
C IX,IY,IZ = Integer seeds for JRAND
C LP =       LIST pointer
C N0,N1,N2 = Nodes in counterclockwise order defining a
C              cone (with vertex N0) containing P, or end-
C              points of a boundary edge such that P Right
C              N1->N2
C N1S,N2S =  Initially-determined values of N1 and N2
C N3,N4 =    Nodes opposite N1->N2 and N2->N1, respectively
C NEXT =     Candidate for I1 or I2 when P is exterior
C NF,NL =    First and last neighbors of N0, or first
C              (rightmost) and last (leftmost) nodes
C              visible from P when P is exterior to the
C              triangulation
C PTN1 =     Scalar product
C PTN2 =     Scalar product
C Q =        (N2 X N1) X N2 or N1 X (N2 X N1) -- used in
C              the boundary traversal when P is exterior
C S12 =      Scalar product
C TOL =      Tolerance (multiple of EPS) defining an upper
C              bound on the magnitude of a negative bary-
C              centric coordinate (B1 or B2) for P in a
C              triangle -- used to avoid an infinite number
C              of restarts with 0 <= B3 < EPS and B1 < 0 or
C              B2 < 0 but small in magnitude
C XP,YP,ZP = Local variables containing P(1), P(2), and P(3)
C X0,Y0,Z0 = Dummy arguments for DET
C X1,Y1,Z1 = Dummy arguments for DET
C X2,Y2,Z2 = Dummy arguments for DET
C
C Statement function:
C
C DET(X1,...,Z0) .GE. 0 if and only if (X0,Y0,Z0) is in the
C                       (closed) left hemisphere defined by
C                       the plane containing (0,0,0),
C                       (X1,Y1,Z1), and (X2,Y2,Z2), where
C                       left is defined relative to an ob-
C                       server at (X1,Y1,Z1) facing
C                       (X2,Y2,Z2).
C
      DET (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) = X0*(Y1*Z2-Y2*Z1)
     .     - Y0*(X1*Z2-X2*Z1) + Z0*(X1*Y2-X2*Y1)
C
C Initialize variables.
C
      XP = P(1)
      YP = P(2)
      ZP = P(3)
      N0 = NST
      IF (N0 .LT. 1 .OR. N0 .GT. N)
     .  N0 = JRAND(N, IX,IY,IZ )
C
C Compute the relative machine precision EPS and TOL.
C
C (STORE forces each candidate out of extended-precision
C  registers so the measured EPS matches memory precision.)
C
      EPS = 1.E0
    1 EPS = EPS/2.E0
        IF (STORE(EPS+1.E0) .GT. 1.E0) GO TO 1
      EPS = 2.E0*EPS
      TOL = 100.E0*EPS
C
C Set NF and NL to the first and last neighbors of N0, and
C   initialize N1 = NF.
C
    2 LP = LEND(N0)
      NL = LIST(LP)
      LP = LPTR(LP)
      NF = LIST(LP)
      N1 = NF
C
C Find a pair of adjacent neighbors N1,N2 of N0 that define
C   a wedge containing P:  P LEFT N0->N1 and P RIGHT N0->N2.
C
      IF (NL .GT. 0) THEN
C
C   N0 is an interior node.  Find N1.
C
    3   IF ( DET(X(N0),Y(N0),Z(N0),X(N1),Y(N1),Z(N1),
     .           XP,YP,ZP) .LT. 0. ) THEN
          LP = LPTR(LP)
          N1 = LIST(LP)
          IF (N1 .EQ. NL) GO TO 6
          GO TO 3
        ENDIF
      ELSE
C
C   N0 is a boundary node.  Test for P exterior.
C
        NL = -NL
        IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF),
     .           XP,YP,ZP) .LT. 0. ) THEN
C
C   P is to the right of the boundary edge N0->NF.
C
          N1 = N0
          N2 = NF
          GO TO 9
        ENDIF
        IF ( DET(X(NL),Y(NL),Z(NL),X(N0),Y(N0),Z(N0),
     .           XP,YP,ZP) .LT. 0. ) THEN
C
C   P is to the right of the boundary edge NL->N0.
C
          N1 = NL
          N2 = N0
          GO TO 9
        ENDIF
      ENDIF
C
C P is to the left of arcs N0->N1 and NL->N0.  Set N2 to the
C   next neighbor of N0 (following N1).
C
    4 LP = LPTR(LP)
        N2 = ABS(LIST(LP))
        IF ( DET(X(N0),Y(N0),Z(N0),X(N2),Y(N2),Z(N2),
     .           XP,YP,ZP) .LT. 0. ) GO TO 7
        N1 = N2
        IF (N1 .NE. NL) GO TO 4
      IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF),
     .         XP,YP,ZP) .LT. 0. ) GO TO 6
C
C P is left of or on arcs N0->NB for all neighbors NB
C   of N0.  Test for P = +/-N0.
C
      IF (STORE(ABS(X(N0)*XP + Y(N0)*YP + Z(N0)*ZP))
     .    .LT. 1.0-4.0*EPS) THEN
C
C   All points are collinear iff P Left NB->N0 for all
C     neighbors NB of N0.  Search the neighbors of N0.
C     Note:  N1 = NL and LP points to NL.
C
    5   IF ( DET(X(N1),Y(N1),Z(N1),X(N0),Y(N0),Z(N0),
     .           XP,YP,ZP) .GE. 0. ) THEN
          LP = LPTR(LP)
          N1 = ABS(LIST(LP))
          IF (N1 .EQ. NL) GO TO 14
          GO TO 5
        ENDIF
      ENDIF
C
C P is to the right of N1->N0, or P = +/-N0.  Set N0 to N1
C   and start over.
C
      N0 = N1
      GO TO 2
C
C P is between arcs N0->N1 and N0->NF.
C
    6 N2 = NF
C
C P is contained in a wedge defined by geodesics N0-N1 and
C   N0-N2, where N1 is adjacent to N2.  Save N1 and N2 to
C   test for cycling.
C
    7 N3 = N0
      N1S = N1
      N2S = N2
C
C Top of edge-hopping loop:
C
    8 B3 = DET(X(N1),Y(N1),Z(N1),X(N2),Y(N2),Z(N2),XP,YP,ZP)
      IF (B3 .LT. 0.) THEN
C
C   Set N4 to the first neighbor of N2 following N1 (the
C     node opposite N2->N1) unless N1->N2 is a boundary arc.
C
        LP = LSTPTR(LEND(N2),N1,LIST,LPTR)
        IF (LIST(LP) .LT. 0) GO TO 9
        LP = LPTR(LP)
        N4 = ABS(LIST(LP))
C
C   Define a new arc N1->N2 which intersects the geodesic
C     N0-P.
C
        IF ( DET(X(N0),Y(N0),Z(N0),X(N4),Y(N4),Z(N4),
     .           XP,YP,ZP) .LT. 0. ) THEN
          N3 = N2
          N2 = N4
          N1S = N1
          IF (N2 .NE. N2S .AND. N2 .NE. N0) GO TO 8
        ELSE
          N3 = N1
          N1 = N4
          N2S = N2
          IF (N1 .NE. N1S .AND. N1 .NE. N0) GO TO 8
        ENDIF
C
C   The starting node N0 or edge N1-N2 was encountered
C     again, implying a cycle (infinite loop).  Restart
C     with N0 randomly selected.
C
        N0 = JRAND(N, IX,IY,IZ )
        GO TO 2
      ENDIF
C
C P is in (N1,N2,N3) unless N0, N1, N2, and P are collinear
C   or P is close to -N0.
C
      IF (B3 .GE. EPS) THEN
C
C   B3 .NE. 0.
C
        B1 = DET(X(N2),Y(N2),Z(N2),X(N3),Y(N3),Z(N3),
     .           XP,YP,ZP)
        B2 = DET(X(N3),Y(N3),Z(N3),X(N1),Y(N1),Z(N1),
     .           XP,YP,ZP)
        IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN
C
C   Restart with N0 randomly selected.
C
          N0 = JRAND(N, IX,IY,IZ )
          GO TO 2
        ENDIF
      ELSE
C
C   B3 = 0 and thus P lies on N1->N2. Compute
C     B1 = Det(P,N2 X N1,N2) and B2 = Det(P,N1,N2 X N1).
C
        B3 = 0.
        S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2)
        PTN1 = XP*X(N1) + YP*Y(N1) + ZP*Z(N1)
        PTN2 = XP*X(N2) + YP*Y(N2) + ZP*Z(N2)
        B1 = PTN1 - S12*PTN2
        B2 = PTN2 - S12*PTN1
        IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN
C
C   Restart with N0 randomly selected.
C
          N0 = JRAND(N, IX,IY,IZ )
          GO TO 2
        ENDIF
      ENDIF
C
C P is in (N1,N2,N3).
C
      I1 = N1
      I2 = N2
      I3 = N3
      IF (B1 .LT. 0.0) B1 = 0.0
      IF (B2 .LT. 0.0) B2 = 0.0
      RETURN
C
C P Right N1->N2, where N1->N2 is a boundary edge.
C   Save N1 and N2, and set NL = 0 to indicate that
C   NL has not yet been found.
C
    9 N1S = N1
      N2S = N2
      NL = 0
C
C           Counterclockwise Boundary Traversal:
C
   10 LP = LEND(N2)
      LP = LPTR(LP)
      NEXT = LIST(LP)
      IF ( DET(X(N2),Y(N2),Z(N2),X(NEXT),Y(NEXT),Z(NEXT),
     .         XP,YP,ZP) .GE. 0. ) THEN
C
C   N2 is the rightmost visible node if P Forward N2->N1
C     or NEXT Forward N2->N1.  Set Q to (N2 X N1) X N2.
C
        S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2)
        Q(1) = X(N1) - S12*X(N2)
        Q(2) = Y(N1) - S12*Y(N2)
        Q(3) = Z(N1) - S12*Z(N2)
        IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 11
        IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3)
     .      .GE. 0.) GO TO 11
C
C   N1, N2, NEXT, and P are nearly collinear, and N2 is
C     the leftmost visible node.
C
        NL = N2
      ENDIF
C
C Bottom of counterclockwise loop:
C
      N1 = N2
      N2 = NEXT
      IF (N2 .NE. N1S) GO TO 10
C
C All boundary nodes are visible from P.
C
      I1 = N1S
      I2 = N1S
      I3 = 0
      RETURN
C
C N2 is the rightmost visible node.
C
   11 NF = N2
      IF (NL .EQ. 0) THEN
C
C Restore initial values of N1 and N2, and begin the search
C   for the leftmost visible node.
C
        N2 = N2S
        N1 = N1S
C
C           Clockwise Boundary Traversal:
C
   12   LP = LEND(N1)
        NEXT = -LIST(LP)
        IF ( DET(X(NEXT),Y(NEXT),Z(NEXT),X(N1),Y(N1),Z(N1),
     .           XP,YP,ZP) .GE. 0. ) THEN
C
C   N1 is the leftmost visible node if P or NEXT is
C     forward of N1->N2.  Compute Q = N1 X (N2 X N1).
C
          S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2)
          Q(1) = X(N2) - S12*X(N1)
          Q(2) = Y(N2) - S12*Y(N1)
          Q(3) = Z(N2) - S12*Z(N1)
          IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 13
          IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3)
     .        .GE. 0.) GO TO 13
C
C   P, NEXT, N1, and N2 are nearly collinear and N1 is the
C     rightmost visible node.
C
          NF = N1
        ENDIF
C
C Bottom of clockwise loop:
C
        N2 = N1
        N1 = NEXT
        IF (N1 .NE. N1S) GO TO 12
C
C All boundary nodes are visible from P.
C
        I1 = N1
        I2 = N1
        I3 = 0
        RETURN
C
C N1 is the leftmost visible node.
C
   13   NL = N1
      ENDIF
C
C NF and NL have been found.
C
      I1 = NF
      I2 = NL
      I3 = 0
      RETURN
C
C All points are collinear (coplanar).
C
   14 I1 = 0
      I2 = 0
      I3 = 0
      RETURN
      END SUBROUTINE
      SUBROUTINE TRLIST (N,LIST,LPTR,LEND,NROW, NT,LTRI,IER)
      INTEGER N, LIST(*), LPTR(*), LEND(N), NROW, NT,
     .        LTRI(NROW,*), IER
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   07/20/96
C
C   This subroutine converts a triangulation data structure
C from the linked list created by Subroutine TRMESH to a
C triangle list.
C
C On input:
C
C       N = Number of nodes in the triangulation.  N .GE. 3.
C
C       LIST,LPTR,LEND = Linked list data structure defin-
C                        ing the triangulation.  Refer to
C                        Subroutine TRMESH.
C
C       NROW = Number of rows (entries per triangle) re-
C              served for the triangle list LTRI.  The value
C              must be 6 if only the vertex indexes and
C              neighboring triangle indexes are to be
C              stored, or 9 if arc indexes are also to be
C              assigned and stored.  Refer to LTRI.
C
C The above parameters are not altered by this routine.
C
C       LTRI = Integer array of length at least NROW*NT,
C              where NT is at most 2N-4.  (A sufficient
C              length is 12N if NROW=6 or 18N if NROW=9.)
C
C On output:
C
C       NT = Number of triangles in the triangulation unless
C            IER .NE. 0, in which case NT = 0.  NT = 2N-NB-2
C            if NB .GE. 3 or 2N-4 if NB = 0, where NB is the
C            number of boundary nodes.
C
C       LTRI = NROW by NT array whose J-th column contains
C              the vertex nodal indexes (first three rows),
C              neighboring triangle indexes (second three
C              rows), and, if NROW = 9, arc indexes (last
C              three rows) associated with triangle J for
C              J = 1,...,NT.  The vertices are ordered
C              counterclockwise with the first vertex taken
C              to be the one with smallest index.  Thus,
C              LTRI(2,J) and LTRI(3,J) are larger than
C              LTRI(1,J) and index adjacent neighbors of
C              node LTRI(1,J).  For I = 1,2,3, LTRI(I+3,J)
C              and LTRI(I+6,J) index the triangle and arc,
C              respectively, which are opposite (not shared
C              by) node LTRI(I,J), with LTRI(I+3,J) = 0 if
C              LTRI(I+6,J) indexes a boundary arc.  Vertex
C              indexes range from 1 to N, triangle indexes
C              from 0 to NT, and, if included, arc indexes
C              from 1 to NA, where NA = 3N-NB-3 if NB .GE. 3
C              or 3N-6 if NB = 0.  The triangles are or-
C              dered on first (smallest) vertex indexes.
C
C       IER = Error indicator.
C             IER = 0 if no errors were encountered.
C             IER = 1 if N or NROW is outside its valid
C                     range on input.
C             IER = 2 if the triangulation data structure
C                     (LIST,LPTR,LEND) is invalid.  Note,
C                     however, that these arrays are not
C                     completely tested for validity.
C
   12 NT = 0
      IER = 2
      RETURN
      END SUBROUTINE
      SUBROUTINE TRLPRT (N,X,Y,Z,IFLAG,NROW,NT,LTRI,LOUT)
      INTEGER N, IFLAG, NROW, NT, LTRI(NROW,NT), LOUT
      REAL X(N), Y(N), Z(N)
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   07/02/98
C
C   This subroutine prints the triangle list created by Sub-
C routine TRLIST and, optionally, the nodal coordinates
C (either latitude and longitude or Cartesian coordinates)
C on logical unit LOUT.  The numbers of boundary nodes,
C triangles, and arcs are also printed.
C
C
C On input:
C
C       N = Number of nodes in the triangulation.
C           3 .LE. N .LE. 9999.
C
C       X,Y,Z = Arrays of length N containing the Cartesian
C               coordinates of the nodes if IFLAG = 0, or
C               (X and Y only) arrays of length N containing
C               longitude and latitude, respectively, if
C               IFLAG > 0, or unused dummy parameters if
C               IFLAG < 0.
C
C       IFLAG = Nodal coordinate option indicator:
C               IFLAG = 0 if X, Y, and Z (assumed to contain
C                         Cartesian coordinates) are to be
C                         printed (to 6 decimal places).
C               IFLAG > 0 if only X and Y (assumed to con-
C                         tain longitude and latitude) are
C                         to be printed (to 6 decimal
C                         places).
C               IFLAG < 0 if only the adjacency lists are to
C                         be printed.
C
C       NROW = Number of rows (entries per triangle) re-
C              served for the triangle list LTRI.  The value
C              must be 6 if only the vertex indexes and
C              neighboring triangle indexes are stored, or 9
C              if arc indexes are also stored.
C
C       NT = Number of triangles in the triangulation.
C            1 .LE. NT .LE. 9999.
C
C       LTRI = NROW by NT array whose J-th column contains
C              the vertex nodal indexes (first three rows),
C              neighboring triangle indexes (second three
C              rows), and, if NROW = 9, arc indexes (last
C              three rows) associated with triangle J for
C              J = 1,...,NT.
C
C       LOUT = Logical unit number for output.  If LOUT is
C              not in the range 0 to 99, output is written
C              to unit 6.
C
C Input parameters are not altered by this routine.
C
C On output:
C
C   The triangle list and nodal coordinates (as specified by
C IFLAG) are written to unit LOUT.
C
C Modules required by TRLPRT:  None
C
C***********************************************************
C
      INTEGER I, K, LUN, NA, NB, NL, NLMAX, NMAX
      DATA NMAX/9999/, NLMAX/58/
C
C Local parameters:
C
C I =     DO-loop, nodal index, and row index for LTRI
C K =     DO-loop and triangle index
C LUN =   Logical unit number for output
C NA =    Number of triangulation arcs
C NB =    Number of boundary nodes
C NL =    Number of lines printed on the current page
C NLMAX = Maximum number of print lines per page (except
C           for the last page which may have two addi-
C           tional lines)
C NMAX =  Maximum value of N and NT (4-digit format)
C
      LUN = LOUT
      IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6
C
C Print a heading and test for invalid input.
C
      WRITE (LUN,100) N
      NL = 3
      IF (N .LT. 3 .OR. N .GT. NMAX .OR.
     .    (NROW .NE. 6 .AND. NROW .NE. 9) .OR.
     .    NT .LT. 1 .OR. NT .GT. NMAX) THEN
C
C Print an error message and exit.
C
        WRITE (LUN,110) N, NROW, NT
        RETURN
      ENDIF
      IF (IFLAG .EQ. 0) THEN
C
C Print X, Y, and Z.
C
        WRITE (LUN,101)
        NL = 6
        DO 1 I = 1,N
          IF (NL .GE. NLMAX) THEN
            WRITE (LUN,108)
            NL = 0
          ENDIF
          WRITE (LUN,103) I, X(I), Y(I), Z(I)
          NL = NL + 1
    1     CONTINUE
      ELSEIF (IFLAG .GT. 0) THEN
C
C Print X (longitude) and Y (latitude).
C
        WRITE (LUN,102)
        NL = 6
        DO 2 I = 1,N
          IF (NL .GE. NLMAX) THEN
            WRITE (LUN,108)
            NL = 0
          ENDIF
          WRITE (LUN,104) I, X(I), Y(I)
          NL = NL + 1
    2     CONTINUE
      ENDIF
C
C Print the triangulation LTRI.
C
      IF (NL .GT. NLMAX/2) THEN
        WRITE (LUN,108)
        NL = 0
      ENDIF
      IF (NROW .EQ. 6) THEN
        WRITE (LUN,105)
      ELSE
        WRITE (LUN,106)
      ENDIF
      NL = NL + 5
      DO 3 K = 1,NT
        IF (NL .GE. NLMAX) THEN
          WRITE (LUN,108)
          NL = 0
        ENDIF
        WRITE (LUN,107) K, (LTRI(I,K), I = 1,NROW)
        NL = NL + 1
    3   CONTINUE
C
C Print NB, NA, and NT (boundary nodes, arcs, and
C   triangles).
C
C (NB is recovered from Euler's relation NT = 2N-NB-2;
C  NB < 3 implies no boundary, i.e. a triangulation that
C  covers the whole sphere.)
C
      NB = 2*N - NT - 2
      IF (NB .LT. 3) THEN
        NB = 0
        NA = 3*N - 6
      ELSE
        NA = NT + N - 1
      ENDIF
      WRITE (LUN,109) NB, NA, NT
      RETURN
C
C Print formats:
C
  100 FORMAT (///18X,'STRIPACK (TRLIST) Output, N = ',I4)
  101 FORMAT (//8X,'Node',10X,'X(Node)',10X,'Y(Node)',10X,
     .        'Z(Node)'//)
  102 FORMAT (//16X,'Node',8X,'Longitude',9X,'Latitude'//)
  103 FORMAT (8X,I4,3E17.6)
  104 FORMAT (16X,I4,2E17.6)
  105 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors'/
     .        4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X,
     .        'KT2',4X,'KT3'/)
  106 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors',
     .        14X,'Arcs'/
     .        4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X,
     .        'KT2',4X,'KT3',4X,'KA1',4X,'KA2',4X,'KA3'/)
  107 FORMAT (2X,I4,2X,6(3X,I4),3(2X,I5))
  108 FORMAT (///)
  109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X,
     .        'NA = ',I5,' Arcs',5X,'NT = ',I5,
     .        ' Triangles')
  110 FORMAT (//1X,10X,'*** Invalid Parameter: N =',I5,
     .        ', NROW =',I5,', NT =',I5,' ***')
      END SUBROUTINE
      SUBROUTINE TRMESH (N,X,Y,Z, LIST,LPTR,LEND,LNEW,NEAR,
     .                   NEXT,DIST,IER)
      INTEGER N, LIST(*), LPTR(*), LEND(N), LNEW, NEAR(N),
     .        NEXT(N), IER
      REAL X(N), Y(N), Z(N), DIST(N)
C
C***********************************************************
C
C                                              From STRIPACK
C                                            Robert J. Renka
C                                  Dept. of Computer Science
C                                       Univ. of North Texas
C                                           renka@cs.unt.edu
C                                                   07/08/99
C
C   This subroutine creates a Delaunay triangulation of a
C set of N arbitrarily distributed points, referred to as
C nodes, on the surface of the unit sphere.  The Delaunay
C triangulation is defined as a set of (spherical) triangles
C with the following five properties:
C
C  1)  The triangle vertices are nodes.
C  2)  No triangle contains a node other than its vertices.
C  3)  The interiors of the triangles are pairwise disjoint.
-C 4) The union of triangles is the convex hull of the set -C of nodes (the smallest convex set that contains -C the nodes). If the nodes are not contained in a -C single hemisphere, their convex hull is the en- -C tire sphere and there are no boundary nodes. -C Otherwise, there are at least three boundary nodes. -C 5) The interior of the circumcircle of each triangle -C contains no node. -C -C The first four properties define a triangulation, and the -C last property results in a triangulation which is as close -C as possible to equiangular in a certain sense and which is -C uniquely defined unless four or more nodes lie in a common -C plane. This property makes the triangulation well-suited -C for solving closest-point problems and for triangle-based -C interpolation. -C -C Provided the nodes are randomly ordered, the algorithm -C has expected time complexity O(N*log(N)) for most nodal -C distributions. Note, however, that the complexity may be -C as high as O(N**2) if, for example, the nodes are ordered -C on increasing latitude. -C -C Spherical coordinates (latitude and longitude) may be -C converted to Cartesian coordinates by Subroutine TRANS. -C -C The following is a list of the software package modules -C which a user may wish to call directly: -C -C ADDNOD - Updates the triangulation by appending a new -C node. -C -C AREAS - Returns the area of a spherical triangle. -C -C BNODES - Returns an array containing the indexes of the -C boundary nodes (if any) in counterclockwise -C order. Counts of boundary nodes, triangles, -C and arcs are also returned. -C -C CIRCUM - Returns the circumcenter of a spherical trian- -C gle. -C -C CRLIST - Returns the set of triangle circumcenters -C (Voronoi vertices) and circumradii associated -C with a triangulation. -C -C DELARC - Deletes a boundary arc from a triangulation. -C -C DELNOD - Updates the triangulation with a nodal deletion. 
-C -C EDGE - Forces an arbitrary pair of nodes to be connec- -C ted by an arc in the triangulation. -C -C GETNP - Determines the ordered sequence of L closest -C nodes to a given node, along with the associ- -C ated distances. -C -C INSIDE - Locates a point relative to a polygon on the -C surface of the sphere. -C -C INTRSC - Returns the point of intersection between a -C pair of great circle arcs. -C -C JRAND - Generates a uniformly distributed pseudo-random -C integer. -C -C LEFT - Locates a point relative to a great circle. -C -C NEARND - Returns the index of the nearest node to an -C arbitrary point, along with its squared -C distance. -C -C SCOORD - Converts a point from Cartesian coordinates to -C spherical coordinates. -C -C STORE - Forces a value to be stored in main memory so -C that the precision of floating point numbers -C in memory locations rather than registers is -C computed. -C -C TRANS - Transforms spherical coordinates into Cartesian -C coordinates on the unit sphere for input to -C Subroutine TRMESH. -C -C TRLIST - Converts the triangulation data structure to a -C triangle list more suitable for use in a fin- -C ite element code. -C -C TRLPRT - Prints the triangle list created by Subroutine -C TRLIST. -C -C TRMESH - Creates a Delaunay triangulation of a set of -C nodes. -C -C TRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a triangulation plot. -C -C TRPRNT - Prints the triangulation data structure and, -C optionally, the nodal coordinates. -C -C VRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a Voronoi diagram plot. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of distinct nodes. (X(K),Y(K), -C Z(K)) is referred to as node K, and K is re- -C ferred to as a nodal index. It is required -C that X(K)**2 + Y(K)**2 + Z(K)**2 = 1 for all -C K. 
The first three nodes must not be col- -C linear (lie on a common great circle). -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR = Arrays of length at least 6N-12. -C -C LEND = Array of length at least N. -C -C NEAR,NEXT,DIST = Work space arrays of length at -C least N. The space is used to -C efficiently determine the nearest -C triangulation node to each un- -C processed node for use by ADDNOD. -C -C On output: -C -C LIST = Set of nodal indexes which, along with LPTR, -C LEND, and LNEW, define the triangulation as a -C set of N adjacency lists -- counterclockwise- -C ordered sequences of neighboring nodes such -C that the first and last neighbors of a bound- -C ary node are boundary nodes (the first neigh- -C bor of an interior node is arbitrary). In -C order to distinguish between interior and -C boundary nodes, the last neighbor of each -C boundary node is represented by the negative -C of its index. -C -C LPTR = Set of pointers (LIST indexes) in one-to-one -C correspondence with the elements of LIST. -C LIST(LPTR(I)) indexes the node which follows -C LIST(I) in cyclical counterclockwise order -C (the first neighbor follows the last neigh- -C bor). -C -C LEND = Set of pointers to adjacency lists. LEND(K) -C points to the last neighbor of node K for -C K = 1,...,N. Thus, LIST(LEND(K)) < 0 if and -C only if K is a boundary node. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). LIST, LPTR, -C LEND, and LNEW are not altered if IER < 0, -C and are incomplete if IER > 0. -C -C NEAR,NEXT,DIST = Garbage. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if N < 3 on input. -C IER = -2 if the first three nodes are -C collinear. -C IER = L if nodes L and M coincide for some -C M > L. The data structure represents -C a triangulation of nodes 1 to M-1 in -C this case. 
-C -C Modules required by TRMESH: ADDNOD, BDYADD, COVSPH, -C INSERT, INTADD, JRAND, -C LEFT, LSTPTR, STORE, SWAP, -C SWPTST, TRFIND -C -C Intrinsic function called by TRMESH: ABS -C -C*********************************************************** -C - INTEGER I, I0, J, K, LP, LPL, NEXTI, NN - REAL D, D1, D2, D3 -C -C Local parameters: -C -C D = (Negative cosine of) distance from node K to -C node I -C D1,D2,D3 = Distances from node K to nodes 1, 2, and 3, -C respectively -C I,J = Nodal indexes -C I0 = Index of the node preceding I in a sequence of -C unprocessed nodes: I = NEXT(I0) -C K = Index of node to be added and DO-loop index: -C K > 3 -C LP = LIST index (pointer) of a neighbor of K -C LPL = Pointer to the last neighbor of K -C NEXTI = NEXT(I) -C NN = Local copy of N -C - NN = N - IF (NN .LT. 3) THEN - IER = -1 - RETURN - ENDIF -C -C Store the first triangle in the linked list. -C - IF ( .NOT. LEFT (X(1),Y(1),Z(1),X(2),Y(2),Z(2), - . X(3),Y(3),Z(3)) ) THEN -C -C The first triangle is (3,2,1) = (2,1,3) = (1,3,2). -C - LIST(1) = 3 - LPTR(1) = 2 - LIST(2) = -2 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 1 - LPTR(3) = 4 - LIST(4) = -3 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 2 - LPTR(5) = 6 - LIST(6) = -1 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSEIF ( .NOT. LEFT(X(2),Y(2),Z(2),X(1),Y(1),Z(1), - . X(3),Y(3),Z(3)) ) - . THEN -C -C The first triangle is (1,2,3): 3 Strictly Left 1->2, -C i.e., node 3 lies in the left hemisphere defined by -C arc 1->2. -C - LIST(1) = 2 - LPTR(1) = 2 - LIST(2) = -3 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 3 - LPTR(3) = 4 - LIST(4) = -1 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 1 - LPTR(5) = 6 - LIST(6) = -2 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSE -C -C The first three nodes are collinear. -C - IER = -2 - RETURN - ENDIF -C -C Initialize LNEW and test for N = 3. -C - LNEW = 7 - IF (NN .EQ. 
3) THEN - IER = 0 - RETURN - ENDIF -C -C A nearest-node data structure (NEAR, NEXT, and DIST) is -C used to obtain an expected-time (N*log(N)) incremental -C algorithm by enabling constant search time for locating -C each new node in the triangulation. -C -C For each unprocessed node K, NEAR(K) is the index of the -C triangulation node closest to K (used as the starting -C point for the search in Subroutine TRFIND) and DIST(K) -C is an increasing function of the arc length (angular -C distance) between nodes K and NEAR(K): -Cos(a) for arc -C length a. -C -C Since it is necessary to efficiently find the subset of -C unprocessed nodes associated with each triangulation -C node J (those that have J as their NEAR entries), the -C subsets are stored in NEAR and NEXT as follows: for -C each node J in the triangulation, I = NEAR(J) is the -C first unprocessed node in J's set (with I = 0 if the -C set is empty), L = NEXT(I) (if I > 0) is the second, -C NEXT(L) (if L > 0) is the third, etc. The nodes in each -C set are initially ordered by increasing indexes (which -C maximizes efficiency) but that ordering is not main- -C tained as the data structure is updated. -C -C Initialize the data structure for the single triangle. -C - NEAR(1) = 0 - NEAR(2) = 0 - NEAR(3) = 0 - DO 1 K = NN,4,-1 - D1 = -(X(K)*X(1) + Y(K)*Y(1) + Z(K)*Z(1)) - D2 = -(X(K)*X(2) + Y(K)*Y(2) + Z(K)*Z(2)) - D3 = -(X(K)*X(3) + Y(K)*Y(3) + Z(K)*Z(3)) - IF (D1 .LE. D2 .AND. D1 .LE. D3) THEN - NEAR(K) = 1 - DIST(K) = D1 - NEXT(K) = NEAR(1) - NEAR(1) = K - ELSEIF (D2 .LE. D1 .AND. D2 .LE. D3) THEN - NEAR(K) = 2 - DIST(K) = D2 - NEXT(K) = NEAR(2) - NEAR(2) = K - ELSE - NEAR(K) = 3 - DIST(K) = D3 - NEXT(K) = NEAR(3) - NEAR(3) = K - ENDIF - 1 CONTINUE -C -C Add the remaining nodes -C - DO 6 K = 4,NN - CALL ADDNOD (NEAR(K),K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - IF (IER .NE. 0) RETURN -C -C Remove K from the set of unprocessed nodes associated -C with NEAR(K). -C - I = NEAR(K) - IF (NEAR(I) .EQ. 
K) THEN - NEAR(I) = NEXT(K) - ELSE - I = NEAR(I) - 2 I0 = I - I = NEXT(I0) - IF (I .NE. K) GO TO 2 - NEXT(I0) = NEXT(K) - ENDIF - NEAR(K) = 0 -C -C Loop on neighbors J of node K. -C - LPL = LEND(K) - LP = LPL - 3 LP = LPTR(LP) - J = ABS(LIST(LP)) -C -C Loop on elements I in the sequence of unprocessed nodes -C associated with J: K is a candidate for replacing J -C as the nearest triangulation node to I. The next value -C of I in the sequence, NEXT(I), must be saved before I -C is moved because it is altered by adding I to K's set. -C - I = NEAR(J) - 4 IF (I .EQ. 0) GO TO 5 - NEXTI = NEXT(I) -C -C Test for the distance from I to K less than the distance -C from I to J. -C - D = -(X(I)*X(K) + Y(I)*Y(K) + Z(I)*Z(K)) - IF (D .LT. DIST(I)) THEN -C -C Replace J by K as the nearest triangulation node to I: -C update NEAR(I) and DIST(I), and remove I from J's set -C of unprocessed nodes and add it to K's set. -C - NEAR(I) = K - DIST(I) = D - IF (I .EQ. NEAR(J)) THEN - NEAR(J) = NEXTI - ELSE - NEXT(I0) = NEXTI - ENDIF - NEXT(I) = NEAR(K) - NEAR(K) = I - ELSE - I0 = I - ENDIF -C -C Bottom of loop on I. -C - I = NEXTI - GO TO 4 -C -C Bottom of loop on neighbors J. -C - 5 IF (LP .NE. LPL) GO TO 3 - 6 CONTINUE - RETURN - END SUBROUTINE - SUBROUTINE TRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . LIST,LPTR,LEND,TITLE,NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, LIST(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical display of a -C triangulation of a set of nodes on the unit sphere. The -C visible nodes are projected onto the plane that contains -C the origin and has normal defined by a user-specified eye- -C position. 
Projections of adjacent (visible) nodes are -C connected by line segments. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere nodes are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C triangulation is clipped. The projected window -C is a disk of radius r = Sin(A) centered at the -C origin, and only visible nodes whose projections -C are within distance r of the origin are included -C in the plot. Thus, if A = 90, the plot includes -C the entire hemisphere centered at E. 0 .LT. A -C .LE. 90. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. 
-C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted next to the nodes. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, or N is outside its -C valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C The values in the data statement below may be altered -C in order to modify various plotting options. -C -C Modules required by TRPLOT: None -C -C Intrinsic functions called by TRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, LP, LPL, N0, N1 - LOGICAL ANNOT - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . 
Y0, Y1, Z0, Z1 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node whose incident arcs are to be -C drawn -C N1 = Neighbor of N0 -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0,Z0 = Coordinates of N0 in the rotated coordinate -C system or label location (X0,Y0) -C X1,Y1,Z1 = Coordinates of N1 in the rotated coordinate -C system or intersection of edge N0-N1 with -C the equator (in the rotated coordinate -C system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. 
-C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Triangulation'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. -C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. -C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 0.25/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. 
-C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 3 N0 = 1,N - Z0 = EX*X(N0) + EY*Y(N0) + EZ*Z(N0) - IF (Z0 .LT. 0.) GO TO 3 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 3 - LPL = LEND(N0) - LP = LPL -C -C Loop on neighbors N1 of N0. LPL points to the last -C neighbor of N0. Copy the components of N1 into P. -C - 1 LP = LPTR(LP) - N1 = ABS(LIST(LP)) - X1 = R11*X(N1) + R12*Y(N1) - Y1 = R21*X(N1) + R22*Y(N1) + R23*Z(N1) - Z1 = EX*X(N1) + EY*Y(N1) + EZ*Z(N1) - IF (Z1 .LT. 0.) THEN -C -C N1 is a 'southern hemisphere' point. Move it to the -C intersection of edge N0-N1 with the equator so that -C the edge is clipped properly. Z1 is implicitly set -C to 0. -C - X1 = Z0*X1 - Z1*X0 - Y1 = Z0*Y1 - Z1*Y0 - T = SQRT(X1*X1+Y1*Y1) - X1 = X1/T - Y1 = Y1/T - ENDIF -C -C If node N1 is in the window and N1 < N0, bypass edge -C N0->N1 (since edge N1->N0 has already been drawn). -C - IF ( Z1 .GE. 0.0 .AND. X1*X1 + Y1*Y1 .LE. WRS - . .AND. N1 .LT. N0 ) GO TO 2 -C -C Add the edge to the path. -C - WRITE (LUN,180,ERR=13) X0, Y0, X1, Y1 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. 
-C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. -C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0) and draw the label N0. The first char- -C acter will will have its lower left corner about one -C character width to the right of the nodal position. -C - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,220,ERR=13) N0 - 210 FORMAT (2F12.6,' moveto') - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C - Y0 = WR + 3.0*T - WRITE (LUN,230,ERR=13) TITLE, Y0 - 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, - . ' moveto') - WRITE (LUN,240,ERR=13) TITLE - 240 FORMAT (A80/' show') - IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C - X0 = -WR - Y0 = -WR - 50.0/SF - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,250,ERR=13) ELAT, ELON - Y0 = Y0 - 2.0*T - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,260,ERR=13) A - 250 FORMAT ('(Window center: ELAT = ',F7.2, - . ', ELON = ',F8.2,') show') - 260 FORMAT ('(Angular extent: A = ',F5.2,') show') - ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. 
-C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, or N. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. -C - 13 IER = 3 - RETURN - END SUBROUTINE - SUBROUTINE TRPRNT (N,X,Y,Z,IFLAG,LIST,LPTR,LEND,LOUT) - INTEGER N, IFLAG, LIST(*), LPTR(*), LEND(N), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/25/98 -C -C This subroutine prints the triangulation adjacency lists -C created by Subroutine TRMESH and, optionally, the nodal -C coordinates (either latitude and longitude or Cartesian -C coordinates) on logical unit LOUT. The list of neighbors -C of a boundary node is followed by index 0. The numbers of -C boundary nodes, triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3 -C and N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. 
Refer to Subroutine -C TRMESH. -C -C LOUT = Logical unit for output. If LOUT is not in -C the range 0 to 99, output is written to -C logical unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The adjacency lists and nodal coordinates (as specified -C by IFLAG) are written to unit LOUT. -C -C Modules required by TRPRNT: None -C -C*********************************************************** -C - INTEGER I, INC, K, LP, LPL, LUN, NA, NABOR(400), NB, - . ND, NL, NLMAX, NMAX, NODE, NN, NT - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = NABOR index (1 to K) -C INC = Increment for NL associated with an adjacency list -C K = Counter and number of neighbors of NODE -C LP = LIST pointer of a neighbor of NODE -C LPL = Pointer to the last neighbor of NODE -C LUN = Logical unit for output (copy of LOUT) -C NA = Number of arcs in the triangulation -C NABOR = Array containing the adjacency list associated -C with NODE, with zero appended if NODE is a -C boundary node -C NB = Number of boundary nodes encountered -C ND = Index of a neighbor of NODE (or negative index) -C NL = Number of lines that have been printed on the -C current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Upper bound on N (allows 4-digit indexes) -C NODE = Index of a node and DO-loop index (1 to N) -C NN = Local copy of N -C NT = Number of triangles in the triangulation -C - NN = N - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test the range of N. -C - WRITE (LUN,100) NN - IF (NN .LT. 3 .OR. NN .GT. NMAX) THEN -C -C N is outside its valid range. -C - WRITE (LUN,110) - RETURN - ENDIF -C -C Initialize NL (the number of lines printed on the current -C page) and NB (the number of boundary nodes encountered). -C - NL = 6 - NB = 0 - IF (IFLAG .LT. 0) THEN -C -C Print LIST only. 
K is the number of neighbors of NODE -C that have been stored in NABOR. -C - WRITE (LUN,101) - DO 2 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 1 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 1 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. Correct the sign of the last -C neighbor, add 0 to the end of the list, and increment -C NB. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print the list of neighbors. -C - INC = (K-1)/14 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,104) NODE, (NABOR(I), I = 1,K) - IF (K .NE. 14) WRITE (LUN,107) - 2 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude), Y (latitude), and LIST. -C - WRITE (LUN,102) - DO 4 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 3 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 3 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, and NABOR. -C - INC = (K-1)/8 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,105) NODE, X(NODE), Y(NODE), - . (NABOR(I), I = 1,K) - IF (K .NE. 8) WRITE (LUN,107) - 4 CONTINUE - ELSE -C -C Print X, Y, Z, and LIST. -C - WRITE (LUN,103) - DO 6 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 5 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 5 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, Z, and NABOR. -C - INC = (K-1)/5 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,106) NODE, X(NODE), Y(NODE), - . Z(NODE), (NABOR(I), I = 1,K) - IF (K .NE. 
5) WRITE (LUN,107) - 6 CONTINUE - ENDIF -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - IF (NB .NE. 0) THEN - NA = 3*NN - NB - 3 - NT = 2*NN - NB - 2 - ELSE - NA = 3*NN - 6 - NT = 2*NN - 4 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///15X,'STRIPACK Triangulation Data ', - . 'Structure, N = ',I5//) - 101 FORMAT (1X,'Node',31X,'Neighbors of Node'//) - 102 FORMAT (1X,'Node',5X,'Longitude',6X,'Latitude', - . 18X,'Neighbors of Node'//) - 103 FORMAT (1X,'Node',5X,'X(Node)',8X,'Y(Node)',8X, - . 'Z(Node)',11X,'Neighbors of Node'//) - 104 FORMAT (1X,I4,4X,14I5/(1X,8X,14I5)) - 105 FORMAT (1X,I4,2E15.6,4X,8I5/(1X,38X,8I5)) - 106 FORMAT (1X,I4,3E15.6,4X,5I5/(1X,53X,5I5)) - 107 FORMAT (1X) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (1X,10X,'*** N is outside its valid', - . ' range ***') - END SUBROUTINE - SUBROUTINE VRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . NT,LISTC,LPTR,LEND,XC,YC,ZC,TITLE, - . NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, NT, LISTC(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N), - . XC(NT), YC(NT), ZC(NT) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical depiction of a -C Voronoi diagram of a set of nodes on the unit sphere. -C The visible vertices are projected onto the plane that -C contains the origin and has normal defined by a user- -C specified eye-position. Projections of adjacent (visible) -C Voronoi vertices are connected by line segments. -C -C The parameters defining the Voronoi diagram may be com- -C puted by Subroutine CRLIST. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. 
-C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere points are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C Voronoi diagram is clipped. The projected win- -C dow is a disk of radius r = Sin(A) centered at -C the origin, and only visible vertices whose -C projections are within distance r of the origin -C are included in the plot. Thus, if A = 90, the -C plot includes the entire hemisphere centered at -C E. 0 .LT. A .LE. 90. -C -C N = Number of nodes (Voronoi centers) and Voronoi -C regions. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C NT = Number of Voronoi region vertices (triangles, -C including those in the extended triangulation -C if the number of boundary nodes NB is nonzero): -C NT = 2*N-4. 
-C -C LISTC = Array of length 3*NT containing triangle -C indexes (indexes to XC, YC, and ZC) stored -C in 1-1 correspondence with LIST/LPTR entries -C (or entries that would be stored in LIST for -C the extended triangulation): the index of -C triangle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C LPTR = Array of length 3*NT = 6*N-12 containing a -C set of pointers (LISTC indexes) in one-to-one -C correspondence with the elements of LISTC. -C LISTC(LPTR(I)) indexes the triangle which -C follows LISTC(I) in cyclical counterclockwise -C order (the first neighbor follows the last -C neighbor). -C -C LEND = Array of length N containing a set of -C pointers to triangle lists. LP = LEND(K) -C points to a triangle (indexed by LISTC(LP)) -C containing node K for K = 1 to N. -C -C XC,YC,ZC = Arrays of length NT containing the -C Cartesian coordinates of the triangle -C circumcenters (Voronoi vertices). -C XC(I)**2 + YC(I)**2 + ZC(I)**2 = 1. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. -C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted at the Voronoi -C region centers. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, N, or NT is outside -C its valid range. 
-C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C Modules required by VRPLOT: None -C -C Intrinsic functions called by VRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, KV1, KV2, LP, LPL, - . N0 - LOGICAL ANNOT, IN1, IN2 - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . X2, Y0, Y1, Y2, Z1, Z2 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IN1,IN2 = Logical variables with value TRUE iff the -C projections of vertices KV1 and KV2, respec- -C tively, are inside the window -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C KV1,KV2 = Endpoint indexes of a Voronoi edge -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to 
viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0 = Projection plane coordinates of node N0 or -C label location -C X1,Y1,Z1 = Coordinates of vertex KV1 in the rotated -C coordinate system -C X2,Y2,Z2 = Coordinates of vertex KV2 in the rotated -C coordinate system or intersection of edge -C KV1-KV2 with the equator (in the rotated -C coordinate system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3 .OR. NT .NE. 2*N-4) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. -C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Voronoi diagram'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. 
-C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. -C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 1.0/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. -C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on nodes (Voronoi centers) N0. -C LPL indexes the last neighbor of N0. -C - DO 3 N0 = 1,N - LPL = LEND(N0) -C -C Set KV2 to the first (and last) vertex index and compute -C its coordinates (X2,Y2,Z2) in the rotated coordinate -C system. -C - KV2 = LISTC(LPL) - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) -C -C IN2 = TRUE iff KV2 is in the window. -C - IN2 = Z2 .GE. 0. .AND. 
X2*X2 + Y2*Y2 .LE. WRS -C -C Loop on neighbors N1 of N0. For each triangulation edge -C N0-N1, KV1-KV2 is the corresponding Voronoi edge. -C - LP = LPL - 1 LP = LPTR(LP) - KV1 = KV2 - X1 = X2 - Y1 = Y2 - Z1 = Z2 - IN1 = IN2 - KV2 = LISTC(LP) -C -C Compute the new values of (X2,Y2,Z2) and IN2. -C - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Add edge KV1-KV2 to the path iff both endpoints are inside -C the window and KV2 > KV1, or KV1 is inside and KV2 is -C outside (so that the edge is drawn only once). -C - IF (.NOT. IN1 .OR. (IN2 .AND. KV2 .LE. KV1)) - . GO TO 2 - IF (Z2 .LT. 0.) THEN -C -C KV2 is a 'southern hemisphere' point. Move it to the -C intersection of edge KV1-KV2 with the equator so that -C the edge is clipped properly. Z2 is implicitly set -C to 0. -C - X2 = Z1*X2 - Z2*X1 - Y2 = Z1*Y2 - Z2*Y1 - T = SQRT(X2*X2+Y2*Y2) - X2 = X2/T - Y2 = Y2/T - ENDIF - WRITE (LUN,180,ERR=13) X1, Y1, X2, Y2 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. -C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. -C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0), and draw the label N0 with the origin -C of the first character at (X0,Y0). 
-C - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,220,ERR=13) N0 - 210 FORMAT (2F12.6,' moveto') - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C - Y0 = WR + 3.0*T - WRITE (LUN,230,ERR=13) TITLE, Y0 - 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, - . ' moveto') - WRITE (LUN,240,ERR=13) TITLE - 240 FORMAT (A80/' show') - IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C - X0 = -WR - Y0 = -WR - 50.0/SF - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,250,ERR=13) ELAT, ELON - Y0 = Y0 - 2.0*T - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,260,ERR=13) A - 250 FORMAT ('(Window center: ELAT = ',F7.2, - . ', ELON = ',F8.2,') show') - 260 FORMAT ('(Angular extent: A = ',F5.2,') show') - ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. -C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, N, or NT. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. 
-C - 13 IER = 3 - RETURN - END SUBROUTINE - - END MODULE STRIPACK diff --git a/grid_gen/global_scvt/src/grid_gen.F b/grid_gen/global_scvt/src/grid_gen.F deleted file mode 100644 index 34d64bb50..000000000 --- a/grid_gen/global_scvt/src/grid_gen.F +++ /dev/null @@ -1,157 +0,0 @@ -program grid_gen - - use grid_params - use grid_constants - use data_types - use grid_gen_utils - use voronoi_utils - use stripack - use scvt - use grid_meta - - implicit none - - real :: dlat, dlon - real :: dl - real :: d1, d2, d3, d4 - integer :: p1, p2, p3 - integer :: if - character (len=80) :: frame_name - - real :: pi - real :: area_per_sample, nhexs, sum_nhexs, hex_area - type (geo_point) :: p - - integer :: i, j, k, nb, ier - - real, allocatable, dimension(:) :: rlat, rlon, vclat, vclon, x, y, z, xc, yc, zc - integer, allocatable, dimension(:) :: list, lptr, listc, lend - integer, allocatable, dimension(:,:) :: ltri - - integer :: n, nrow, ntmax, nvc - integer :: tr1, tr2, tr3, tr4 - integer :: i1, i2, i3, k1, k2 - - type (adjacency_list) :: alist, clist - - real :: lat1, lon1, lat2, lon2, lat3, lon3, latc, lonc - - - call read_namelist() - - - pi = 4.0*atan(1.0) - - area_per_sample = 4.0 * pi * 6370000**2.0 / 6000000.0 - sum_nhexs = 0.0 - write(0,'(a,f10.1)') 'Computing an estimate for the required number of cells to reach dx=', min_dx - do if = 1,5 - nhexs = 0.0 - do i=1,6000000 - call random_point(p) - d1 = density_for_point(p) - dl = min_dx / (d1 ** 0.25) - hex_area = sqrt(3.0) / 2.0 * dl**2.0 - nhexs = nhexs + area_per_sample / hex_area - end do -! 
write(0,'(a,i2,a,i)') 'Estimate ',if,' for required # hexs:', nint(nhexs) - sum_nhexs = sum_nhexs + nhexs - write(0,'(a,i3,a)',advance='no') ' ...',if*20,'%' - end do - write(0,*) ' ' - write(0,*) 'Estimated # hexs:', nint(sum_nhexs/5.0) - write(0,*) ' ' - - - n = np - - ntmax = 6*n - nvc = ntmax - nrow = 6 - allocate(rlat(n)) - allocate(rlon(n)) - allocate(x(n)) - allocate(y(n)) - allocate(z(n)) - allocate(ltri(nrow,ntmax)) - allocate(list(nvc)) - allocate(lptr(nvc)) - allocate(lend(n)) - allocate(listc(nvc)) - allocate(vclat(nvc)) - allocate(vclon(nvc)) - allocate(xc(nvc)) - allocate(yc(nvc)) - allocate(zc(nvc)) - - - write(0,*) ' ' - write(0,*) 'Reading generating points from a file' - open(22,file='locs.dat',form='formatted',status='old') - if (locs_as_xyz) then - read(22,*) - do i=1,n - read(22,'(10x,f22.10,f23.10,f23.10)') x(i), y(i), z(i) - end do - call trans_inv(x, y, z, rlat, rlon, n) - else - do i=1,n - read(22,'(f13.10,1x,f13.10)') rlat(i), rlon(i) - end do - end if - close(22) - - - ! - ! Compute Voronoi corners - ! - write(0,*) ' ' - write(0,*) 'Computing Voronoi corners' - call compute_vc(rlat, rlon, n, nrow, ntmax, list, lptr, lend, listc, vclat, vclon, nvc) - - - ! - ! Form SCVT - ! 
- call TRANS (n, rlat, rlon, x, y, z) - call TRANS (nvc, vclat, vclon, xc, yc, zc) - write(frame_name,'(a)') 'scvt_initial.ps' - open(32,file=trim(frame_name),form='formatted',status='unknown') - call vrplot(32, 8.0, 0.0, 0.0, 90.0 ,N, X,Y,Z, 2*n-4,LISTC,LPTR,LEND,XC,YC,ZC,'(spherical centroidal voronoi tessellation)',.false.,IER) - close(32) - - call scvt_solve(n, lend, rlat, rlon, nvc, list, lptr, if) - - call compute_vc(rlat, rlon, n, nrow, ntmax, list, lptr, lend, listc, vclat, vclon, nvc) - call TRANS (n, rlat, rlon, x, y, z) - call TRANS (nvc, vclat, vclon, xc, yc, zc) - write(frame_name,'(a)') 'scvt_final.ps' - open(32,file=trim(frame_name),form='formatted',status='unknown') - call vrplot(32, 8.0, 0.0, 0.0, 90.0 ,N, X,Y,Z, 2*n-4,LISTC,LPTR,LEND,XC,YC,ZC,'(spherical centroidal voronoi tessellation)',.false.,IER) - close(32) - - - write(0,*) ' ' - write(0,*) 'Deriving grid metadata and writing output' - call convert_adjacency_list(n, lend, nvc, list, lptr, alist) - call convert_corner_list(n, lend, nvc, listc, lptr, clist) - call write_grid(rlat, rlon, n, vclat, vclon, nvc, alist, clist) - - - deallocate(rlat) - deallocate(x) - deallocate(y) - deallocate(z) - deallocate(rlon) - deallocate(ltri) - deallocate(list) - deallocate(lptr) - deallocate(lend) - deallocate(listc) - deallocate(vclat) - deallocate(vclon) - deallocate(xc) - deallocate(yc) - deallocate(zc) - -end program grid_gen diff --git a/grid_gen/global_scvt/src/module_data_types.F b/grid_gen/global_scvt/src/module_data_types.F deleted file mode 100644 index 011bd8fbf..000000000 --- a/grid_gen/global_scvt/src/module_data_types.F +++ /dev/null @@ -1,206 +0,0 @@ -module data_types - - integer, parameter :: LESS = -1, EQUAL = 0, GREATER = 1 - - type geo_point - real :: lat, lon - end type geo_point - - type send_list_ptr - integer :: nodeID - integer :: nNodeList - integer, pointer, dimension(:) :: nodeList - type (send_list_ptr), pointer :: next - end type send_list_ptr - - type recv_list_ptr - integer 
:: nodeID - integer :: nNodeList - integer, pointer, dimension(:) :: nodeList - type (recv_list_ptr), pointer :: next - end type recv_list_ptr - - type adjacency_list - integer :: nNodes - integer :: nNeighbors - integer, pointer, dimension(:) :: neighbor, start, len - end type adjacency_list - - type binary_tree - integer :: node1, node2 - integer :: vertex1, vertex2 - real :: lat1, lon1, lat2, lon2 - type (binary_tree), pointer :: left, right, parent - end type binary_tree - - contains - - - integer function cmp_points(a, b) - - implicit none - - type (geo_point), intent(in) :: a, b - - if (a%lat > b%lat) then - cmp_points = GREATER - else if (a%lat == b%lat) then - if (a%lon > b%lon) then - cmp_points = GREATER - else if (a%lon == b%lon) then - cmp_points = EQUAL - else - cmp_points = LESS - end if - else - cmp_points = LESS - end if - - end function cmp_points - - - subroutine swap_points(a, b) - - implicit none - - type (geo_point), intent(inout) :: a, b - - type (geo_point) :: temp - - temp = a - a = b - b = temp - - end subroutine swap_points - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE CONVERT_ADJACENCY_LIST - ! - ! Convert adjacency list from format provided by STRIPACK to format used in - ! our code. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine convert_adjacency_list(n, lend, nvc, list, lptr, alist) - - implicit none - - integer, intent(in) :: n, nvc - integer, dimension(n), intent(in) :: lend - integer, dimension(nvc), intent(in) :: lptr - integer, dimension(nvc), intent(in) :: list - type (adjacency_list), intent(inout) :: alist - - integer :: i, j, k, len, ipos - - len = 0 - - ! Count total number of nodes - do i=1,n - - ! 
Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - len = len + 1 - - do while (k /= lend(i)) - k = lptr(k) - len = len + 1 - end do - - end do - - alist % nNodes = n - alist % nNeighbors = len - allocate(alist % neighbor(len)) - allocate(alist % start(n)) - allocate(alist % len(n)) - - ipos = 0 - do i=1,n - - ! Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - ipos = ipos + 1 - - alist % start(i) = ipos - alist % neighbor(ipos) = list(k) - alist % len(i) = 1 - - do while (k /= lend(i)) - k = lptr(k) - ipos = ipos + 1 - - alist % neighbor(ipos) = list(k) - alist % len(i) = alist % len(i) + 1 - end do - end do - - end subroutine convert_adjacency_list - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE CONVERT_CORNER_LIST - ! - ! Convert VC list from format provided by STRIPACK to format used in - ! our code. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine convert_corner_list(n, lend, nvc, listc, lptr, clist) - - implicit none - - integer, intent(in) :: n, nvc - integer, dimension(n), intent(in) :: lend - integer, dimension(nvc), intent(in) :: lptr - integer, dimension(nvc), intent(in) :: listc - type (adjacency_list), intent(inout) :: clist - - integer :: i, j, k, len, ipos - - len = 0 - - ! Count total number of nodes - do i=1,n - - ! Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - len = len + 1 - - do while (k /= lend(i)) - k = lptr(k) - len = len + 1 - end do - - end do - - clist % nNodes = n - clist % nNeighbors = len - allocate(clist % neighbor(len)) - allocate(clist % start(n)) - allocate(clist % len(n)) - - ipos = 0 - do i=1,n - - ! 
Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - ipos = ipos + 1 - - clist % start(i) = ipos - clist % neighbor(ipos) = listc(k) - clist % len(i) = 1 - - do while (k /= lend(i)) - k = lptr(k) - ipos = ipos + 1 - - clist % neighbor(ipos) = listc(k) - clist % len(i) = clist % len(i) + 1 - end do - end do - - end subroutine convert_corner_list - -end module data_types diff --git a/grid_gen/global_scvt/src/module_grid_constants.F b/grid_gen/global_scvt/src/module_grid_constants.F deleted file mode 100644 index b23a3d3ef..000000000 --- a/grid_gen/global_scvt/src/module_grid_constants.F +++ /dev/null @@ -1,11 +0,0 @@ -module grid_constants - - real, parameter :: radius = 1. - real, parameter :: pii = 3.141592653589793 - real, parameter :: rtod = 180./pii - real, parameter :: s_area = 4.*pii*radius*radius - - integer, parameter :: nicos_vertices = 12 - integer, parameter :: nicos_neighbors = 5 - -end module grid_constants diff --git a/grid_gen/global_scvt/src/module_grid_gen_utils.F b/grid_gen/global_scvt/src/module_grid_gen_utils.F deleted file mode 100644 index 80c0fbd52..000000000 --- a/grid_gen/global_scvt/src/module_grid_gen_utils.F +++ /dev/null @@ -1,309 +0,0 @@ -module grid_gen_utils - - use sphere_utilities - - - contains - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE GENERATE_BASE_GRID - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine generate_base_grid(points, neighbors, triangles) - - use data_types - use grid_params - use grid_constants - - implicit none - - type (geo_point), dimension(nicos_vertices), intent(out) :: points - integer, dimension(nicos_neighbors,nicos_vertices), intent(out) :: neighbors - integer, dimension(3,20), intent(out) :: triangles - - real dl,dtri - integer im1, ii, ip1, i - - !-- set point positions - - dl = 2.*pii/5. - dtri = acos(cos(0.5*dl)/(1.+cos(0.5*dl))) - - points(1)%lat = pii/2. ! 
latitude of north pole point - points(1)%lon = 0. ! longitude of north pole point - - ! - ! Set points just south of the north pole - ! - do i=2,6 - points(i)%lat = pii/2. - dtri ! latitude, same for all 5 point - points(i)%lon = 0. + dl*float(i-2) ! longitude - enddo - - points(12)%lat = -pii/2. ! latitude of south pole point - points(12)%lon = 0. ! longitude of south pole point - - do i=7,11 ! the set points just north of the south pole - points(i)%lat = -pii/2. + dtri ! latitude, same for all 5 point - points(i)%lon = 0. + dl*(0.5+float(i-7)) ! longitude. shifted from north - enddo - - ! construct neighbors - - do i=1,5 - neighbors(i,1) = i+1 ! north pole neighbors, points 2 through 6 - neighbors(i,12) = i+6 ! south pole neighbors, points 7 through 11 - neighbors(1,i+1) = 1 ! add north pole point - neighbors(1,i+6) = 12 - enddo - - do i=2,6 - im1 = i-1 - ip1 = i+1 - if(im1 .lt. 2) im1 = 6 - if(ip1 .gt. 6) ip1 = 2 - neighbors(2,i) = im1 - neighbors(3,i) = ip1 - - im1 = i+4 - ii = i+5 - if(im1 .lt. 7) im1 = 11 - neighbors(4,i) = im1 - neighbors(5,i) = ii - - enddo - - do i=7,11 - im1 = i-1 - ip1 = i+1 - if(im1 .lt. 7) im1 = 11 - if(ip1 .gt. 11) ip1 = 7 - neighbors(2,i) = im1 - neighbors(3,i) = ip1 - - im1 = i-5 - ii = i-4 - if(ii .gt. 6) ii = 2 - neighbors(4,i) = im1 - neighbors(5,i) = ii - enddo - - ! set the triangle points - - do i=1,5 ! triangles with north pole points - triangles(1,i) = 1 - triangles(2,i) = i+1 - triangles(3,i) = i+2 - if(triangles(2,i) .gt. 6) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 6) triangles(3,i) = triangles(3,i) - 5 - enddo - - do i=16,20 ! triangles with south pole points - triangles(1,i) = 12 - triangles(2,i) = i-9 - triangles(3,i) = i-8 - if(triangles(2,i) .gt. 11) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 11) triangles(3,i) = triangles(3,i) - 5 - enddo - - do i=6,10 ! upward pointing equator triangles - triangles(1,i) = i-4 - triangles(2,i) = i+5 - triangles(3,i) = i+1 - if(triangles(2,i) .gt. 
11) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 11) triangles(3,i) = triangles(3,i) - 5 - enddo - - do i=11,15 ! downward pointing equator triangles - triangles(1,i) = i-4 - triangles(2,i) = i-9 - triangles(3,i) = i-8 - if(triangles(2,i) .gt. 6) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 6) triangles(3,i) = triangles(3,i) - 5 - enddo - - write(6,*) ' ' - write(6,*) ' triangle nodes ' - do i=1,20 - write(6,*) i, triangles(1,i),triangles(2,i),triangles(3,i) - enddo - - end subroutine generate_base_grid - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE PROCESS_TRIANGLE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine process_triangle(triangle, scalar, nmax, n, np) - - use data_types - - implicit none - - integer, intent(in) :: nmax, np - integer, intent(inout) :: n - type (geo_point), dimension(3), intent(in) :: triangle - type (geo_point), dimension(nmax), intent(inout) :: scalar - - integer :: i, j, k - type (geo_point), dimension(np) :: ab, ac, pline - - call great_circle_points(triangle(1), triangle(2), ab, np) - call great_circle_points(triangle(1), triangle(3), ac, np) - - do i=1,np - call add_point(scalar, nmax, n, ab(i)) - call add_point(scalar, nmax, n, ac(i)) - end do - - do i=3,np-1 - call great_circle_points(ab(i), ac(i), pline, i) - do j=2,i-1 - n = n + 1 - scalar(n) = pline(j) -!!! No need to check for points on the interior of the triangle -!!! call add_point(scalar, nmax, n, pline(j)) - end do - end do - - i=np - call great_circle_points(ab(i), ac(i), pline, i) - do j=2,i-1 - call add_point(scalar, nmax, n, pline(j)) - end do - - end subroutine process_triangle - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE ADD_POINT - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine add_point(array, nmax, n, point) - - use data_types - - implicit none - - integer, intent(in) :: nmax - integer, intent(inout) :: n - type (geo_point), intent(in) :: point - type (geo_point), dimension(nmax), intent(inout) :: array - - integer :: i - logical :: already_have - - already_have = .false. - - do i=1,n - if (abs(point%lat - array(i)%lat) < 0.00001 .and. & - abs(point%lon - array(i)%lon) < 0.00001) then - already_have = .true. - exit - end if - end do - - if (.not. already_have) then - n = n + 1 - array(n) = point - end if - - end subroutine add_point - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_H_AREA - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_h_area(corners, centers, areas, n) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), dimension(6,2*n-1,n), intent(inout) :: corners - type (geo_point), dimension(2*n-1,n), intent(inout) :: centers - real, dimension(2*n-1,n), intent(inout) :: areas - - integer :: i, j, it, itp1 - real :: hex_area - - do j=1,n - do i=1,2*n-1 - hex_area = 0. - do it=1,6 ! 6 triangles in the hexagon - itp1 = it+1 - if(itp1 > 6) itp1 = 1 - hex_area = hex_area + triangle_area( centers(i,j), & - corners( it,i,j), & - corners(itp1,i,j), 1.) - enddo - areas(i,j) = hex_area - enddo - enddo - - end subroutine compute_h_area - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_EDGE_LENGTHS - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine compute_edge_lengths(corners, edge_lengths, n) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), dimension(6,2*n-1,n), intent(inout) :: corners - real, dimension(6,2*n-1,n), intent(inout) :: edge_lengths - - integer :: i, j, it, itp1 - - do j=1,n - do i=1,2*n-1 - do it=1,6 - itp1 = it+1 - if(itp1 > 6) itp1 = 1 - edge_lengths(it,i,j) = sphere_distance( corners( it,i,j), & - corners(itp1,i,j), 1.) - end do - end do - end do - - end subroutine compute_edge_lengths - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_DX - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_dx( centers, dx, n ) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), dimension(0:2*n,0:n+1), intent(inout) :: centers - real, dimension(3,2*n-1,n), intent(inout) :: dx - - integer :: i, j - - do j=1,n - do i=1,2*n-1 - dx(1,i,j) = sphere_distance( centers(i ,j ), & - centers(i-1,j ), 1. ) - dx(2,i,j) = sphere_distance( centers(i ,j ), & - centers(i-1,j-1), 1. ) - dx(3,i,j) = sphere_distance( centers(i ,j ), & - centers(i ,j-1), 1. ) - enddo - enddo - - end subroutine compute_dx - -end module grid_gen_utils diff --git a/grid_gen/global_scvt/src/module_grid_meta.F b/grid_gen/global_scvt/src/module_grid_meta.F deleted file mode 100644 index 4d513830a..000000000 --- a/grid_gen/global_scvt/src/module_grid_meta.F +++ /dev/null @@ -1,1470 +0,0 @@ -module grid_meta - - contains - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE WRITE_GRID - ! - ! Create metadata for mesh and write out the complete grid information. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine write_grid(rlat, rlon, n, vclat, vclon, nvc, alist, clist) - - use data_types - use grid_constants - use sphere_utilities - use write_netcdf - use scvt - - implicit none - - integer, intent(in) :: n, nvc - real, dimension(n), intent(inout) :: rlat, rlon - real, dimension(nvc), intent(inout) :: vclat, vclon - type (adjacency_list) :: alist, clist - - integer, parameter :: maxEdges = 10 - - type tree_pointer_list - integer :: stage - type (binary_tree), pointer :: p - end type tree_pointer_list - - integer :: i, ii, j, js, k, prev_edge, next_edge, vtx, nObtuse, jj, iEdge, iFlag - integer :: nCells, nEdges, nVertices, nVertLevels, nTracers - integer, dimension(3) :: cellsOnTriangle - integer, allocatable, dimension(:) :: permutation, indexToCellID, indexToEdgeID, indexToVertexID - integer, allocatable, dimension(:) :: nEdgesOnCell, nEdgesOnEdge, nEdgesOnVertex, nCellsOnVertex - integer, allocatable, dimension(:) :: tempEdgesOnEdge - integer, allocatable, dimension(:,:) :: cellsOnEdge, edgesOnCell, verticesOnCell, cellsOnCell, & - verticesOnEdge, edgesOnEdge, edgesOnVertex, cellsOnVertex - integer, allocatable, dimension(:) :: isObtuse - real, allocatable, dimension(:,:) :: weightsOnEdge, kiteAreasOnVertex - integer :: temp - logical :: tdrtest = .true. 
- real :: sum_r, area, r, s, de, rtmp - real, allocatable, dimension(:) :: latCell, lonCell, latEdge, lonEdge, angleEdge, latVertex, lonVertex, & - lat1Edge, lon1Edge, lat2Edge, lon2Edge, dvEdge, dv1Edge, dv2Edge, dcEdge, & - areaCell, areaTriangle, fEdge, fVertex, h_s, u_sbr - real, allocatable, dimension(:,:,:) :: u, v, h, vh, circulation, vorticity, ke - real, allocatable, dimension(:,:,:,:) :: tracers - real, allocatable, dimension(:) :: xCell, yCell, zCell, xEdge, yEdge, zEdge, xVertex, yVertex, zVertex, meshDensity - type (geo_point) :: vertex1GP, vertex2GP, cell1GP, cell2GP, cell3GP, edgeGP, edgeGP_prev, edgeGP_next, pCell - type (geo_point) :: center - type (geo_point), allocatable, dimension(:) :: points - type (binary_tree), pointer :: treeRoot - type (tree_pointer_list), allocatable, dimension(:) :: cursor - - - ! - ! Compute number of cells - ! - nCells = alist % nNodes - - - ! - ! Compute number of edges - ! - nEdges = alist % nNeighbors / 2 - - - ! - ! Compute number of vertices - ! - nVertices = nEdges - nCells + 2 ! 
Euler's formula - - - allocate(latCell(nCells)) - allocate(lonCell(nCells)) - allocate(meshDensity(nCells)) - allocate(xCell(nCells)) - allocate(yCell(nCells)) - allocate(zCell(nCells)) - allocate(latEdge(nEdges)) - allocate(lonEdge(nEdges)) - allocate(xEdge(nEdges)) - allocate(yEdge(nEdges)) - allocate(zEdge(nEdges)) - allocate(dvEdge(nEdges)) - allocate(dv1Edge(nEdges)) - allocate(dv2Edge(nEdges)) - allocate(dcEdge(nEdges)) - allocate(areaCell(nCells)) - allocate(areaTriangle(nVertices)) - allocate(angleEdge(nEdges)) - allocate(u_sbr(nEdges)) - allocate(latVertex(nVertices)) - allocate(lonVertex(nVertices)) - allocate(xVertex(nVertices)) - allocate(yVertex(nVertices)) - allocate(zVertex(nVertices)) - allocate(lat1Edge(nEdges)) - allocate(lon1Edge(nEdges)) - allocate(lat2Edge(nEdges)) - allocate(lon2Edge(nEdges)) - allocate(cellsOnEdge(2,nEdges)) - allocate(isObtuse(nVertices)) - allocate(edgesOnCell(maxEdges,nCells)) - allocate(verticesOnCell(maxEdges,nCells)) - allocate(verticesOnEdge(2,nEdges)) - allocate(edgesOnEdge(2*maxEdges,nEdges)) - allocate(edgesOnVertex(3,nVertices)) - allocate(cellsOnVertex(3,nVertices)) - allocate(tempEdgesOnEdge(2*maxEdges)) - allocate(weightsOnEdge(2*maxEdges,nEdges)) - allocate(kiteAreasOnVertex(3,nVertices)) - allocate(cellsOnCell(maxEdges,nCells)) - allocate(nEdgesOnCell(nCells)) - allocate(nEdgesOnEdge(nEdges)) - allocate(nEdgesOnVertex(nVertices)) - allocate(nCellsOnVertex(nVertices)) - allocate(cursor(nEdges)) - - - ! - ! Determine lat/lon for each cell - ! - do i=1,nCells - latCell(i) = rlat(i) - lonCell(i) = rlon(i) - end do - - - ! - ! Determine lat/lon for each vertex - ! - do i=1,nVertices - latVertex(i) = vclat(i) - lonVertex(i) = vclon(i) - end do - - - ! - ! Construct a list of edges (actually, a BST) - ! 
- nullify(treeRoot) - do i=1,nCells - do j = clist % start(i), clist % start(i) + clist % len(i) - 2 - js = j+1 - call insert_edge_to_tree(i, & - clist % neighbor(j), clist % neighbor(js), & - vclat(clist % neighbor(j)), vclon(clist % neighbor(j)), & - vclat(clist % neighbor(js)), vclon(clist % neighbor(js)), & - treeRoot) - end do - - js = clist % start(i) - call insert_edge_to_tree(i, & - clist % neighbor(j), clist % neighbor(js), & - vclat(clist % neighbor(j)), vclon(clist % neighbor(j)), & - vclat(clist % neighbor(js)), vclon(clist % neighbor(js)), & - treeRoot) - end do - - - ! - ! Determine which cells are on each edge, and lat/lon of edge endpoints - ! - i = 1 - j = 1 - cursor(i) % p => treeRoot - cursor(i) % stage = 0 - do while (i > 0) - if (associated(cursor(i) % p % left) .and. cursor(i) % stage < 1) then - cursor(i) % stage = 1 - i = i + 1 - cursor(i) % p => cursor(i-1) % p % left - cursor(i) % stage = 0 - cycle - end if - if (cursor(i) % stage < 2) then - cellsOnEdge(1,j) = cursor(i) % p % node1 - cellsOnEdge(2,j) = cursor(i) % p % node2 - lat1Edge(j) = cursor(i) % p % lat1 - lon1Edge(j) = cursor(i) % p % lon1 - lat2Edge(j) = cursor(i) % p % lat2 - lon2Edge(j) = cursor(i) % p % lon2 - verticesOnEdge(1,j) = cursor(i) % p % vertex1 - verticesOnEdge(2,j) = cursor(i) % p % vertex2 - j = j + 1 - cursor(i) % stage = 2 - end if - if (associated(cursor(i) % p % right) .and. cursor(i) % stage < 3) then - cursor(i) % stage = 3 - i = i + 1 - cursor(i) % p => cursor(i-1) % p % right - cursor(i) % stage = 0 - cycle - end if - i = i - 1 - end do - - - ! - ! Compute distance between cell centroids for each edge - ! - do i=1,nEdges - cell1GP % lat = latCell(cellsOnEdge(1,i)) - cell1GP % lon = lonCell(cellsOnEdge(1,i)) - cell2GP % lat = latCell(cellsOnEdge(2,i)) - cell2GP % lon = lonCell(cellsOnEdge(2,i)) - dcEdge(i) = sphere_distance(cell1GP, cell2GP, 1.0) - end do - - - ! - ! Determine which edges are on each cell - ! 
- do i=1,nCells - nEdgesOnCell(i) = 0 - end do - - do j=1,nEdges - nEdgesOnCell(cellsOnEdge(1,j)) = nEdgesOnCell(cellsOnEdge(1,j)) + 1 - if (nEdgesOnCell(cellsOnEdge(1,j)) > maxEdges) then - write(0,*) 'Houston, we have an edge problem.' - stop - end if - edgesOnCell(nEdgesOnCell(cellsOnEdge(1,j)),cellsOnEdge(1,j)) = j - - nEdgesOnCell(cellsOnEdge(2,j)) = nEdgesOnCell(cellsOnEdge(2,j)) + 1 - if (nEdgesOnCell(cellsOnEdge(2,j)) > maxEdges) then - write(0,*) 'Houston, we have an edge problem.' - stop - end if - edgesOnCell(nEdgesOnCell(cellsOnEdge(2,j)),cellsOnEdge(2,j)) = j - end do - do i=1,nCells - do j=nEdgesOnCell(i)+1,maxEdges - edgesOnCell(j,i) = edgesOnCell(nEdgesOnCell(i),i) - end do - end do - - - ! - ! Determine which cells are on each cell - ! - do i=1,nCells - k = 1 - do j = alist % start(i), alist % start(i) + alist % len(i) - 1 - cellsOnCell(k,i) = alist % neighbor(j) - k = k + 1 - end do - do j=k,maxEdges - cellsOnCell(j,i) = alist % neighbor(alist % start(i) + alist % len(i) - 1) - end do - end do - - ! Re-order edgesOnCell to be in the same order as cellsOnCell - do i=1,nCells - do j=1,nEdgesOnCell(i) - if (cellsOnEdge(1,edgesOnCell(j,i)) /= cellsOnCell(j,i) .and. cellsOnEdge(2,edgesOnCell(j,i)) /= cellsOnCell(j,i)) then - ii = 0 - do k=j+1,nEdgesOnCell(i) - if (cellsOnEdge(1,edgesOnCell(k,i)) == cellsOnCell(j,i) .or. cellsOnEdge(2,edgesOnCell(k,i)) == cellsOnCell(j,i)) then - ! Swap edgesOnCell(k,i) and edgesOnCell(j,i) - ii = edgesOnCell(j,i) - edgesOnCell(j,i) = edgesOnCell(k,i) - edgesOnCell(k,i) = ii - exit - end if - end do -if (ii == 0) then - write(0,*) 'We didn''t find an edge to match cellsOnCell' - stop -end if - end if - end do - end do - - - ! - ! Compute lat/lon of each edge - ! 
- do j=1,nEdges - vertex1GP % lat = lat1Edge(j) - vertex1GP % lon = lon1Edge(j) - vertex2GP % lat = lat2Edge(j) - vertex2GP % lon = lon2Edge(j) - cell1GP % lat = latCell(cellsOnEdge(1,j)) - cell1GP % lon = lonCell(cellsOnEdge(1,j)) - cell2GP % lat = latCell(cellsOnEdge(2,j)) - cell2GP % lon = lonCell(cellsOnEdge(2,j)) - call compute_edge_latlon(cell1GP, cell2GP, vertex1GP, vertex2GP, edgeGP) - - dvEdge(j) = sphere_distance(vertex1GP, vertex2GP, 1.0) - rtmp = (vertex2GP % lat - vertex1GP % lat) / dvEdge(j) - if (rtmp > 1.0) rtmp = 1.0 - if (rtmp < -1.0) rtmp = -1.0 - rtmp = acos(rtmp) - angleEdge(j) = meridian_angle(edgeGP, vertex2GP) - angleEdge(j) = sign(rtmp, angleEdge(j)) - if (angleEdge(j) > pii) angleEdge(j) = angleEdge(j) - 2.0*pii - if (angleEdge(j) < -pii) angleEdge(j) = angleEdge(j) + 2.0*pii - latEdge(j) = edgeGP % lat - lonEdge(j) = edgeGP % lon - - ! Compute u for solid body rotation - u_sbr(j) = (sin(vertex2GP % lat) - sin(vertex1GP % lat)) / dvEdge(j) - - ! Make sure order of vertices on edge is correct -! if (is_flipped_vertex_order2(edgeGP, cell2GP, vertex2GP)) then - if (is_flipped_vertex_order(cell1GP % lat, cell1GP % lon, & - cell2GP % lat, cell2GP % lon, & - vertex1GP % lat, vertex1GP % lon, & - vertex2GP % lat, vertex2GP % lon)) then - temp = verticesOnEdge(1,j) - verticesOnEdge(1,j) = verticesOnEdge(2,j) - verticesOnEdge(2,j) = temp - u_sbr(j) = -1.0*u_sbr(j) - angleEdge(j) = angleEdge(j) + pii - if (angleEdge(j) > pii) angleEdge(j) = angleEdge(j) - 2.0*pii - if (angleEdge(j) < -pii) angleEdge(j) = angleEdge(j) + 2.0*pii - end if - - end do - - - ! - ! Determine which vertices are on each cell - ! 
- do i=1,nCells - if (i == cellsOnEdge(1,edgesOnCell(1,i))) then - verticesOnCell(1,i) = verticesOnEdge(1,edgesOnCell(1,i)) - else if (i == cellsOnEdge(2,edgesOnCell(1,i))) then - verticesOnCell(1,i) = verticesOnEdge(2,edgesOnCell(1,i)) - else -write(0,*) 'THIS EDGE SHOULDN''T BELONG TO THIS CELL' - end if - do j=1,nEdgesOnCell(i)-1 - if (verticesOnEdge(1,edgesOnCell(j,i)) == verticesOnCell(j,i)) then - verticesOnCell(j+1,i) = verticesOnEdge(2,edgesOnCell(j,i)) - else if (verticesOnEdge(2,edgesOnCell(j,i)) == verticesOnCell(j,i)) then - verticesOnCell(j+1,i) = verticesOnEdge(1,edgesOnCell(j,i)) - else -write(0,*) 'Broken chain of vertex-edge-vertex.' -stop - end if - end do - end do - - - ! - ! Determine which edges are incident with each vertex - ! - nEdgesOnVertex(:) = 0 - do j=1,nEdges - nEdgesOnVertex(verticesOnEdge(1,j)) = nEdgesOnVertex(verticesOnEdge(1,j)) + 1 - if (nEdgesOnVertex(verticesOnEdge(1,j)) > 3) then - write(0,*) 'We have too many edges incident with vertex ',verticesOnEdge(1,j) - stop - end if - edgesOnVertex(nEdgesOnVertex(verticesOnEdge(1,j)),verticesOnEdge(1,j)) = j - - nEdgesOnVertex(verticesOnEdge(2,j)) = nEdgesOnVertex(verticesOnEdge(2,j)) + 1 - if (nEdgesOnVertex(verticesOnEdge(2,j)) > 3) then - write(0,*) 'We have too many edges incident with vertex ',verticesOnEdge(2,j) - stop - end if - edgesOnVertex(nEdgesOnVertex(verticesOnEdge(2,j)),verticesOnEdge(2,j)) = j - end do - - - ! - ! Determine which cells share each vertex - ! - nCellsOnVertex(:) = 0 - do i=1,nCells - do j=1,nEdgesOnCell(i) - nCellsOnVertex(verticesOnCell(j,i)) = nCellsOnVertex(verticesOnCell(j,i)) + 1 - if (nCellsOnVertex(verticesOnCell(j,i)) > 3) then - write(0,*) 'We have too many cells sharing vertex ', verticesOnCell(j,i) - stop - end if - cellsOnVertex(nCellsOnVertex(verticesOnCell(j,i)),verticesOnCell(j,i)) = i - end do - end do - - - ! - ! Determine which edges "neighbor" each edge - ! 
- do j=1,nEdges - allocate(points(nEdgesOnCell(cellsOnEdge(1,j)))) - allocate(permutation(nEdgesOnCell(cellsOnEdge(1,j)))) - js = 1 - points(js) % lat = latEdge(j) - points(js) % lon = lonEdge(j) - permutation(js) = j - js = js + 1 - center % lat = latCell(cellsOnEdge(1,j)) - center % lon = lonCell(cellsOnEdge(1,j)) - - do k=1,nEdgesOnCell(cellsOnEdge(1,j)) - if (edgesOnCell(k,cellsOnEdge(1,j)) /= j) then - nEdgesOnEdge(j) = nEdgesOnEdge(j) + 1 - edgesOnEdge(nEdgesOnEdge(j),j) = edgesOnCell(k,cellsOnEdge(1,j)) - points(js) % lat = latEdge(edgesOnCell(k,cellsOnEdge(1,j))) - points(js) % lon = lonEdge(edgesOnCell(k,cellsOnEdge(1,j))) - permutation(js) = edgesOnCell(k,cellsOnEdge(1,j)) - js = js + 1 - end if - end do - call order_points_ccw(center, nEdgesOnCell(cellsOnEdge(1,j)), points, permutation) - do k=2,nEdgesOnCell(cellsOnEdge(1,j)) - edgesOnEdge(k-1,j) = permutation(k) - end do - deallocate(points) - deallocate(permutation) - - allocate(points(nEdgesOnCell(cellsOnEdge(2,j)))) - allocate(permutation(nEdgesOnCell(cellsOnEdge(2,j)))) - js = 1 - points(js) % lat = latEdge(j) - points(js) % lon = lonEdge(j) - permutation(js) = j - js = js + 1 - center % lat = latCell(cellsOnEdge(2,j)) - center % lon = lonCell(cellsOnEdge(2,j)) - - do k=1,nEdgesOnCell(cellsOnEdge(2,j)) - if (edgesOnCell(k,cellsOnEdge(2,j)) /= j) then - nEdgesOnEdge(j) = nEdgesOnEdge(j) + 1 - edgesOnEdge(nEdgesOnEdge(j),j) = edgesOnCell(k,cellsOnEdge(2,j)) - points(js) % lat = latEdge(edgesOnCell(k,cellsOnEdge(2,j))) - points(js) % lon = lonEdge(edgesOnCell(k,cellsOnEdge(2,j))) - permutation(js) = edgesOnCell(k,cellsOnEdge(2,j)) - js = js + 1 - end if - end do - call order_points_ccw(center, nEdgesOnCell(cellsOnEdge(2,j)), points, permutation) - do k=2,nEdgesOnCell(cellsOnEdge(2,j)) - edgesOnEdge(nEdgesOnCell(cellsOnEdge(1,j))+k-2,j) = permutation(k) - end do - deallocate(points) - deallocate(permutation) - end do - - - ! - ! Compute area of each cell - ! 
- do i=1,nCells - cell1GP % lat = latCell(i) - cell1GP % lon = lonCell(i) - areaCell(i) = 0.0 - do j=1,nEdgesOnCell(i)-1 - vertex1GP % lat = latVertex(verticesOnCell(j,i)) - vertex1GP % lon = lonVertex(verticesOnCell(j,i)) - vertex2GP % lat = latVertex(verticesOnCell(j+1,i)) - vertex2GP % lon = lonVertex(verticesOnCell(j+1,i)) - areaCell(i) = areaCell(i) + triangle_area(cell1GP, vertex1GP, vertex2GP, 1.0) - end do - vertex1GP % lat = latVertex(verticesOnCell(j,i)) - vertex1GP % lon = lonVertex(verticesOnCell(j,i)) - vertex2GP % lat = latVertex(verticesOnCell(1,i)) - vertex2GP % lon = lonVertex(verticesOnCell(1,i)) - areaCell(i) = areaCell(i) + triangle_area(cell1GP, vertex1GP, vertex2GP, 1.0) - end do - - - ! - ! Compute area of triangles associated with each vertex - ! - do i=1,nVertices - cell1GP % lat = latCell(cellsOnVertex(1,i)) - cell1GP % lon = lonCell(cellsOnVertex(1,i)) - cell2GP % lat = latCell(cellsOnVertex(2,i)) - cell2GP % lon = lonCell(cellsOnVertex(2,i)) - cell3GP % lat = latCell(cellsOnVertex(3,i)) - cell3GP % lon = lonCell(cellsOnVertex(3,i)) - areaTriangle(i) = triangle_area(cell1GP, cell2GP, cell3GP, 1.0) - end do - - ! - ! Test to see if any of the triangles are obtuse - ! - nObtuse = 0 - do i=1,nVertices - cell1GP % lat = latCell(cellsOnVertex(1,i)) - cell1GP % lon = lonCell(cellsOnVertex(1,i)) - cell2GP % lat = latCell(cellsOnVertex(2,i)) - cell2GP % lon = lonCell(cellsOnVertex(2,i)) - cell3GP % lat = latCell(cellsOnVertex(3,i)) - cell3GP % lon = lonCell(cellsOnVertex(3,i)) - isObtuse(i) = obtuse(cell1GP, cell2GP, cell3GP) - if(isObtuse(i).gt.0) nObtuse = nObtuse + 1 - end do - write(6,*) ' number of obtuse triangles ', nObtuse - - - kiteAreasOnVertex(:,:) = -1.0 - - ! - ! Compute weights used in tangential velocity reconstruction - ! 
- do j=1,nEdges - cell1GP % lat = latCell(cellsOnEdge(1,j)) - cell1GP % lon = lonCell(cellsOnEdge(1,j)) - cell2GP % lat = latCell(cellsOnEdge(2,j)) - cell2GP % lon = lonCell(cellsOnEdge(2,j)) - de = dcEdge(j) - prev_edge = j - sum_r = 0.0 - do i=1,nEdgesOnCell(cellsOnEdge(1,j))-1 - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - write(0,*) 'Somehow these edges don''t share a vertex',j - write(0,*) 'Cells are ',cellsOnEdge(1,j),cellsOnEdge(2,j) - write(0,*) 'Edge ',prev_edge,' has vertices ',verticesOnEdge(1,prev_edge),verticesOnEdge(2,prev_edge) - write(0,*) 'Edge ',next_edge,' has vertices ',verticesOnEdge(1,next_edge),verticesOnEdge(2,next_edge) - write(0,*) 'edgesOnEdge()=',edgesOnEdge(:,j) - write(0,*) 'verticesOnCell(1) = ',verticesOnCell(:,cellsOnEdge(1,j)) - write(0,*) 'edgesOnCell(1) = ',edgesOnCell(:,cellsOnEdge(1,j)) - write(0,*) 'verticesOnEdge(12821)=',verticesOnEdge(:,12821) - write(0,*) 'verticesOnEdge(12823)=',verticesOnEdge(:,12823) - write(0,*) 'verticesOnEdge(13243)=',verticesOnEdge(:,13243) - write(0,*) 'verticesOnEdge(13245)=',verticesOnEdge(:,13245) - write(0,*) 'verticesOnEdge(13448)=',verticesOnEdge(:,13448) - write(0,*) 'verticesOnEdge(13449)=',verticesOnEdge(:,13449) - stop - end if - cell1GP % lat = latCell(cellsOnEdge(1,j)) - cell1GP % lon = lonCell(cellsOnEdge(1,j)) - vertex1GP % lat = latVertex(vtx) - vertex1GP % lon = lonVertex(vtx) - edgeGP_prev % lat = latEdge(prev_edge) - edgeGP_prev % lon = lonEdge(prev_edge) - edgeGP_next % lat = latEdge(next_edge) - edgeGP_next % lon = lonEdge(next_edge) - - if(isObtuse(vtx).eq.0) then - area = abs(triangle_area(cell1GP, vertex1GP, edgeGP_prev, 1.0)) 
- area = area + abs(triangle_area(cell1GP, vertex1GP, edgeGP_next, 1.0)) - else - if(cellsOnVertex(isObtuse(vtx),vtx).eq.cellsOnEdge(1,j)) then - iFlag = 0 - do ii=1,3 - jj = edgesOnVertex(ii,vtx) - if(jj.ne.prev_edge.and.jj.ne.next_edge) then - write(6,*) jj, prev_edge, next_edge - if(iFlag.eq.1) then - write(6,*) ' can not be true ' - stop - endif - iEdge = jj - iFlag = 1 - endif - enddo - edgeGP % lat = latEdge(iEdge) - edgeGP % lon = lonEdge(iEdge) - area = abs(triangle_area(cell1GP, edgeGP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, edgeGP, edgeGP_next, 1.0)) - else - area = abs(triangle_area(cell1GP, edgeGP_prev, edgeGP_next, 1.0)) - endif - endif - - do ii=1,3 - if (cellsOnEdge(1,j) == cellsOnVertex(ii,vtx)) then - kiteAreasOnVertex(ii,vtx) = area - exit - end if - end do - - if(.not.tdrtest) then - write(6,*) ' not tdrtest' - r = area / areaCell(cellsOnEdge(1,j)) - sum_r = sum_r + r - if (cellsOnEdge(1,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = 1.0 - else - s = -1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - endif - - prev_edge = next_edge - end do - - prev_edge = j - sum_r = 0.0 - do i=nEdgesOnCell(cellsOnEdge(1,j)),nEdgesOnEdge(j) - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. 
& - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - write(0,*) 'Somehow these edges don''t share a vertex' - write(0,*) 'Edge ',prev_edge,' has vertices ',verticesOnEdge(1,prev_edge),verticesOnEdge(2,prev_edge) - write(0,*) 'Edge ',next_edge,' has vertices ',verticesOnEdge(1,next_edge),verticesOnEdge(2,next_edge) - write(0,*) 'VerticesOnCell(2) = ',verticesOnCell(:,cellsOnEdge(2,j)) - write(0,*) 'edgesOnCell(2) = ',edgesOnCell(:,cellsOnEdge(2,j)) - stop - end if - cell1GP % lat = latCell(cellsOnEdge(2,j)) - cell1GP % lon = lonCell(cellsOnEdge(2,j)) - vertex1GP % lat = latVertex(vtx) - vertex1GP % lon = lonVertex(vtx) - - edgeGP_prev % lat = latEdge(prev_edge) - edgeGP_prev % lon = lonEdge(prev_edge) - edgeGP_next % lat = latEdge(next_edge) - edgeGP_next % lon = lonEdge(next_edge) - - if(isObtuse(vtx).eq.0) then - area = abs(triangle_area(cell1GP, vertex1GP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, vertex1GP, edgeGP_next, 1.0)) - else - if(cellsOnVertex(isObtuse(vtx),vtx).eq.cellsOnEdge(2,j)) then - iFlag = 0 - do ii=1,3 - jj = edgesOnVertex(ii,vtx) - if(jj.ne.prev_edge.and.jj.ne.next_edge) then - write(6,*) jj, prev_edge, next_edge - if(iFlag.eq.1) then - write(6,*) ' can not be true ' - stop - endif - iEdge = jj - iFlag = 1 - endif - enddo - edgeGP % lat = latEdge(iEdge) - edgeGP % lon = lonEdge(iEdge) - area = abs(triangle_area(cell1GP, edgeGP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, edgeGP, edgeGP_next, 1.0)) - else - area = abs(triangle_area(cell1GP, edgeGP_prev, edgeGP_next, 1.0)) - endif - endif - - do ii=1,3 - if (cellsOnEdge(2,j) == cellsOnVertex(ii,vtx)) then - kiteAreasOnVertex(ii,vtx) = area - exit - end if - end do - - if(.not.tdrtest) then - write(6,*) ' not tdrtest' - r = area / areaCell(cellsOnEdge(2,j)) - sum_r = sum_r + r - if (cellsOnEdge(2,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = -1.0 - else - s = 1.0 - end if - weightsOnEdge(i,j) = 
s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - endif - - prev_edge = next_edge - end do - end do - -!---- - - if(tdrtest) then - - write(6,*) ' testing tdr ' - - areaTriangle = 0.0 - areaCell = 0.0 - do i=1,nVertices - do ii=1,3 - jj = cellsOnVertex(ii,i) - areaCell(jj) = areaCell(jj) + kiteAreasOnVertex(ii,i) - areaTriangle(i) = areaTriangle(i) + kiteAreasOnVertex(ii,i) - enddo - end do - - do j=1,nEdges - de = dcEdge(j) - prev_edge = j - sum_r = 0.0 - do i=1,nEdgesOnCell(cellsOnEdge(1,j))-1 - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - stop - end if - - do ii=1,3 - if(cellsOnVertex(ii,vtx).eq.cellsOnEdge(1,j)) then - area = kiteAreasOnVertex(ii,vtx) - exit - endif - enddo - - r = area / areaCell(cellsOnEdge(1,j)) - sum_r = sum_r + r - if (cellsOnEdge(1,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = 1.0 - else - s = -1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - prev_edge = next_edge - end do - - prev_edge = j - sum_r = 0.0 - do i=nEdgesOnCell(cellsOnEdge(1,j)),nEdgesOnEdge(j) - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. 
& - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - stop - end if - - do ii=1,3 - if(cellsOnVertex(ii,vtx).eq.cellsOnEdge(2,j)) then - area = kiteAreasOnVertex(ii,vtx) - exit - endif - enddo - - r = area / areaCell(cellsOnEdge(2,j)) - sum_r = sum_r + r - if (cellsOnEdge(2,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = -1.0 - else - s = 1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - prev_edge = next_edge - end do - end do - - write(6,*) ' areas ', minval(areaCell), maxval(areaCell), maxval(areaCell)/minval(areaCell) - write(6,*) ' dcEdge ', minval(dcEdge), maxval(dcEdge), maxval(dcEdge)/minval(dcEdge) - - endif ! tdrtest - -!----- - - do i=1,nEdges - edgeGP % lat = latEdge(i) - edgeGP % lon = lonEdge(i) - vertex1GP % lat = latVertex(verticesOnEdge(1,i)) - vertex1GP % lon = lonVertex(verticesOnEdge(1,i)) - vertex2GP % lat = latVertex(verticesOnEdge(2,i)) - vertex2GP % lon = lonVertex(verticesOnEdge(2,i)) - dv1Edge(i) = sphere_distance(edgeGP, vertex1GP, 1.0) - dv2Edge(i) = sphere_distance(edgeGP, vertex2GP, 1.0) - end do - - - write(0,*) 'There are ', nCells, ' cells and ', nEdges, ' edges' - write(0,*) ' ' - write(0,*) 'Total area of all cells is ',sum(areaCell(:)) - write(0,*) 'Total triangle area=',sum(areaTriangle(:)) - write(0,*) 'Total kite area=',sum(kiteAreasOnVertex(:,:)) - - - do i=1,nCells - cell1GP % lat = latCell(i) - cell1GP % lon = lonCell(i) - call convert_lx(xCell(i), yCell(i), zCell(i), 1.0, cell1GP) - end do - do i=1,nVertices - vertex1GP % lat = latVertex(i) - vertex1GP % lon = lonVertex(i) - call convert_lx(xVertex(i), yVertex(i), zVertex(i), 1.0, vertex1GP) - end do - do i=1,nEdges - edgeGP % lat = latEdge(i) - edgeGP % lon = lonEdge(i) - call convert_lx(xEdge(i), yEdge(i), zEdge(i), 1.0, edgeGP) - end do - - - nVertLevels = 1 - nTracers = 1 - - allocate(indexToCellID(nCells)) - allocate(indexToEdgeID(nEdges)) - 
allocate(indexToVertexID(nVertices)) - allocate(fEdge(nEdges)) - allocate(fVertex(nVertices)) - allocate(h_s(nCells)) - allocate(u(nVertLevels,nEdges,1)) - allocate(v(nVertLevels,nEdges,1)) - allocate(h(nVertLevels,nCells,1)) - allocate(vh(nVertLevels,nEdges,1)) - allocate(circulation(nVertLevels,nVertices,1)) - allocate(vorticity(nVertLevels,nVertices,1)) - allocate(ke(nVertLevels,nCells,1)) - allocate(tracers(nTracers,nVertLevels,nCells,1)) - - u(1,:,1) = u_sbr(:) - h(:,:,1) = 1000.0 - v(:,:,1) = 0.0 - vh(:,:,1) = 0.0 - circulation(:,:,1) = 0.0 - vorticity(:,:,1) = 0.0 - ke(:,:,1) = 0.0 - tracers(:,:,:,1) = 0.0 - - h_s(:) = 0.0 - do i=1,nCells - indexToCellID(i) = i - end do - do i=1,nEdges - fEdge(i) = 2.0 * 7.292E-5 * sin(latEdge(i)) - indexToEdgeID(i) = i - end do - do i=1,nVertices - fVertex(i) = 2.0 * 7.292E-5 * sin(latVertex(i)) - indexToVertexID(i) = i - end do - - do i=1,nCells - pCell%lat = latCell(i) - pCell%lon = lonCell(i) - if(pCell%lon.gt. pii) pCell%lon=pCell%lon-2.0*pii - if(pCell%lon.ge.-pii) pCell%lon=pCell%lon+2.0*pii - meshDensity(i) = density_for_point(pCell) - enddo - - call write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - nVertLevels, & - nTracers & - ) - - call write_netcdf_fields( & - 1, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dv1Edge, & - dv2Edge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - h_s, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & - ) - - call write_netcdf_finalize() - 
- - ! - ! Write out a file compatible with metis for block decomposition - ! - open(42,file='graph.info',form='formatted') - write(42,*) nCells, nEdges - do i=1,nCells - do j=1,nEdgesOnCell(i) - write(42,'(1x,i8)',advance='no') cellsOnCell(j,i) - end do - write(42,'(1x)') - end do - close(42) - - - ! - ! The following information is written out to permit further refinement - ! of a grid using the grid_gen code - ! - open(22,file='locs.dat.out',form='formatted') - write(22,*) nCells,nEdges - do i=1,nCells - write(22,'(10x,f22.10,f23.10,f23.10)') xCell(i), yCell(i), zCell(i) - end do - do i=1,nEdges - write(22,'(10x,f22.10,f23.10,f23.10)') xEdge(i), yEdge(i), zEdge(i) - end do - ! do i=1,nCells - ! write(22,'(f13.10,1x,f13.10)') latCell(i), lonCell(i) - ! end do - ! do i=1,nEdges - ! write(22,'(f13.10,1x,f13.10)') latEdge(i), lonEdge(i) - ! end do - close(22) - - ! - ! Write out data for visualization in OpenDx - ! - call write_OpenDX( nCells, & - nVertices, & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - nEdgesOnCell, & - verticesOnCell, & - areaCell ) - - - deallocate(indexToCellID) - deallocate(indexToEdgeID) - deallocate(indexToVertexID) - deallocate(fEdge) - deallocate(fVertex) - deallocate(h_s) - deallocate(u) - deallocate(v) - deallocate(h) - deallocate(vh) - deallocate(circulation) - deallocate(vorticity) - deallocate(ke) - deallocate(tracers) - - - deallocate(cursor) - deallocate(latCell) - deallocate(lonCell) - deallocate(xCell) - deallocate(yCell) - deallocate(zCell) - deallocate(latEdge) - deallocate(lonEdge) - deallocate(xEdge) - deallocate(yEdge) - deallocate(zEdge) - deallocate(dvEdge) - deallocate(dv1Edge) - deallocate(dv2Edge) - deallocate(dcEdge) - deallocate(areaCell) - deallocate(areaTriangle) - deallocate(angleEdge) - deallocate(u_sbr) - deallocate(latVertex) - deallocate(lonVertex) - deallocate(xVertex) - deallocate(yVertex) - deallocate(zVertex) - deallocate(lat1Edge) - deallocate(lon1Edge) - deallocate(lat2Edge) - 
deallocate(lon2Edge) - deallocate(cellsOnEdge) - deallocate(edgesOnCell) - deallocate(verticesOnCell) - deallocate(verticesOnEdge) - deallocate(edgesOnEdge) - deallocate(edgesOnVertex) - deallocate(cellsOnVertex) - deallocate(tempEdgesOnEdge) - deallocate(weightsOnEdge) - deallocate(kiteAreasOnVertex) - deallocate(cellsOnCell) - deallocate(nEdgesOnCell) - deallocate(nEdgesOnEdge) - deallocate(nEdgesOnVertex) - deallocate(nCellsOnVertex) - - end subroutine write_grid - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE INSERT_EDGE_TO_TREE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine insert_edge_to_tree(cellID, vertex1ID, vertex2ID, lat1, lon1, lat2, lon2, root) - - use data_types - - implicit none - - integer, intent(in) :: cellID - integer, intent(in) :: vertex1ID, vertex2ID - real, intent(in) :: lat1, lon1, lat2, lon2 - type (binary_tree), pointer :: root - - real :: tLat1, tLat2, tLon1, tLon2 - integer :: tID1, tID2 - logical :: found - type (binary_tree), pointer :: pre_cursor, cursor - - if (point_compare(lat1, lon1, lat2, lon2) > 0) then - tLat1 = lat1 - tLon1 = lon1 - tLat2 = lat2 - tLon2 = lon2 - tID1 = vertex1ID - tID2 = vertex2ID - else - tLat1 = lat2 - tLon1 = lon2 - tLat2 = lat1 - tLon2 = lon1 - tID1 = vertex2ID - tID2 = vertex1ID - end if - - if (.not. associated(root)) then - allocate(root) - root % lat1 = tLat1 - root % lon1 = tLon1 - root % lat2 = tLat2 - root % lon2 = tLon2 - root % vertex1 = tID1 - root % vertex2 = tID2 - root % node1 = cellID - nullify(root % left) - nullify(root % right) - nullify(root % parent) - - else - - found = .false. - cursor => root - do while (associated(cursor) .and. .not. found) - if (edge_compare(tLat1, tLon1, tLat2, tLon2, cursor % lat1, cursor % lon1, cursor % lat2, cursor % lon2) == 0) then - found = .true. 
- else if (edge_compare(tLat1, tLon1, tLat2, tLon2, cursor % lat1, cursor % lon1, cursor % lat2, cursor % lon2) > 0) then - pre_cursor => cursor - cursor => cursor % right - else - pre_cursor => cursor - cursor => cursor % left - end if - end do - - if (.not. found) then - if (edge_compare(tLat1, tLon1, tLat2, tLon2, pre_cursor % lat1, pre_cursor % lon1, pre_cursor % lat2, pre_cursor % lon2) > 0) then - allocate(pre_cursor % right) - cursor => pre_cursor % right - else - allocate(pre_cursor % left) - cursor => pre_cursor % left - end if - cursor % lat1 = tLat1 - cursor % lon1 = tLon1 - cursor % lat2 = tLat2 - cursor % lon2 = tLon2 - cursor % vertex1 = tID1 - cursor % vertex2 = tID2 - cursor % node1 = cellID - cursor % parent => pre_cursor - nullify(cursor % left) - nullify(cursor % right) - else - cursor % node2 = cellID - end if - - end if - - end subroutine insert_edge_to_tree - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! FUNCTION POINT_COMPARE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - integer function point_compare(lat1, lon1, lat2, lon2) - - implicit none - - real, intent(in) :: lat1, lon1, lat2, lon2 - - point_compare = -1 - - if (lat1 > lat2) then - point_compare = 1 - else if (lat1 == lat2) then - if (lon1 > lon2) then - point_compare = 1 - else if (lon1 == lon2) then - point_compare = 0 - end if - end if - - end function point_compare - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! FUNCTION EDGE_COMPARE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- integer function edge_compare(aLat1, aLon1, aLat2, aLon2, bLat1, bLon1, bLat2, bLon2) - - implicit none - - real, intent(in) :: aLat1, aLon1, aLat2, aLon2, bLat1, bLon1, bLat2, bLon2 - - edge_compare = -1 - - if (point_compare(aLat1,aLon1,bLat1,bLon1) > 0) then - edge_compare = 1 - else if (point_compare(aLat1,aLon1,bLat1,bLon1) == 0) then - if (point_compare(aLat2,aLon2,bLat2,bLon2) > 0) then - edge_compare = 1 - else if (point_compare(aLat2,aLon2,bLat2,bLon2) == 0) then - edge_compare = 0 - end if - end if - - end function edge_compare - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_EDGE_LATLON - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_edge_latlon(cell1, cell2, vertex1, vertex2, edge) - - use data_types - use sphere_utilities - - implicit none - - type (geo_point), intent(in) :: cell1, cell2, vertex1, vertex2 - type (geo_point), intent(out) :: edge - - call gc_intersect(cell1, cell2, vertex1, vertex2, edge) - - end subroutine compute_edge_latlon - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE is_flipped_vertex_order - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- logical function is_flipped_vertex_order(latCell1, lonCell1, & - latCell2, lonCell2, & - latVertex1, lonVertex1, & - latVertex2, lonVertex2) - - use data_types - use sphere_utilities - - implicit none - - real, intent(in) :: latCell1, lonCell1, & - latCell2, lonCell2, & - latVertex1, lonVertex1, & - latVertex2, lonVertex2 - - real :: xCell1, yCell1, zCell1 - real :: xCell2, yCell2, zCell2 - real :: xVertex1, yVertex1, zVertex1 - real :: xVertex2, yVertex2, zVertex2 - real :: xV1, yV1, zV1 - real :: xV2, yV2, zV2 - real :: ci, cj, ck - type (geo_point) :: cell1, cell2, vertex1, vertex2 - - cell1 % lat = latCell1 - cell1 % lon = lonCell1 - cell2 % lat = latCell2 - cell2 % lon = lonCell2 - vertex1 % lat = latVertex1 - vertex1 % lon = lonVertex1 - vertex2 % lat = latVertex2 - vertex2 % lon = lonVertex2 - - call convert_lx(xCell1, yCell1, zCell1, 1.0, cell1) - call convert_lx(xCell2, yCell2, zCell2, 1.0, cell2) - call convert_lx(xVertex1, yVertex1, zVertex1, 1.0, vertex1) - call convert_lx(xVertex2, yVertex2, zVertex2, 1.0, vertex2) - - xV1 = xCell2 - xCell1 - yV1 = yCell2 - yCell1 - zV1 = zCell2 - zCell1 - xV2 = xVertex2 - xVertex1 - yV2 = yVertex2 - yVertex1 - zV2 = zVertex2 - zVertex1 - - ci = yV1*zV2 - zV1*yV2 - cj = zV1*xV2 - xV1*zV2 - ck = xV1*yV2 - yV1*xV2 - - if ((ci*xCell1 + cj*yCell1 + ck*zCell1) >= 0.0) then - is_flipped_vertex_order = .false. - else - is_flipped_vertex_order = .true. - end if - - end function is_flipped_vertex_order - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE is_flipped_vertex_order2 - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- logical function is_flipped_vertex_order2(edge, cell2, vertex2) - - use data_types - use sphere_utilities - use grid_constants - - implicit none - - type (geo_point), intent(in) :: edge, cell2, vertex2 - - real :: xEdge, yEdge, zEdge - real :: xCell2, yCell2, zCell2 - real :: xVertex2, yVertex2, zVertex2 - real :: angle - - call convert_lx(xEdge, yEdge, zEdge, 1.0, edge) - call convert_lx(xCell2, yCell2, zCell2, 1.0, cell2) - call convert_lx(xVertex2, yVertex2, zVertex2, 1.0, vertex2) - - angle = plane_angle(xEdge, yEdge, zEdge, & - xCell2, yCell2, zCell2, & - xVertex2, yVertex2, zVertex2, & - xEdge, yEdge, zEdge) - - if (angle > 0.0 .and. angle < pii) then - is_flipped_vertex_order2 = .false. - else - is_flipped_vertex_order2 = .true. - end if - - end function is_flipped_vertex_order2 - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE ORDER_POINTS_CCW - ! - ! Given a center around which the ordering should be done, the array of points - ! is re-ordered in CCW order, taking the first point in the array to be the - ! first point in the ordering, and the vector from the origin to center - ! as the normal vector of the suface containing the points at the center. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine order_points_ccw(center, npts, points, permutation) - - use data_types - use sphere_utilities - use grid_constants - - implicit none - - type (geo_point), intent(in) :: center - integer, intent(in) :: npts - integer, dimension(npts), intent(inout) :: permutation - type (geo_point), dimension(npts), intent(inout) :: points - - integer :: i, j - integer :: itemp - real :: rtemp - real :: nx, ny, nz - real :: px, py, pz - real :: p0x, p0y, p0z - real, dimension(npts) :: angle - type (geo_point) :: ptemp - - call convert_lx(nx, ny, nz, 1.0, center) - call convert_lx(p0x, p0y, p0z, 1.0, points(1)) - - angle(1) = 0.0 - - do i=2,npts - call convert_lx(px, py, pz, 1.0, points(i)) - angle(i) = plane_angle(nx, ny, nz, p0x, p0y, p0z, px, py, pz, nx, ny, nz) - if (angle(i) < 0.0) angle(i) = angle(i) + 2.0*pii - if (angle(i) > 2.0*pii) angle(i) = angle(i) - 2.0*pii - end do - - do i=2,npts - do j=i+1,npts - if (angle(j) < angle(i)) then - rtemp = angle(i) - angle(i) = angle(j) - angle(j) = rtemp - - itemp = permutation(i) - permutation(i) = permutation(j) - permutation(j) = itemp - - ptemp = points(i) - points(i) = points(j) - points(j) = ptemp - end if - end do - end do - - end subroutine order_points_ccw - - subroutine write_OpenDX( nCells, & - nVertices, & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - nEdgesOnCell, & - verticesOnCell, & - areaCell ) - - integer, intent(in) :: nCells - integer, intent(in) :: nVertices - real (kind=RKIND), dimension(:), intent(in) :: xCell - real (kind=RKIND), dimension(:), intent(in) :: yCell - real (kind=RKIND), dimension(:), intent(in) :: zCell - real (kind=RKIND), dimension(:), intent(in) :: xVertex - real (kind=RKIND), dimension(:), intent(in) :: yVertex - real (kind=RKIND), dimension(:), intent(in) :: zVertex - integer, dimension(:), intent(in) :: nEdgesOnCell - integer, dimension(:,:), intent(in) :: verticesOnCell - real (kind=RKIND), dimension(:), intent(in) :: areaCell - - character(len=80) :: 
a, b, c, d, e, f - integer :: i, j, k, nVerticesTotal, iEdge, iLoop - - nVerticesTotal = 0 - do i=1,nCells - nVerticesTotal = nVerticesTotal + nEdgesOnCell(i) - enddo - - open(unit=1,file='dx/voronoi.dx',form='formatted',status='unknown') - - a = trim('object "positions list" class array type float rank 1 shape 3 items') - b = trim('ascii data file vor.position.data') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,*) - 10 format(a70,i10) - - a = trim('object "edge list" class array type int rank 0 items') - b = trim('ascii data file vor.edge.data') - c = trim('attribute "ref" string "positions"') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "loops list" class array type int rank 0 items') - b = trim('ascii data file vor.loop.data') - c = trim('attribute "ref" string "edges"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "face list" class array type int rank 0 items') - b = trim('ascii data file vor.face.data') - c = trim('attribute "ref" string "loops"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object 0 class array type float rank 0 items') - b = trim('data file vor.area.data') - c = trim('attribute "dep" string "faces"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "area" class field') - b = trim('component "positions" "positions list"') - c = trim('component "edges" "edge list"') - d = trim('component "loops" "loops list"') - e = trim('component "faces" "face list"') - f = trim('component "data" 0') - write(1,10) a - write(1,10) b - write(1,10) c - write(1,10) d - write(1,10) e - write(1,10) f - - close(1) - - open(unit=10,file='dx/vor.area.data',form='formatted',status='unknown') - open(unit=11,file='dx/vor.face.data',form='formatted',status='unknown') - open(unit=12,file='dx/vor.loop.data',form='formatted',status='unknown') - 
open(unit=13,file='dx/vor.edge.data',form='formatted',status='unknown') - open(unit=14,file='dx/vor.position.data',form='formatted',status='unknown') - - iLoop = 0 - iEdge = 0 - do i=1,nCells - write(10,20) areaCell(i) - write(11,21) i-1 - write(12,21) iLoop - iLoop = iLoop + nEdgesOnCell(i) - do j=1,nEdgesOnCell(i) - write(13,21) iEdge - iEdge = iEdge + 1 - k = verticesOnCell(j,i) - write(14,22) xVertex(k), yVertex(k), zVertex(k) - enddo - enddo - - 20 format(e20.10) - 21 format(i20) - 22 format(3e20.10) - - close(10) - close(11) - close(12) - close(13) - close(14) - - - end subroutine write_OpenDX - - -end module grid_meta diff --git a/grid_gen/global_scvt/src/module_grid_params.F b/grid_gen/global_scvt/src/module_grid_params.F deleted file mode 100644 index 58c2717dd..000000000 --- a/grid_gen/global_scvt/src/module_grid_params.F +++ /dev/null @@ -1,43 +0,0 @@ -module grid_params - - integer :: np - logical :: locs_as_xyz - logical :: l2_conv, inf_conv - integer :: n_scvt_iterations - real :: eps - real :: min_dx - - contains - - subroutine read_namelist() - - implicit none - - integer :: funit - real :: pi - - namelist /domains/ np, locs_as_xyz, n_scvt_iterations, eps, l2_conv, inf_conv, min_dx - - pi = 4.0*atan(1.0) - - funit = 21 - - np = 40962 - locs_as_xyz = .true. - n_scvt_iterations = 0 - eps = 0.0000000001 - l2_conv = .true. - inf_conv = .false. - min_dx = 120000.0 - - open(funit,file='namelist.input',status='old',form='formatted') - read(funit,domains) - close(funit) - - if(l2_conv) then - inf_conv = .false. 
- endif - - end subroutine read_namelist - -end module grid_params diff --git a/grid_gen/global_scvt/src/module_scvt.F b/grid_gen/global_scvt/src/module_scvt.F deleted file mode 100644 index e414d56eb..000000000 --- a/grid_gen/global_scvt/src/module_scvt.F +++ /dev/null @@ -1,276 +0,0 @@ -module scvt - - use data_types - use sphere_utilities - use voronoi_utils - use grid_constants - use grid_params - - - contains - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE SCVT_SOLVE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine scvt_solve(n, lend, rlat, rlon, nvc, list, lptr, fn) - - implicit none - - integer, intent(in) :: n, nvc, fn - integer, dimension(n), intent(inout) :: lend - integer, dimension(nvc), intent(inout) :: list, lptr - real, dimension(n), intent(inout) :: rlat, rlon - - integer :: maxitr - - integer :: i, j, k, iter - integer :: ntmax, nrow, nptri - integer, allocatable, dimension(:) :: listc - real :: area, density, tot_mass - real :: x, y, z, new_ctr_x, new_ctr_y, new_ctr_z - real, allocatable, dimension(:) :: vclat, vclon - real, allocatable, dimension(:) :: rlat_2, rlon_2 - type (geo_point) :: p1, p2, p3, pc - type (geo_point) :: p_n1, p_n2 - type (geo_point), dimension(3,64) :: ptri - real :: avg_movement, maxmovement, movement - logical converged - - maxitr = n_scvt_iterations - - maxmovement = 100000 - - - ntmax = 6*n - nrow = 6 - nptri = 64 - allocate(listc(nvc)) - allocate(vclat(nvc)) - allocate(vclon(nvc)) - allocate(rlat_2(n)) - allocate(rlon_2(n)) - - iter = 1 - converged = .false. - - do while (iter <= maxitr .and. .not.converged) - - !write(0,*) 'scvt iteration ',iter - - ! - ! Compute Voronoi corners - ! - call compute_vc(rlat, rlon, n, nrow, ntmax, list, lptr, lend, listc, vclat, vclon, nvc) - - ! - ! Loop over vertices - ! Within the loop, p0 always refers to the current vertex being processed - ! 
-!$OMP PARALLEL DO PRIVATE(I, J, K, NEW_CTR_X, NEW_CTR_Y, NEW_CTR_Z, TOT_MASS, P1, P2, P3, PC, AREA, DENSITY, X, Y, Z, PTRI) SHARED(RLAT, RLON, RLAT_2, RLON_2, LPTR, LEND, LISTC, VCLAT, VCLON, NPTRI) - do i=1,n - - new_ctr_x = 0.0 - new_ctr_y = 0.0 - new_ctr_z = 0.0 - tot_mass = 0.0 - - ! - ! Compute center of mass of Voronoi cell - ! - p1%lat = rlat(i) - p1%lon = rlon(i) - - k = lend(i) - p2%lat = vclat(listc(k)) - p2%lon = vclon(listc(k)) - if (p1%lon - p2%lon > pii) p2%lon = p2%lon + 2.0*pii - if (p1%lon - p2%lon < -pii) p2%lon = p2%lon - 2.0*pii - k = lptr(lend(i)) - p3%lat = vclat(listc(k)) - p3%lon = vclon(listc(k)) - if (p1%lon - p3%lon > pii) p3%lon = p3%lon + 2.0*pii - if (p1%lon - p3%lon < -pii) p3%lon = p3%lon - 2.0*pii - - call divide_triangle(p1, p2, p3, nptri, ptri) - do j=1,nptri - area = triangle_area(ptri(1,j), ptri(2,j), ptri(3,j), 1.0) - call center_of_mass(ptri(1,j), ptri(2,j), ptri(3,j), pc) - if (p1%lon - pc%lon > pii) pc%lon = pc%lon + 2.0*pii - if (p1%lon - pc%lon < -pii) pc%lon = pc%lon - 2.0*pii - density = density_for_point(pc) - tot_mass = tot_mass + area * density - - call convert_lx(x, y, z, 1.0, pc) - new_ctr_x = new_ctr_x + x*area*density - new_ctr_y = new_ctr_y + y*area*density - new_ctr_z = new_ctr_z + z*area*density - end do - - do while (k /= lend(i)) - k = lptr(k) - p2 = p3 - p3%lat = vclat(listc(k)) - p3%lon = vclon(listc(k)) - if (p1%lon - p3%lon > pii) p3%lon = p3%lon + 2.0*pii - if (p1%lon - p3%lon < -pii) p3%lon = p3%lon - 2.0*pii - if (abs(p2%lat - p3%lat) < 0.00001 .and. 
abs(p2%lon - p3%lon) < 0.00001) cycle - - - call divide_triangle(p1, p2, p3, nptri, ptri) - do j=1,nptri - area = triangle_area(ptri(1,j), ptri(2,j), ptri(3,j), 1.0) - call center_of_mass(ptri(1,j), ptri(2,j), ptri(3,j), pc) - if (p1%lon - pc%lon > pii) pc%lon = pc%lon + 2.0*pii - if (p1%lon - pc%lon < -pii) pc%lon = pc%lon - 2.0*pii - density = density_for_point(pc) - tot_mass = tot_mass + area * density - - call convert_lx(x, y, z, 1.0, pc) - new_ctr_x = new_ctr_x + x*area*density - new_ctr_y = new_ctr_y + y*area*density - new_ctr_z = new_ctr_z + z*area*density - end do - end do - - new_ctr_x = new_ctr_x / tot_mass - new_ctr_y = new_ctr_y / tot_mass - new_ctr_z = new_ctr_z / tot_mass - call convert_xl(new_ctr_x, new_ctr_y, new_ctr_z, pc) - rlat_2(i) = pc%lat - rlon_2(i) = pc%lon - - - end do -!$OMP END PARALLEL DO - - !Compute movement - if(mod(iter,100).eq.0) then - maxmovement = 0.0 - avg_movement = 0.0 - do i = 1,n - - p_n1%lat = rlat(i) - p_n1%lon = rlon(i) - p_n2%lat = rlat_2(i) - p_n2%lon = rlon_2(i) - - call convert_lx(x,y,z,1.0,p_n1) - call convert_lx(new_ctr_x, new_ctr_y, new_ctr_z,1.0,p_n2) - - !x y z computation - movement = sqrt((x - new_ctr_x)**2 + (y - new_ctr_y)**2 + (z - new_ctr_z)**2) - - if(movement > maxmovement) maxmovement = movement - avg_movement = avg_movement + movement/n - - enddo - if(avg_movement.lt.eps.and.l2_conv) converged=.true. - if(avg_movement.lt.eps.and.inf_conv) converged=.true. - write(6,*) n, iter, maxmovement, avg_movement - endif - - rlat(:) = rlat_2(:) - rlon(:) = rlon_2(:) - iter = iter + 1 - - end do - - deallocate(listc) - deallocate(vclat) - deallocate(vclon) - deallocate(rlat_2) - deallocate(rlon_2) - - if (maxitr > 0) write(0,*) 'Finished SCVT solve' - - end subroutine scvt_solve - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE RANDOM_POINT - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine random_point(p) - - type (geo_point), intent(inout) :: p - real :: x, y, z, m - real :: pi - - pi = 4.0*atan(1.0) - - x = 0.0 - y = 0.0 - z = 0.0 - m = 2.0 - - do while (m > 1.0 .or. (x == 0.0 .and. y == 0.0 .and. z == 0.0)) - call random_number(x) - call random_number(y) - call random_number(z) - x = x * 2.0 - 1.0 - y = y * 2.0 - 1.0 - z = z * 2.0 - 1.0 - m = x**2 + y**2 + z**2 - end do - - m = 1.0 / sqrt(m) - x = x * m - y = y * m - z = z * m - - call convert_xl(x, y, z, p) - - end subroutine random_point - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! FUNCTION DENSITY_FOR_POINT - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - real function density_for_point(p) - - implicit none - - type (geo_point), intent(in) :: p - - character (len=256) :: fname - real :: rx, ry, rz, prx, pry, prz - type (geo_point) :: p_local - real :: hgt - real :: r, norm, t_cent - real :: r1 - real :: pi - real :: width, trans_center, min_val - - pi = 4.0*atan(1.0) - - !density_for_point = 1.0 + (1.19*cos(p%lat-3.141592654/4.0))**16.0 - - ! Uniform Density Function - density_for_point = 1.0 - - !Target Density Function based on hyperbolic tangent - ! p_local%lat = latitude (radians) center of high-resolution region - ! p_local%lon = longitude (radians) center of high-resolution region - ! width = width of transition zone - ! trans_center = width (radians) of high resolution zone - ! minval = minimum density value. to have grid spacing vary by a factor of 8 - ! set minval = (1.0 / 8.0)**4 - - ! p_local%lat = pii/4.0 - ! p_local%lon = 1.25*pii - ! call convert_lx(rx, ry, rz, 1.0, p) - ! call convert_lx(prx, pry, prz, 1.0, p_local) - ! r = acos(rx*prx + ry*pry + rz*prz) - - ! width = 0.15 - ! trans_center = pi/6.0 - ! min_val = (1.0/8.0)**4 - ! norm = 1.0/(1.0-min_val) - ! 
density_for_point = ((tanh((trans_center-r)*(1.0/width))+1.0)/2)/norm + min_val - - end function density_for_point - -end module scvt diff --git a/grid_gen/global_scvt/src/module_sphere_utilities.F b/grid_gen/global_scvt/src/module_sphere_utilities.F deleted file mode 100644 index f0026bec7..000000000 --- a/grid_gen/global_scvt/src/module_sphere_utilities.F +++ /dev/null @@ -1,959 +0,0 @@ -module sphere_utilities - - contains - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION TRIANGLE_AREA -! -! Given the (latitude, longitude) coordinates of the corners of a triangle, -! plus the radius of the sphere, compute the area of the triangle. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function triangle_area(p1, p2, p3, radius) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - real, intent(in) :: radius - - real :: a, b, c, s, e, pii, tanqe - - pii = 2.*asin(1.0) - - a = sphere_distance(p1,p2,radius) - b = sphere_distance(p2,p3,radius) - c = sphere_distance(p3,p1,radius) - s = 0.5*(a+b+c) - - tanqe = sqrt(tan(0.5*s)*tan(0.5*(s-a))*tan(0.5*(s-b))*tan(0.5*(s-c))) - e = 4.*atan(tanqe) - triangle_area = radius*radius*e - -end function triangle_area - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION OBTUSE -! -! Given the (latitude, longitude) coordinates of the corners of a triangle, -! determine if the triangle is obtuse -! -! obtuse.ne.0 then the triangle is obtuse -! value of 1,2,3 means that angle associated with p1,p2,p3 is > 90 -! obtuse = 0 then the triangle is not obtuse -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-integer function obtuse(p1, p2, p3) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - - real :: x1(3), x2(3), x3(3), dot, r(3), s(3), rmag, smag - - obtuse = 0 - - call convert_lx(x1(1), x1(2), x1(3), 1.0, p1) - call convert_lx(x2(1), x2(2), x2(3), 1.0, p2) - call convert_lx(x3(1), x3(2), x3(3), 1.0, p3) - - ! test angle formed by x3,x1,x2 - r(:) = x3(:) - x1(:) - s(:) = x2(:) - x1(:) - rmag = sqrt(r(1)**2+r(2)**2+r(3)**2) - smag = sqrt(s(1)**2+s(2)**2+s(3)**2) - r(:) = r(:) / rmag - s(:) = s(:) / smag - dot = r(1)*s(1) + r(2)*s(2) + r(3)*s(3) - if(dot.lt.0) obtuse = 1 - - ! test angle formed by x1,x2,x3 - r(:) = x1(:) - x2(:) - s(:) = x3(:) - x2(:) - rmag = sqrt(r(1)**2+r(2)**2+r(3)**2) - smag = sqrt(s(1)**2+s(2)**2+s(3)**2) - r(:) = r(:) / rmag - s(:) = s(:) / smag - dot = r(1)*s(1) + r(2)*s(2) + r(3)*s(3) - if(dot.lt.0) obtuse = 2 - - ! test angle formed by x2,x3,x1 - r(:) = x2(:) - x3(:) - s(:) = x1(:) - x3(:) - rmag = sqrt(r(1)**2+r(2)**2+r(3)**2) - smag = sqrt(s(1)**2+s(2)**2+s(3)**2) - r(:) = r(:) / rmag - s(:) = s(:) / smag - dot = r(1)*s(1) + r(2)*s(2) + r(3)*s(3) - if(dot.lt.0) obtuse = 3 - -end function obtuse - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION SPHERE_DISTANCE -! -! Given two (latitude, longitude) coordinates on the surface of a sphere, -! plus the radius of the sphere, compute the great circle distance between -! the points. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function sphere_distance(p1, p2, radius) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - real, intent(in) :: radius - - real :: arg1 - - arg1 = sqrt( sin(0.5*(p2%lat-p1%lat))**2 + & - cos(p1%lat)*cos(p2%lat)*sin(0.5*(p2%lon-p1%lon))**2 ) - sphere_distance = 2.*radius*asin(arg1) - -end function sphere_distance - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! 
FUNCTION PLANE_DISTANCE -! -! Given two (latitude, longitude) coordinates on the surface of a sphere, -! plus the radius of the sphere, compute the secant distance between -! the points. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function plane_distance(p1, p2, radius) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - real, intent(in) :: radius - - real :: x1, x2, y1, y2, z1, z2 - - z1 = sin(p1%lat) - z2 = sin(p2%lat) - x1 = cos(p1%lon)*cos(p1%lat) - x2 = cos(p2%lon)*cos(p2%lat) - y1 = sin(p1%lon)*cos(p1%lat) - y2 = sin(p2%lon)*cos(p2%lat) - - plane_distance = radius*sqrt((z1-z2)**2+(x1-x2)**2+(y1-y2)**2) - -end function plane_distance - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION ARC_ANGLE -! -! Given two (latitude, longitude) coordinates on the surface of a sphere, -! compute the angle between the points as measured from the origin of the -! sphere. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function arc_angle(p1, p2) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - - real :: arg1 - - arg1 = sqrt( sin(0.5*(p2%lat-p1%lat))**2 + & - cos(p1%lat)*cos(p2%lat)*sin(0.5*(p2%lon-p1%lon))**2 ) - arc_angle = 2.*asin(arg1) - -end function arc_angle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION GREAT_CIRCLE_POINTS -! -! Return n points equally spaced along the great circle arc between (lat1,lon1) -! and (lat2,lon2). These points include the end points of the arc. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-subroutine great_circle_points(p1, p2, pl, n) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), intent(in) :: p1, p2 - type (geo_point), dimension(n), intent(inout) :: pl - - real :: x1, x2, y1, y2, z1, z2 - real :: dx, dl - real :: x, y, z - integer :: i - real :: dtheta, dinc, dt - - real :: pii, rtod - - pii = 2.*asin(1.0) - rtod = 180./pii - -! write(6,*) ' in gcp ',rtod*lat1,rtod*lon1,rtod*lat2,rtod*lon2 - - if (n < 2) then - write(6,*) ' n less than 2 in great_circle_points ' - stop - end if - - if (n == 2) then - pl(1) = p1 - pl(2) = p2 - end if - - dtheta = arc_angle(p1, p2) - dinc = dtheta/float(n-1) - - call convert_lx(x1,y1,z1,1.,p1) - call convert_lx(x2,y2,z2,1.,p2) - -! set the end points - - pl(1) = p1 - pl(n) = p2 - -! write(6,*) ' x1,y1,z1 ',x1,y1,z1 -! write(6,*) ' x2,y2,z2 ',x2,y2,z2 - -! compute the interior points. see notes for derivation - - do i=2,n-1 - dt = float(i-1)*dinc - - if (dt <= 0.5*dtheta) then - dx = 1.-tan(0.5*dtheta-dt)/tan(0.5*dtheta) -! write(6,*) ' case 1 ',dx - x = x1+0.5*dx*(x2-x1) - y = y1+0.5*dx*(y2-y1) - z = z1+0.5*dx*(z2-z1) - else - dt = dtheta-dt - dx = 1.-tan(0.5*dtheta-dt)/tan(0.5*dtheta) -! write(6,*) ' case 2 ',dx - x = x2+0.5*dx*(x1-x2) - y = y2+0.5*dx*(y1-y2) - z = z2+0.5*dx*(z1-z2) - end if - -! write(6,*) ' x,y,z ',x,y,z - - call convert_xl(x,y,z,pl(i)) - enddo - -end subroutine great_circle_points - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE DIVIDE_TRIANGLE -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!subroutine divide_triangle( p1, p2, p3, pnew) -! -! use data_types -! -! implicit none -! -! type (geo_point), intent(in) :: p1, p2, p3 -! type (geo_point), dimension(6), intent(inout) :: pnew -! -! real :: t_area, area_total, radius -! type (geo_point), dimension(3) :: pts -! type (geo_point) :: c -! -! radius = 1. -! pnew(1) = p1 -! pnew(4) = p2 -! pnew(6) = p3 -! -! 
call great_circle_points(p1,p2,pts,3) -! pnew(2) = pts(2) -! -! call great_circle_points(p1,p3,pts,3) -! pnew(3) = pts(2) -! -! call great_circle_points(p2,p3,pts,3) -! pnew(5) = pts(2) -! -! -! write(6,*) ' ' -! write(6,*) ' original triangle ' -! write(6,*) p1%lat, p1%lon -! write(6,*) p2%lat, p2%lon -! write(6,*) p3%lat, p3%lon -! -! t_area = triangle_area(p1,p2,p3,radius) -! write(6,*) ' area ',t_area -! call compute_voronoi_corner(p1,p2,p3,c) -! write(6,*) ' voronoi corner ',c%lat,c%lon -! -! area_total = 0. -! -! write(6,*) ' ' -! write(6,*) ' new triangles ' -! -! write(6,*) ' triangle 1 ' -! write(6,*) pnew(1)%lat,pnew(1)%lon -! write(6,*) pnew(1)%lat,pnew(2)%lon -! write(6,*) pnew(1)%lat,pnew(3)%lon -! t_area = triangle_area( pnew(1),pnew(2),pnew(3),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! -! write(6,*) ' triangle 2 ' -! write(6,*) pnew(2)%lat,pnew(2)%lon -! write(6,*) pnew(4)%lat,pnew(4)%lon -! write(6,*) pnew(5)%lat,pnew(5)%lon -! t_area = triangle_area( pnew(2),pnew(4),pnew(5),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! -! write(6,*) ' triangle 3 ' -! write(6,*) pnew(2)%lat,pnew(2)%lon -! write(6,*) pnew(5)%lat,pnew(5)%lon -! write(6,*) pnew(3)%lat,pnew(3)%lon -! t_area = triangle_area( pnew(2),pnew(5),pnew(3),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! -! write(6,*) ' triangle 4 ' -! write(6,*) pnew(3)%lat,pnew(3)%lon -! write(6,*) pnew(5)%lat,pnew(5)%lon -! write(6,*) pnew(6)%lat,pnew(6)%lon -! t_area = triangle_area( pnew(3),pnew(5),pnew(6),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! write(6,*) ' total area is ',area_total -! -!end subroutine divide_triangle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE COMPUTE_VORONOI_CORNER -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!subroutine compute_voronoi_corner( p0, p1, p2, vc ) -! 
-! use data_types -! -! implicit none -! -! type (geo_point), intent(in) :: p0, p1, p2 -! type (geo_point), intent(out) :: vc -! -! real :: x0, y0, z0, x1, y1, z1, x2, y2, z2, xc, yc, zc, cabs -! real :: a1, a2, a3, b1, b2, b3 -! real :: dot0 -! -! z0 = sin(p0%lat) -! z1 = sin(p1%lat) -! z2 = sin(p2%lat) -! -! x0 = cos(p0%lon)*cos(p0%lat) -! x1 = cos(p1%lon)*cos(p1%lat) -! x2 = cos(p2%lon)*cos(p2%lat) -! -! y0 = sin(p0%lon)*cos(p0%lat) -! y1 = sin(p1%lon)*cos(p1%lat) -! y2 = sin(p2%lon)*cos(p2%lat) -! -! a1 = x2-x0 -! a2 = y2-y0 -! a3 = z2-z0 -! -! b1 = x1-x0 -! b2 = y1-y0 -! b3 = z1-z0 -! -! -! xc = a2*b3-a3*b2 -! yc = a3*b1-a1*b3 -! zc = a1*b2-a2*b1 -! cabs = sqrt(xc*xc+yc*yc+zc*zc) -! -!! write(6,*) ' cabs = ',cabs -!! write(6,*) ' xc, yc, zc = ',xc,yc,zc -!! write(6,*) ' x0, y0, z0 = ',x0,y0,z0 -!! write(6,*) ' x1, y1, z1 = ',x1,y1,z1 -!! write(6,*) ' x2, y2, z2 = ',x2,y2,z2 -! dot0 = x0*xc+y0*yc+z0*zc -!! write(6,*) ' dot is ',dot0 -! -! if( dot0 < 0.) then ! flip p1 with p2 -! -! z2 = sin(p1%lat) -! z1 = sin(p2%lat) -! -! x2 = cos(p1%lon)*cos(p1%lat) -! x1 = cos(p2%lon)*cos(p2%lat) -! -! y2 = sin(p1%lon)*cos(p1%lat) -! y1 = sin(p2%lon)*cos(p2%lat) -! -! a1 = x2-x0 -! a2 = y2-y0 -! a3 = z2-z0 -! -! b1 = x1-x0 -! b2 = y1-y0 -! b3 = z1-z0 -! -! -! xc = a2*b3-a3*b2 -! yc = a3*b1-a1*b3 -! zc = a1*b2-a2*b1 -! cabs = sqrt(xc*xc+yc*yc+zc*zc) -! -!! write(6,*) ' flipping ' -!! write(6,*) ' cabs = ',cabs -!! write(6,*) ' xc, yc, zc = ',xc,yc,zc -! dot0 = x0*xc+y0*yc+z0*zc -!! write(6,*) ' dot is ',dot0 -! -! end if -! -! -! xc = xc/cabs -! yc = yc/cabs -! zc = zc/cabs -! -! call convert_xl(xc,yc,zc,vc) -! -!end subroutine compute_voronoi_corner - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_LX -! -! Convert (lat,lon) to an (x, y, z) location on a sphere with specified radius. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-subroutine convert_lx(x, y, z, radius, latlon) - - use data_types - - implicit none - - real, intent(in) :: radius - type (geo_point), intent(in) :: latlon - real, intent(out) :: x, y, z - - z = radius * sin(latlon%lat) - x = radius * cos(latlon%lon) * cos(latlon%lat) - y = radius * sin(latlon%lon) * cos(latlon%lat) - -end subroutine convert_lx - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_XL -! -! Convert (x, y, z) to a (lat, lon) location on a sphere with -! radius sqrt(x^2 + y^2 + z^2). -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_xl(x, y, z, latlon) - - use data_types - - implicit none - - real, intent(in) :: x, y, z - type (geo_point), intent(out) :: latlon - - real :: dl, clat, pii, rtod - real :: eps - parameter (eps=1.e-10) - - pii = 2.*asin(1.0) - rtod=180./pii - dl = sqrt(x*x + y*y + z*z) - - latlon%lat = asin(z/dl) - -! check for being close to either pole - - if (abs(x) > eps) then - - if (abs(y) > eps) then - - latlon%lon = atan(abs(y/x)) - - if ((x <= 0.) .and. (y >= 0.)) then - latlon%lon = pii-latlon%lon - else if ((x <= 0.) .and. (y < 0.)) then - latlon%lon = latlon%lon+pii - else if ((x >= 0.) .and. (y <= 0.)) then - latlon%lon = 2*pii-latlon%lon - end if - - else ! we're either on longitude 0 or 180 - - if (x > 0) then - latlon%lon = 0. - else - latlon%lon = pii - end if - - end if - - else if (abs(y) > eps) then - - if (y > 0) then - latlon%lon = pii/2. - else - latlon%lon = 3.*pii/2. - end if - - else ! we are at a pole - - latlon%lon = 0. - - end if - -end subroutine convert_xl - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE GC_INTERSECT -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-subroutine gc_intersect(p0, p1, p2, p3, pc) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p0, p1, p2, p3 - type (geo_point), intent(out) :: pc - - real :: x0, y0, z0, x1, y1, z1, x2, y2, z2, x3, y3, z3 - real :: n1, n2, n3, m1, m2, m3 - real :: xc, yc, zc, dot - real, parameter :: radius=1.0 - - call convert_lx(x0,y0,z0,radius,p0) - call convert_lx(x1,y1,z1,radius,p1) - call convert_lx(x2,y2,z2,radius,p2) - call convert_lx(x3,y3,z3,radius,p3) - - n1 = (y0 * z1 - y1 * z0) - n2 = -(x0 * z1 - x1 * z0) - n3 = (x0 * y1 - x1 * y0) - - m1 = (y2 * z3 - y3 * z2) - m2 = -(x2 * z3 - x3 * z2) - m3 = (x2 * y3 - x3 * y2) - - xc = (n2 * m3 - n3 * m2) - yc = -(n1 * m3 - n3 * m1) - zc = (n1 * m2 - n2 * m1) - - dot = x0*xc + y0*yc + z0*zc - - if (dot < 0.0) then - xc = -xc - yc = -yc - zc = -zc - end if - - call convert_xl(xc,yc,zc,pc) - -end subroutine gc_intersect - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION POS_ANG -! -! Normalize an angle, given in radians, to lie in the interval [0,2*PI]. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function pos_ang(angle) - - implicit none - - real, intent(in) :: angle - - real :: pii - - pii = 2.*asin(1.0) - pos_ang = angle - - if(angle > 2.*pii) then - pos_ang = angle - 2.*pii - else if(angle < 0.) then - pos_ang = angle + 2.*pii - end if - -end function pos_ang - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION MERIDIAN_ANGLE -! -! Find the angle between the meridian that intersects point (lat1,lon1) -! and the great circle passing through points (lat1,lon1) (lat2,lon2). -! (lat1,lon1) is the vertex of the angle. -! -! Convention: zero points north, 90 points west, -90 point east, -! points south 180, -180 -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-real function meridian_angle(p1, p2) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - -type (geo_point) :: np - - real :: pii, da, db, dc - type (geo_point) :: p3 - real :: cosa - real :: eps - parameter (eps = 1.e-04) -real :: ax, ay, az -real :: bx, by, bz -real :: cx, cy, cz - -np = p1 -np%lat = np%lat + 0.05 - -call convert_lx(ax, ay, az, 1.0, p1) -call convert_lx(bx, by, bz, 1.0, np) -call convert_lx(cx, cy, cz, 1.0, p2) - -meridian_angle = plane_angle(ax, ay, az, bx, by, bz, cx, cy, cz, ax, ay, az) -return - - if (p1%lon == p2%lon) then - - meridian_angle = 0.0 - - else - - pii = 2.*asin(1.0) - dc = arc_angle(p1,p2) - - p3%lon = p1%lon - if (p1%lat + dc <= pii/2.0) then - p3%lat = p1%lat+dc - else - p3%lat = p1%lat-dc - end if - db = arc_angle(p1,p3) - da = arc_angle(p2,p3) - -! see spherical trig section on online wolfram pages - eq(11) -> - - cosa = max(-1.,min(1.,(cos(da)-cos(db)*cos(dc))/(sin(db)*sin(dc)))) - meridian_angle = acos(cosa) - - - if (((p2%lon > p1%lon) .and. (p2%lon - p1%lon <= pii)) .or. & - ((p2%lon < p1%lon) .and. (p1%lon - p2%lon >= pii))) then - meridian_angle = -abs(meridian_angle) - else - meridian_angle = abs(meridian_angle) - end if - - end if - -end function meridian_angle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CENTER_OF_MASS -! -! Find centriod of the triangle whose corners are at (lat1,lon1), (lat2,lon2), -! and (lat3,lon3). -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine center_of_mass(p1, p2, p3, pc) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - type (geo_point), intent(out) :: pc - - real :: x1, x2, x3, xc - real :: y1, y2, y3, yc - real :: z1, z2, z3, zc - - call convert_lx(x1,y1,z1,1.,p1) - call convert_lx(x2,y2,z2,1.,p2) - call convert_lx(x3,y3,z3,1.,p3) - - xc = (x1+x2+x3)/3. - yc = (y1+y2+y3)/3. - zc = (z1+z2+z3)/3. 
- - call convert_xl(xc,yc,zc,pc) - -end subroutine center_of_mass - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE DIVIDE_TRIANGLE -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine divide_triangle(p1, p2, p3, n, p) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - integer, intent(in) :: n - type (geo_point), dimension(3,n), intent(out) :: p - - integer :: i, j, k - integer :: glevel ! Level of decomposition - type (geo_point), allocatable, dimension(:) :: p1p2, p1p3 - type (geo_point), allocatable, dimension(:,:) :: line - - glevel = nint(log(real(n)) / log(4.0)) ! Each subdivision gives four times the number of - ! triangles, so log4(n) gives the level decomposition - - glevel = (2 ** glevel) + 1 - allocate(line(glevel, glevel)) - allocate(p1p2(glevel)) - allocate(p1p3(glevel)) - - call great_circle_points(p1, p2, p1p2, glevel) - call great_circle_points(p1, p3, p1p3, glevel) - - line(1,1) = p1 - line(1,2) = p1p2(2) - line(2,2) = p1p3(2) - - do i = 3,glevel - call great_circle_points(p1p2(i), p1p3(i), line(:,i), i) -!do j=1,i -!write(0,*) j,i,' P ',line(j,i)%lat*180./3.14159, line(j,i)%lon*180./3.14159 -!end do - end do - - k = 1 - do i = 1,glevel-1 - do j = 1,i - p(1,k) = line(j,i) - p(2,k) = line(j,i+1) - p(3,k) = line(j+1,i+1) -!write(0,*) j,i, ' - ',p(1,k)%lat*180./3.14159,p(1,k)%lon*180./3.14159 -!write(0,*) j,i+1, ' - ',p(2,k)%lat*180./3.14159,p(2,k)%lon*180./3.14159 -!write(0,*) j+1,i+1, ' - ',p(3,k)%lat*180./3.14159,p(3,k)%lon*180./3.14159 - k = k + 1 - end do - end do - -!write(0,*) '-----------' - do i = glevel,3,-1 - do j = 2,i-1 - p(1,k) = line(j,i) - p(2,k) = line(j,i-1) - p(3,k) = line(j-1,i-1) -!write(0,*) j,i, ' - ',p(1,k)%lat*180./3.14159,p(1,k)%lon*180./3.14159 -!write(0,*) j,i-1, ' - ',p(2,k)%lat*180./3.14159,p(2,k)%lon*180./3.14159 -!write(0,*) j-1,i-1, ' - ',p(3,k)%lat*180./3.14159,p(3,k)%lon*180./3.14159 - k = 
k + 1 - end do - end do - - deallocate(line) - deallocate(p1p2) - deallocate(p1p3) - -end subroutine divide_triangle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE POINT_TO_PLANE -! -! Find projection (xp, yp, zp) of a point (Qx,Qy,Qz) onto the plane defined by -! the equation ax+by+cz+d=0 -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine point_to_plane(a, b, c, d, Qx, Qy, Qz, xp, yp, zp) - - implicit none - - real, intent(in) :: a, b, c, d ! The coefficients in the equation of the plane - real, intent(in) :: Qx, Qy, Qz ! The coordinates of the point Q to be projected to the plane - real, intent(out) :: xp, yp, zp ! The coordinates of the point projected in the plane - - real :: Px, Py, Pz ! A point P in the plane ax + by + cz + d = 0 - real :: PQx, PQy, PQz ! Components of the vector from P to Q - real :: PQn ! The dot product of PQ and the vector normal to the plane - real :: m2 ! The magnitude and squared magnitude of the vector n normal to the plane - - m2 = (a**2.0 + b**2.0 + c**2.0) - - Px = -d*a/m2 - Py = -d*b/m2 - Pz = -d*c/m2 - - PQx = Qx - Px - PQy = Qy - Py - PQz = Qz - Pz - - PQn = PQx * a + PQy * b + PQz * c - - ! . Q - ! n ^ / - ! | / - ! |/ - ! ----------.------------------- - ! P - - xp = Qx - PQn * a / m2 - yp = Qy - PQn * b / m2 - zp = Qz - PQn * c / m2 - -end subroutine point_to_plane - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE POINT_TO_SPHERE -! -! Find projection (xp, yp, zp) of a point (Qx,Qy,Qz) in the plane defined by -! the equation ax+by+cz+d=0 onto the surface of the sphere with radius r -! centered at the origin. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine point_to_sphere(a, b, c, d, r, Qx, Qy, Qz, xp, yp, zp) - - implicit none - - real, intent(in) :: a, b, c, d ! The coefficients in the equation of the plane - real, intent(in) :: r ! 
The radius of the sphere - real, intent(in) :: Qx, Qy, Qz ! The coordinates of the point Q to be projected to the sphere - real, intent(out) :: xp, yp, zp ! The coordinates of the point projected to the sphere - - real :: aa, bb, cc ! Coefficients of quadratic equation - real :: disc, t1, t2 - - ! Solve for the interesection of the line (Qx - at, Qy - bt, Qz - ct) and the - ! sphere x^2 + y^2 + z^2 - r^2 = 0 - aa = a**2.0 + b**2.0 + c**2.0 - bb = -2.0*(Qx*a + Qy*b + Qz*c) - cc = Qx**2.0 + Qy**2.0 + Qz**2.0 - r**2.0 - - disc = bb**2.0 - 4.0*aa*cc - - if (disc < 0.0) then ! Point has no projection on the surface of the sphere - xp = 0.0 - yp = 0.0 - zp = 0.0 - else if (disc == 0.0) then ! Point has exactly one projection (line through point and - t1 = -bb / (2.0*aa) - xp = Qx - a*t1 ! and normal to plane is tangent to sphere - yp = Qy - b*t1 - zp = Qz - c*t1 - else ! Point has two projections; choose the one that is closest - t1 = (-bb + sqrt(disc)) / (2.0*aa) - t2 = (-bb - sqrt(disc)) / (2.0*aa) - if (abs(t1) <= abs(t2)) then - xp = Qx - a*t1 - yp = Qy - b*t1 - zp = Qz - c*t1 - else - xp = Qx - a*t2 - yp = Qy - b*t2 - zp = Qz - c*t2 - end if - end if - -end subroutine point_to_sphere - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE ROTATE_ABOUT_VECTOR -! -! Rotates the point (x,y,z) through an angle theta about the vector -! originating at (a, b, c) and having direction (u, v, w). -! -! Reference: http://inside.mines.edu/~gmurray/ArbitraryAxisRotation/ArbitraryAxisRotation.html -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-subroutine rotate_about_vector(x, y, z, theta, a, b, c, u, v, w, xp, yp, zp) - - implicit none - - real, intent(in) :: x, y, z, theta, a, b, c, u, v, w - real, intent(out) :: xp, yp, zp - - real :: vw2, uw2, uv2 - real :: m - - vw2 = v**2.0 + w**2.0 - uw2 = u**2.0 + w**2.0 - uv2 = u**2.0 + v**2.0 - m = sqrt(u**2.0 + v**2.0 + w**2.0) - - xp = (a*vw2 + u*(-b*v-c*w+u*x+v*y+w*z) + ((x-a)*vw2+u*(b*v+c*w-v*y-w*z))*cos(theta) + m*(-c*v+b*w-w*y+v*z)*sin(theta))/m**2.0 - yp = (b*uw2 + v*(-a*u-c*w+u*x+v*y+w*z) + ((y-b)*uw2+v*(a*u+c*w-u*x-w*z))*cos(theta) + m*( c*u-a*w+w*x-u*z)*sin(theta))/m**2.0 - zp = (c*uv2 + w*(-a*u-b*v+u*x+v*y+w*z) + ((z-c)*uv2+w*(a*u+b*v-u*x-v*y))*cos(theta) + m*(-b*u+a*v-v*x+u*y)*sin(theta))/m**2.0 - -end subroutine rotate_about_vector - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION PLANE_ANGLE -! -! Computes the angle between vectors AB and AC, given points A, B, and C, and -! a vector (u,v,w) normal to the plane. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function plane_angle(ax, ay, az, bx, by, bz, cx, cy, cz, u, v, w) - - implicit none - - real, intent(in) :: ax, ay, az, bx, by, bz, cx, cy, cz, u, v, w - - real :: ABx, ABy, ABz ! The components of the vector AB - real :: mAB ! The magnitude of AB - real :: ACx, ACy, ACz ! The components of the vector AC - real :: mAC ! The magnitude of AC - - real :: Dx ! The i-components of the cross product AB x AC - real :: Dy ! The j-components of the cross product AB x AC - real :: Dz ! 
The k-components of the cross product AB x AC - - real :: cos_angle - - ABx = bx - ax - ABy = by - ay - ABz = bz - az - mAB = sqrt(ABx**2.0 + ABy**2.0 + ABz**2.0) - - ACx = cx - ax - ACy = cy - ay - ACz = cz - az - mAC = sqrt(ACx**2.0 + ACy**2.0 + ACz**2.0) - - - Dx = (ABy * ACz) - (ABz * ACy) - Dy = -((ABx * ACz) - (ABz * ACx)) - Dz = (ABx * ACy) - (ABy * ACx) - - cos_angle = (ABx*ACx + ABy*ACy + ABz*ACz) / (mAB * mAC) - - if (cos_angle < -1.0) then - cos_angle = -1.0 - else if (cos_angle > 1.0) then - cos_angle = 1.0 - end if - - if ((Dx*u + Dy*v + Dz*w) >= 0.0) then - plane_angle = acos(cos_angle) - else - plane_angle = -acos(cos_angle) - end if - -end function plane_angle - -end module sphere_utilities diff --git a/grid_gen/global_scvt/src/module_voronoi_utils.F b/grid_gen/global_scvt/src/module_voronoi_utils.F deleted file mode 100644 index 3079cd320..000000000 --- a/grid_gen/global_scvt/src/module_voronoi_utils.F +++ /dev/null @@ -1,113 +0,0 @@ -module voronoi_utils - - use grid_constants - use stripack - - contains - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_DT - ! - ! Compute the Delaunay triangulation of a set of lat/lon locations. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine compute_dt(rlat, rlon, n, ltri, nrow, ntmx, nt) - - implicit none - - integer, intent(in) :: n, nrow, ntmx - integer, intent(inout) :: nt - integer, dimension(nrow, ntmx), intent(in) :: ltri - real, dimension(n), intent(in) :: rlat, rlon - - integer :: ierr, lnew, nscr - integer, dimension(n) :: near, next - integer, dimension(n) :: lend - integer, dimension(6*n+12) :: list, lptr - real, dimension(n) :: x, y, z, dist - - nscr = 6*n+12 - - call trans(n, rlat, rlon, x, y, z) - - write(0,*) 'started TRMESH' - call trmesh(n, x, y, z, list, lptr, lend, lnew, near, next, dist, ierr) - if (ierr /= 0) then - write(0,*) 'Error: TRMESH returned error code ',ierr - end if - write(0,*) 'finished TRMESH' - - write(0,*) 'started TRLIST' - call trlist(n, list, lptr, lend, nrow, nt, ltri, ierr) - if (ierr /= 0) then - write(0,*) 'Error: TRLIST returned error code ',ierr - end if - write(0,*) 'finished TRLIST' - - end subroutine compute_dt - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_VC - ! - ! Compute the Voronoi corners of a set of lat/lon locations. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine compute_vc(rlat, rlon, n, nrow, ntmx, list, lptr, lend, listc, vclat, vclon, nvc) - - implicit none - - integer, intent(in) :: n, nrow, ntmx, nvc - integer, dimension(nvc), intent(inout) :: list, lptr, listc - real, dimension(nvc), intent(inout) :: vclat, vclon - integer, dimension(n), intent(inout) :: lend - real, dimension(n), intent(in) :: rlat, rlon - - integer :: ierr, lnew, nb - integer, dimension(n) :: near, next - integer, dimension(nrow, ntmx) :: ltri - real, dimension(n) :: x, y, z, dist - real, dimension(nvc) :: xc, yc, zc, rc - - if (nvc < 6*n-12) then - write(0,*) 'Error: Argument nvc to COMPUTE_VC must be at least 6*n+12' - return - end if - - call trans(n, rlat, rlon, x, y, z) - - call trmesh(n, x, y, z, list, lptr, lend, lnew, near, next, dist, ierr) - if (ierr /= 0) then - write(0,*) 'Error: TRMESH returned error code ',ierr - end if - - call crlist(n, ntmx, x, y, z, list, lend, lptr, lnew, ltri, listc, nb, xc, yc, zc, rc, ierr) - if (ierr /= 0) then - write(0,*) 'Error: CRLIST returned error code ',ierr - end if - - call trans_inv(xc, yc, zc, vclat, vclon, nvc) - - end subroutine compute_vc - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE TRANS_INV - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- subroutine trans_inv(x, y, z, lat, lon, n) - - implicit none - - integer, intent(in) :: n - real, dimension(n), intent(in) :: x, y, z - real, dimension(n), intent(out) :: lat, lon - - integer :: i - - do i=1,n - lat(i) = (pii/2.0 - acos(z(i))) - lon(i) = atan2(y(i),x(i)) - end do - - end subroutine trans_inv - -end module voronoi_utils diff --git a/grid_gen/global_scvt/src/module_write_netcdf.F b/grid_gen/global_scvt/src/module_write_netcdf.F deleted file mode 100644 index d386a4ad4..000000000 --- a/grid_gen/global_scvt/src/module_write_netcdf.F +++ /dev/null @@ -1,646 +0,0 @@ -module write_netcdf - - use grid_params - - integer :: wr_ncid - integer :: wrDimIDTime - integer :: wrDimIDnCells - integer :: wrDimIDnEdges - integer :: wrDimIDnVertices - integer :: wrDimIDmaxEdges - integer :: wrDimIDmaxEdges2 - integer :: wrDimIDTWO - integer :: wrDimIDvertexDegree - integer :: wrDimIDnVertLevels - integer :: wrDimIDnTracers - integer :: wrVarIDlatCell - integer :: wrVarIDlonCell - integer :: wrVarIDmeshDensity - integer :: wrVarIDxCell - integer :: wrVarIDyCell - integer :: wrVarIDzCell - integer :: wrVarIDindexToCellID - integer :: wrVarIDlatEdge - integer :: wrVarIDlonEdge - integer :: wrVarIDxEdge - integer :: wrVarIDyEdge - integer :: wrVarIDzEdge - integer :: wrVarIDindexToEdgeID - integer :: wrVarIDlatVertex - integer :: wrVarIDlonVertex - integer :: wrVarIDxVertex - integer :: wrVarIDyVertex - integer :: wrVarIDzVertex - integer :: wrVarIDindexToVertexID - integer :: wrVarIDcellsOnEdge - integer :: wrVarIDnEdgesOnCell - integer :: wrVarIDnEdgesOnEdge - integer :: wrVarIDedgesOnCell - integer :: wrVarIDedgesOnEdge - integer :: wrVarIDweightsOnEdge - integer :: wrVarIDdvEdge - integer :: wrVarIDdv1Edge - integer :: wrVarIDdv2Edge - integer :: wrVarIDdcEdge - integer :: wrVarIDangleEdge - integer :: wrVarIDareaCell - integer :: wrVarIDareaTriangle - integer :: wrVarIDcellsOnCell - integer :: wrVarIDverticesOnCell - integer :: wrVarIDverticesOnEdge - integer :: 
wrVarIDedgesOnVertex - integer :: wrVarIDcellsOnVertex - integer :: wrVarIDkiteAreasOnVertex - integer :: wrVarIDfEdge - integer :: wrVarIDfVertex - integer :: wrVarIDh_s - integer :: wrVarIDu - integer :: wrVarIDv - integer :: wrVarIDh - integer :: wrVarIDvh - integer :: wrVarIDcirculation - integer :: wrVarIDvorticity - integer :: wrVarIDke - integer :: wrVarIDtracers - - integer :: wrLocalnCells - integer :: wrLocalnEdges - integer :: wrLocalnVertices - integer :: wrLocalmaxEdges - integer :: wrLocalnVertLevels - integer :: wrLocalnTracers - - contains - - subroutine write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - nVertLevels, & - nTracers & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: nCells - integer, intent(in) :: nEdges - integer, intent(in) :: nVertices - integer, intent(in) :: maxEdges - integer, intent(in) :: nVertLevels - integer, intent(in) :: nTracers - - integer :: nferr - integer, dimension(10) :: dimlist - real (kind=8) :: sphere_radius - character (len=16) :: on_a_sphere - - - wrLocalnCells = nCells - wrLocalnEdges = nEdges - wrLocalnVertices = nVertices - wrLocalmaxEdges = maxEdges - wrLocalnVertLevels = nVertLevels - wrLocalnTracers = nTracers - - on_a_sphere = 'YES ' - sphere_radius = 1.0 - - nferr = nf_create('grid.nc', IOR(NF_CLOBBER,NF_64BIT_OFFSET), wr_ncid) - - ! - ! Write Namlist information - ! 
- - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'on_a_sphere', 16, on_a_sphere) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'sphere_radius', NF_DOUBLE, 1, sphere_radius) - nferr = nf_put_att_int(wr_ncid, NF_GLOBAL, 'np', NF_INT, 1, np) - nferr = nf_put_att_int(wr_ncid, NF_GLOBAL, 'n_scvt_iterations', NF_INT, 1, n_scvt_iterations) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'eps', NF_DOUBLE, 1, eps) - if(l2_conv) then - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'Convergence',2,'L2') - elseif(inf_conv) then - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'Convergence',3,'INF') - else - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'Convergence',7,'MaxIter') - endif - - ! - ! Define dimensions - ! - nferr = nf_def_dim(wr_ncid, 'nCells', nCells, wrDimIDnCells) - nferr = nf_def_dim(wr_ncid, 'nEdges', nEdges, wrDimIDnEdges) - nferr = nf_def_dim(wr_ncid, 'nVertices', nVertices, wrDimIDnVertices) - nferr = nf_def_dim(wr_ncid, 'maxEdges', maxEdges, wrDimIDmaxEdges) - nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) - nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) - nferr = nf_def_dim(wr_ncid, 'vertexDegree', 3, wrDimIDvertexDegree) - nferr = nf_def_dim(wr_ncid, 'nVertLevels', nVertLevels, wrDimIDnVertLevels) - nferr = nf_def_dim(wr_ncid, 'nTracers', nTracers, wrDimIDnTracers) - nferr = nf_def_dim(wr_ncid, 'Time', NF_UNLIMITED, wrDimIDTime) - - ! - ! Define variables - ! 
- dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'latCell', NF_DOUBLE, 1, dimlist, wrVarIDlatCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'lonCell', NF_DOUBLE, 1, dimlist, wrVarIDlonCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'xCell', NF_DOUBLE, 1, dimlist, wrVarIDxCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'yCell', NF_DOUBLE, 1, dimlist, wrVarIDyCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'zCell', NF_DOUBLE, 1, dimlist, wrVarIDzCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'indexToCellID', NF_INT, 1, dimlist, wrVarIDindexToCellID) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'latEdge', NF_DOUBLE, 1, dimlist, wrVarIDlatEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'lonEdge', NF_DOUBLE, 1, dimlist, wrVarIDlonEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'xEdge', NF_DOUBLE, 1, dimlist, wrVarIDxEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'yEdge', NF_DOUBLE, 1, dimlist, wrVarIDyEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'zEdge', NF_DOUBLE, 1, dimlist, wrVarIDzEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'indexToEdgeID', NF_INT, 1, dimlist, wrVarIDindexToEdgeID) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'latVertex', NF_DOUBLE, 1, dimlist, wrVarIDlatVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'lonVertex', NF_DOUBLE, 1, dimlist, wrVarIDlonVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'xVertex', NF_DOUBLE, 1, dimlist, wrVarIDxVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'yVertex', NF_DOUBLE, 1, dimlist, wrVarIDyVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'zVertex', NF_DOUBLE, 1, dimlist, 
wrVarIDzVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'indexToVertexID', NF_INT, 1, dimlist, wrVarIDindexToVertexID) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'cellsOnEdge', NF_INT, 2, dimlist, wrVarIDcellsOnEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'nEdgesOnCell', NF_INT, 1, dimlist, wrVarIDnEdgesOnCell) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'nEdgesOnEdge', NF_INT, 1, dimlist, wrVarIDnEdgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'edgesOnCell', NF_INT, 2, dimlist, wrVarIDedgesOnCell) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'edgesOnEdge', NF_INT, 2, dimlist, wrVarIDedgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'weightsOnEdge', NF_DOUBLE, 2, dimlist, wrVarIDweightsOnEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dvEdge', NF_DOUBLE, 1, dimlist, wrVarIDdvEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dv1Edge', NF_DOUBLE, 1, dimlist, wrVarIDdv1Edge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dv2Edge', NF_DOUBLE, 1, dimlist, wrVarIDdv2Edge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dcEdge', NF_DOUBLE, 1, dimlist, wrVarIDdcEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'angleEdge', NF_DOUBLE, 1, dimlist, wrVarIDangleEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'areaCell', NF_DOUBLE, 1, dimlist, wrVarIDareaCell) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'areaTriangle', NF_DOUBLE, 1, dimlist, wrVarIDareaTriangle) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'cellsOnCell', NF_INT, 2, dimlist, wrVarIDcellsOnCell) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 
'verticesOnCell', NF_INT, 2, dimlist, wrVarIDverticesOnCell) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'verticesOnEdge', NF_INT, 2, dimlist, wrVarIDverticesOnEdge) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'edgesOnVertex', NF_INT, 2, dimlist, wrVarIDedgesOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'cellsOnVertex', NF_INT, 2, dimlist, wrVarIDcellsOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'fVertex', NF_DOUBLE, 1, dimlist, wrVarIDfVertex) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'h_s', NF_DOUBLE, 1, dimlist, wrVarIDh_s) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'u', NF_DOUBLE, 3, dimlist, wrVarIDu) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'v', NF_DOUBLE, 3, dimlist, wrVarIDv) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'h', NF_DOUBLE, 3, dimlist, wrVarIDh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vh', NF_DOUBLE, 3, dimlist, wrVarIDvh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'circulation', NF_DOUBLE, 3, dimlist, wrVarIDcirculation) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vorticity', NF_DOUBLE, 3, 
dimlist, wrVarIDvorticity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'ke', NF_DOUBLE, 3, dimlist, wrVarIDke) - dimlist( 1) = wrDimIDnTracers - dimlist( 2) = wrDimIDnVertLevels - dimlist( 3) = wrDimIDnCells - dimlist( 4) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'tracers', NF_DOUBLE, 4, dimlist, wrVarIDtracers) - - nferr = nf_enddef(wr_ncid) - - end subroutine write_netcdf_init - - - subroutine write_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dv1Edge, & - dv2Edge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - h_s, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: time - real (kind=RKIND), dimension(:), intent(in) :: latCell - real (kind=RKIND), dimension(:), intent(in) :: lonCell - real (kind=RKIND), dimension(:), intent(in) :: meshDensity - real (kind=RKIND), dimension(:), intent(in) :: xCell - real (kind=RKIND), dimension(:), intent(in) :: yCell - real (kind=RKIND), dimension(:), intent(in) :: zCell - integer, dimension(:), intent(in) :: indexToCellID - real (kind=RKIND), dimension(:), intent(in) :: latEdge - real (kind=RKIND), dimension(:), intent(in) :: lonEdge - real (kind=RKIND), dimension(:), intent(in) :: xEdge - real (kind=RKIND), dimension(:), intent(in) :: yEdge - real (kind=RKIND), dimension(:), intent(in) :: zEdge - integer, dimension(:), intent(in) 
:: indexToEdgeID - real (kind=RKIND), dimension(:), intent(in) :: latVertex - real (kind=RKIND), dimension(:), intent(in) :: lonVertex - real (kind=RKIND), dimension(:), intent(in) :: xVertex - real (kind=RKIND), dimension(:), intent(in) :: yVertex - real (kind=RKIND), dimension(:), intent(in) :: zVertex - integer, dimension(:), intent(in) :: indexToVertexID - integer, dimension(:,:), intent(in) :: cellsOnEdge - integer, dimension(:), intent(in) :: nEdgesOnCell - integer, dimension(:), intent(in) :: nEdgesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnCell - integer, dimension(:,:), intent(in) :: edgesOnEdge - real (kind=RKIND), dimension(:,:), intent(in) :: weightsOnEdge - real (kind=RKIND), dimension(:), intent(in) :: dvEdge - real (kind=RKIND), dimension(:), intent(in) :: dv1Edge - real (kind=RKIND), dimension(:), intent(in) :: dv2Edge - real (kind=RKIND), dimension(:), intent(in) :: dcEdge - real (kind=RKIND), dimension(:), intent(in) :: angleEdge - real (kind=RKIND), dimension(:), intent(in) :: areaCell - real (kind=RKIND), dimension(:), intent(in) :: areaTriangle - integer, dimension(:,:), intent(in) :: cellsOnCell - integer, dimension(:,:), intent(in) :: verticesOnCell - integer, dimension(:,:), intent(in) :: verticesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnVertex - integer, dimension(:,:), intent(in) :: cellsOnVertex - real (kind=RKIND), dimension(:,:), intent(in) :: kiteAreasOnVertex - real (kind=RKIND), dimension(:), intent(in) :: fEdge - real (kind=RKIND), dimension(:), intent(in) :: fVertex - real (kind=RKIND), dimension(:), intent(in) :: h_s - real (kind=RKIND), dimension(:,:,:), intent(in) :: u - real (kind=RKIND), dimension(:,:,:), intent(in) :: v - real (kind=RKIND), dimension(:,:,:), intent(in) :: h - real (kind=RKIND), dimension(:,:,:), intent(in) :: vh - real (kind=RKIND), dimension(:,:,:), intent(in) :: circulation - real (kind=RKIND), dimension(:,:,:), intent(in) :: vorticity - real (kind=RKIND), dimension(:,:,:), 
intent(in) :: ke - real (kind=RKIND), dimension(:,:,:,:), intent(in) :: tracers - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatCell, start1, count1, latCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonCell, start1, count1, lonCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, meshDensity) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDxCell, start1, count1, xCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDyCell, start1, count1, yCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDzCell, start1, count1, zCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToCellID, start1, count1, indexToCellID) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatEdge, start1, count1, latEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonEdge, start1, count1, lonEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDxEdge, start1, count1, xEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDyEdge, start1, count1, yEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDzEdge, start1, count1, zEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = 
nf_put_vara_int(wr_ncid, wrVarIDindexToEdgeID, start1, count1, indexToEdgeID) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatVertex, start1, count1, latVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonVertex, start1, count1, lonVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDxVertex, start1, count1, xVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDyVertex, start1, count1, yVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDzVertex, start1, count1, zVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToVertexID, start1, count1, indexToVertexID) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnEdge, start2, count2, cellsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnCell, start1, count1, nEdgesOnCell) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnEdge, start1, count1, nEdgesOnEdge) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnCell, start2, count2, edgesOnCell) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnEdge, start2, count2, edgesOnEdge) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDweightsOnEdge, start2, count2, weightsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdvEdge, start1, count1, dvEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, 
wrVarIDdv1Edge, start1, count1, dv1Edge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdv2Edge, start1, count1, dv2Edge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdcEdge, start1, count1, dcEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDangleEdge, start1, count1, angleEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaCell, start1, count1, areaCell) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaTriangle, start1, count1, areaTriangle) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnCell, start2, count2, cellsOnCell) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnCell, start2, count2, verticesOnCell) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnEdge, start2, count2, verticesOnEdge) - - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnVertex, start2, count2, edgesOnVertex) - - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnVertex, start2, count2, cellsOnVertex) - - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDh_s, 
start1, count1, h_s) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDu, start3, count3, u) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDv, start3, count3, v) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDh, start3, count3, h) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvh, start3, count3, vh) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDcirculation, start3, count3, circulation) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvorticity, start3, count3, vorticity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDke, start3, count3, ke) - - start4(4) = time - count4( 1) = wrLocalnTracers - count4( 2) = wrLocalnVertLevels - count4( 3) = wrLocalnCells - count4( 4) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDtracers, start4, count4, tracers) - - - end subroutine write_netcdf_fields - - - subroutine write_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(wr_ncid) - - end subroutine write_netcdf_finalize - -end module write_netcdf - From dbb4d7ead49a1b741d2b3fed67d46b49bee6b842 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:39:22 -0600 Subject: [PATCH 009/180] Remove grid_gen/icosdiv --- grid_gen/icosdiv/Makefile | 18 --- grid_gen/icosdiv/Point.cc | 144 ------------------- grid_gen/icosdiv/Point.h | 33 
----- grid_gen/icosdiv/Triangle.cc | 54 -------- grid_gen/icosdiv/Triangle.h | 19 --- grid_gen/icosdiv/icosdiv.cc | 259 ----------------------------------- grid_gen/icosdiv/locs.dat | 12 -- grid_gen/icosdiv/tri.dat | 20 --- 8 files changed, 559 deletions(-) delete mode 100644 grid_gen/icosdiv/Makefile delete mode 100644 grid_gen/icosdiv/Point.cc delete mode 100644 grid_gen/icosdiv/Point.h delete mode 100644 grid_gen/icosdiv/Triangle.cc delete mode 100644 grid_gen/icosdiv/Triangle.h delete mode 100644 grid_gen/icosdiv/icosdiv.cc delete mode 100644 grid_gen/icosdiv/locs.dat delete mode 100644 grid_gen/icosdiv/tri.dat diff --git a/grid_gen/icosdiv/Makefile b/grid_gen/icosdiv/Makefile deleted file mode 100644 index 6beafa2ff..000000000 --- a/grid_gen/icosdiv/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -CC = g++ - -all: icosdiv - -icosdiv: icosdiv.o Point.o Triangle.o - $(CC) -o icosdiv icosdiv.o Point.o Triangle.o - -icosdiv.o: Point.o Triangle.o icosdiv.cc - $(CC) -c icosdiv.cc - -Point.o: Point.cc Point.h - $(CC) -c Point.cc - -Triangle.o: Triangle.cc Triangle.h - $(CC) -c Triangle.cc - -clean: - rm -f icosdiv.o Point.o Triangle.o icosdiv diff --git a/grid_gen/icosdiv/Point.cc b/grid_gen/icosdiv/Point.cc deleted file mode 100644 index 8a4f63575..000000000 --- a/grid_gen/icosdiv/Point.cc +++ /dev/null @@ -1,144 +0,0 @@ -#include "Point.h" - -Point::Point() -{ - x = 0.0; - y = 0.0; - z = 0.0; - num = 0; -} - - -Point::Point(double x, double y, double z) -{ - this->x = x; - this->y = y; - this->z = z; -} - - -Point::~Point() -{ - -} - - -void Point::setX(double x) -{ - this->x = x; -} - - -void Point::setY(double y) -{ - this->y = y; -} - - -void Point::setZ(double z) -{ - this->z = z; -} - - -void Point::setXYZ(double x, double y, double z) -{ - this->x = x; - this->y = y; - this->z = z; -} - - -void Point::setNum(int n) -{ - num = n; -} - - -double Point::getX() const -{ - return x; -} - - -double Point::getY() const -{ - return y; -} - - -double Point::getZ() const -{ - 
return z; -} - - -double Point::distance(Point& p) -{ - // Assume we're on the unit sphere - return acos(p.getX()*x + p.getY()*y + p.getZ()*z); -} - - -int Point::getNum() const -{ - return num; -} - - -void Point::normalize() -{ - double mag; - - mag = sqrt(x*x + y*y + z*z); - x = x / mag; - y = y / mag; - z = z / mag; -} - - -Point Point::operator+(Point p) -{ - Point retval; - - retval.x = x + p.x; - retval.y = y + p.y; - retval.z = z + p.z; - retval.num = num; - - return retval; -} - - -Point Point::operator-(Point p) -{ - Point retval; - - retval.x = x - p.x; - retval.y = y - p.y; - retval.z = z - p.z; - retval.num = num; - - return retval; -} - - -Point Point::operator*(double s) -{ - Point retval; - - retval.x = s * x; - retval.y = s * y; - retval.z = s * z; - retval.num = num; - - return retval; -} - - -ostream& operator<<(ostream& output, const Point& p) -{ - output << p.num << " : " << p.x << " " << p.y << " " << p.z; - // output << p.x << " " << p.y << " " << p.z; - return output; -} diff --git a/grid_gen/icosdiv/Point.h b/grid_gen/icosdiv/Point.h deleted file mode 100644 index 25a8ed5d8..000000000 --- a/grid_gen/icosdiv/Point.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef _PointH -#define _PointH -#include -#include -using namespace std; - -class Point -{ - private: - double x, y, z; - int num; - public: - Point(); - Point(double x, double y, double z); - ~Point(); - void setX(double x); - void setY(double y); - void setZ(double z); - void setXYZ(double x, double y, double z); - void setNum(int n); - double getX() const; - double getY() const; - double getZ() const; - double distance(Point& p); - int getNum() const; - void normalize(); - Point operator+(Point p); - Point operator-(Point p); - Point operator*(double s); - friend ostream& operator<<(ostream& output, const Point& p); -}; - -#endif diff --git a/grid_gen/icosdiv/Triangle.cc b/grid_gen/icosdiv/Triangle.cc deleted file mode 100644 index 9e0503f57..000000000 --- a/grid_gen/icosdiv/Triangle.cc +++ 
/dev/null @@ -1,54 +0,0 @@ -#include "Triangle.h" - -/* - Point * points[3]; - public: - Triangle(); - Triangle(Point& a, Point& b, Point& c); - ~Triangle(); - void setPoint(Point& p, int n); - Point * getPoint(int n); - friend ostream& operator<<(ostream& output, const Triangle& t) -*/ - -Triangle::Triangle() -{ - points[0] = NULL; - points[1] = NULL; - points[2] = NULL; -} - - -Triangle::Triangle(Point& a, Point& b, Point& c) -{ - points[0] = &a; - points[1] = &b; - points[2] = &c; -} - - -Triangle::~Triangle() -{ - // Nothing to do... -} - - -void Triangle::setPoint(Point* p, int n) -{ - // assert(n >= 0 && n <= 2); - points[n] = p; -} - - -Point * Triangle::getPoint(int n) -{ - // assert(n >= 0 && n <= 2); - return points[n]; -} - - -ostream& operator<<(ostream& output, const Triangle& t) -{ - // output << "(" << p.x << ", " << p.y << ", " << p.z << ")"; - return output; -} diff --git a/grid_gen/icosdiv/Triangle.h b/grid_gen/icosdiv/Triangle.h deleted file mode 100644 index 7baf4b6e7..000000000 --- a/grid_gen/icosdiv/Triangle.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef _TriangleH -#define _TriangleH -#include -#include "Point.h" -using namespace std; - -class Triangle -{ - private: - Point * points[3]; - public: - Triangle(); - Triangle(Point& a, Point& b, Point& c); - ~Triangle(); - void setPoint(Point* p, int n); - Point * getPoint(int n); - friend ostream& operator<<(ostream& output, const Triangle& t); -}; -#endif diff --git a/grid_gen/icosdiv/icosdiv.cc b/grid_gen/icosdiv/icosdiv.cc deleted file mode 100644 index 3ae8847a6..000000000 --- a/grid_gen/icosdiv/icosdiv.cc +++ /dev/null @@ -1,259 +0,0 @@ -#include -#include -#include -#include -#include -#include "Point.h" -#include "Triangle.h" - -using namespace std; - -void add_point(set& points, Point* newpt, int& np) -{ - set::iterator ip; - - // If the point doesn't exist, we assign it the next highest number - // and add it to the set - ip = points.find(*newpt); - - if (ip == points.end()) { - 
newpt->setNum(np++); - points.insert(*newpt); - } - - // Otherwise, we want newpt to equal the existing point - else { - *newpt = *ip; - } -} - -inline bool operator<(Point const& lhs, Point const& rhs) -{ - if (lhs.getX() < rhs.getX()) - return true; - else if (lhs.getX() > rhs.getX()) - return false; - else - if (lhs.getY() < rhs.getY()) - return true; - else if (lhs.getY() > rhs.getY()) - return false; - else - if (lhs.getZ() < rhs.getZ()) - return true; - else - return false; -} - - -inline bool operator>(Point const& lhs, Point const& rhs) -{ - if (lhs.getX() > rhs.getX()) - return true; - else if (lhs.getX() > rhs.getX()) - return false; - else - if (lhs.getY() > rhs.getY()) - return true; - else if (lhs.getY() > rhs.getY()) - return false; - else - if (lhs.getZ() > rhs.getZ()) - return true; - else - return false; -} - - -inline bool operator==(Point const& lhs, Point const& rhs) -{ - if (lhs.getX() == rhs.getX() && - lhs.getY() == rhs.getY() && - lhs.getZ() == rhs.getZ()) - return true; - else - return false; -} - - -Point * great_circle_points(Point& p1, Point& p2, int n) -{ - double x1, x2, y1, y2, z1, z2; - double x, y, z; - double dtheta, dinc, dt, dx; - Point * pl; - int i; - - x1 = p1.getX(); y1 = p1.getY(); z1 = p1.getZ(); - x2 = p2.getX(); y2 = p2.getY(); z2 = p2.getZ(); - - // For unit sphere, distance is the same as arc angle - dtheta = p1.distance(p2); - dinc = dtheta / (double)(n-1); - - pl = new Point[n]; - - pl[0].setXYZ(x1, y1, z1); - pl[n-1].setXYZ(x2, y2, z2); - - // Fill in interior points - for(i=1; i points; - vector triangles; - set::iterator ip; - vector::iterator it; - - int div_factor = 76; - - // Read in 12 icosahedral vertices - fin.open("locs.dat",ifstream::in); - for(i=0; i<12; i++) { - fin >> x >> y >> z; - icos[i].setXYZ(x, y, z); - } - fin.close(); - - // Read in triangulation of icosahedral points - fin.open("tri.dat",ifstream::in); - for(i=0; i<20; i++) { - fin >> tri[i][0] >> tri[i][1] >> tri[i][2]; - } - fin.close(); - - 
np = 1; - - // In the code below, we actually know which points will be duplicated between - // the 20 large (icosahedral) triangles -- exactly those points along the perimeter - // of the triangle; so, we could take advantage of this information in the - // add_point() subroutine. - - - // Subdivide each triangle - for(k=0; k<20; k++) { - line = divide_triangle(icos[tri[k][0]-1], icos[tri[k][1]-1], icos[tri[k][2]-1], div_factor); - - // Get triangulation - for(i=1; isetPoint(p, 0); - p = new Point; *p = line[i-1][j]; add_point(points, p, np); t->setPoint(p, 1); - p = new Point; *p = line[i][j+1]; add_point(points, p, np); t->setPoint(p, 2); - triangles.push_back(*t); - -// cout << "Creating triangle from " << line[i-1][j].getNum() << " " << line[i-1][j+1].getNum() << " " << line[i][j+1].getNum() << endl; - t = new Triangle; - p = new Point; *p = line[i-1][j]; add_point(points, p, np); t->setPoint(p, 0); - p = new Point; *p = line[i-1][j+1]; add_point(points, p, np); t->setPoint(p, 1); - p = new Point; *p = line[i][j+1]; add_point(points, p, np); t->setPoint(p, 2); - triangles.push_back(*t); - } -// cout << "Creating triangle from " << line[i][i-1].getNum() << " " << line[i-1][i-1].getNum() << " " << line[i][i].getNum() <setPoint(p, 0); - p = new Point; *p = line[i-1][i-1]; add_point(points, p, np); t->setPoint(p, 1); - p = new Point; *p = line[i][i]; add_point(points, p, np); t->setPoint(p, 2); - triangles.push_back(*t); - } - p = NULL; t = NULL; - - for(j=0; jgetPoint(0)) << endl; - cout << *(it->getPoint(1)) << endl; - cout << *(it->getPoint(2)) << endl; - cout << *(it->getPoint(0)) << endl; - cout << endl; - cout << endl; - } -*/ - - triangles.clear(); - points.clear(); - - - return 0; -} diff --git a/grid_gen/icosdiv/locs.dat b/grid_gen/icosdiv/locs.dat deleted file mode 100644 index 1d284e890..000000000 --- a/grid_gen/icosdiv/locs.dat +++ /dev/null @@ -1,12 +0,0 @@ - 0.0000000000 0.5257311121 0.8506508084 - 0.0000000000 -0.5257311121 0.8506508084 - 
0.0000000000 0.5257311121 -0.8506508084 - 0.0000000000 -0.5257311121 -0.8506508084 - 0.5257311121 0.8506508084 0.0000000000 - -0.5257311121 0.8506508084 0.0000000000 - 0.5257311121 -0.8506508084 0.0000000000 - -0.5257311121 -0.8506508084 0.0000000000 - 0.8506508084 0.0000000000 0.5257311121 - -0.8506508084 0.0000000000 0.5257311121 - 0.8506508084 0.0000000000 -0.5257311121 - -0.8506508084 0.0000000000 -0.5257311121 diff --git a/grid_gen/icosdiv/tri.dat b/grid_gen/icosdiv/tri.dat deleted file mode 100644 index b7c3621a3..000000000 --- a/grid_gen/icosdiv/tri.dat +++ /dev/null @@ -1,20 +0,0 @@ - 1 5 6 - 1 6 10 - 1 2 10 - 1 2 9 - 1 5 9 - 2 8 10 - 2 7 8 - 2 7 9 - 3 4 12 - 3 6 12 - 3 5 6 - 3 5 11 - 3 4 11 - 4 7 8 - 4 8 12 - 4 7 11 - 5 9 11 - 6 10 12 - 7 9 11 - 8 10 12 From 2eb843452f3255af433528a4149c109b1b4af12e Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:06:14 -0600 Subject: [PATCH 010/180] Remove grid_gen/periodic_general --- grid_gen/periodic_general/DensityFunction.cxx | 255 --- grid_gen/periodic_general/DensityFunction.h | 30 - grid_gen/periodic_general/Makefile | 28 - grid_gen/periodic_general/Point.cxx | 157 -- grid_gen/periodic_general/Point.h | 33 - grid_gen/periodic_general/PointSet.cxx | 576 ------ grid_gen/periodic_general/PointSet.h | 78 - grid_gen/periodic_general/Triangle.cxx | 351 ---- grid_gen/periodic_general/Triangle.h | 30 - grid_gen/periodic_general/cells_hex.ncl | 186 -- grid_gen/periodic_general/centroids.txt | 1600 ----------------- grid_gen/periodic_general/fortune/Makefile | 13 - grid_gen/periodic_general/fortune/edgelist.c | 188 -- grid_gen/periodic_general/fortune/geometry.c | 220 --- grid_gen/periodic_general/fortune/heap.c | 118 -- grid_gen/periodic_general/fortune/memory.c | 85 - grid_gen/periodic_general/fortune/output.c | 252 --- grid_gen/periodic_general/fortune/vdefs.h | 141 -- grid_gen/periodic_general/fortune/voronoi.c | 121 -- .../periodic_general/fortune/voronoi_main.c | 135 -- 
grid_gen/periodic_general/main.cxx | 586 ------ grid_gen/periodic_general/mkgrid.cxx | 635 ------- grid_gen/periodic_general/plot_grid.py | 34 - grid_gen/periodic_general/readme.txt | 125 -- 24 files changed, 5977 deletions(-) delete mode 100644 grid_gen/periodic_general/DensityFunction.cxx delete mode 100644 grid_gen/periodic_general/DensityFunction.h delete mode 100644 grid_gen/periodic_general/Makefile delete mode 100644 grid_gen/periodic_general/Point.cxx delete mode 100644 grid_gen/periodic_general/Point.h delete mode 100644 grid_gen/periodic_general/PointSet.cxx delete mode 100644 grid_gen/periodic_general/PointSet.h delete mode 100644 grid_gen/periodic_general/Triangle.cxx delete mode 100644 grid_gen/periodic_general/Triangle.h delete mode 100644 grid_gen/periodic_general/cells_hex.ncl delete mode 100644 grid_gen/periodic_general/centroids.txt delete mode 100644 grid_gen/periodic_general/fortune/Makefile delete mode 100644 grid_gen/periodic_general/fortune/edgelist.c delete mode 100644 grid_gen/periodic_general/fortune/geometry.c delete mode 100644 grid_gen/periodic_general/fortune/heap.c delete mode 100644 grid_gen/periodic_general/fortune/memory.c delete mode 100644 grid_gen/periodic_general/fortune/output.c delete mode 100644 grid_gen/periodic_general/fortune/vdefs.h delete mode 100644 grid_gen/periodic_general/fortune/voronoi.c delete mode 100644 grid_gen/periodic_general/fortune/voronoi_main.c delete mode 100644 grid_gen/periodic_general/main.cxx delete mode 100644 grid_gen/periodic_general/mkgrid.cxx delete mode 100644 grid_gen/periodic_general/plot_grid.py delete mode 100644 grid_gen/periodic_general/readme.txt diff --git a/grid_gen/periodic_general/DensityFunction.cxx b/grid_gen/periodic_general/DensityFunction.cxx deleted file mode 100644 index 60ff12565..000000000 --- a/grid_gen/periodic_general/DensityFunction.cxx +++ /dev/null @@ -1,255 +0,0 @@ -#include -#include -#include "DensityFunction.h" -#include "netcdf.h" -#include - 
-DensityFunction::DensityFunction(double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY) -{ - - minX = minY = 0.0; - maxX = X_PERIOD; - maxY = Y_PERIOD; - use_data_density = USE_DATA_DENSITY; - - if (use_data_density == 1){ - read_density_netcdf(&xPosDG, &yPosDG, &densityDG, dxDG, dyDG); - - dxDG = xPosDG[1] - xPosDG[0]; - cout << " dx=" << dxDG <0) { - cout << "Error reading density.nc. Aborting." << endl; - exit(1); - } - - // Get needed dimensions - ncerr = nc_inq_dimid(ncid, "x", &x_dimID); - ncerr = nc_inq_dimlen(ncid, x_dimID, &temp); - x_dim = (int)temp; - ncerr = nc_inq_dimid(ncid, "y", &y_dimID); - ncerr = nc_inq_dimlen(ncid, y_dimID, &temp); - y_dim = (int)temp; - cout << " Got dimensions from file." <= nxDG - 1) { - xpos = nxDG - 2; - } - ypos = (int) floor( (y - yPosDG[0]) / dyDG); - if (ypos < 0) { - ypos = 0; - } else if (ypos >= nyDG - 1) { - ypos = nyDG - 2; - } - - return densityDG[ypos * nxDG + xpos]; -} - - - -double DensityFunction::BilinearInterp(double x, double y) -{ -// Gives the value of the density function at x,y using Bilinear Interpolation - - int xpos, ypos; // the cells that the point falls in - double value; - - xpos = (int) floor( (x - xPosDG[0]) / dxDG); // floor should not be needed since c++ will truncate... 
- if (xpos < 0) { - xpos = 0; - } else if (xpos >= nxDG - 1) { - xpos = nxDG - 2; - } - ypos = (int) floor( (y - yPosDG[0]) / dyDG); - if (ypos < 0) { - ypos = 0; - } else if (ypos >= nyDG - 1) { - ypos = nyDG - 2; - } - - value = ( - densityDG[ypos * nxDG + xpos] * (xPosDG[xpos+1] - x) * (yPosDG[ypos+1] - y) + - densityDG[(ypos+1) * nxDG + xpos] * (xPosDG[xpos+1] - x) * (y - yPosDG[ypos]) + - densityDG[ypos * nxDG + xpos+1] * (x - xPosDG[xpos]) * (yPosDG[ypos+1] - y) + - densityDG[(ypos+1) * nxDG + xpos+1] * (x - xPosDG[xpos]) * (y - yPosDG[ypos]) - ) / (dxDG * dyDG); - - return value; -} - diff --git a/grid_gen/periodic_general/DensityFunction.h b/grid_gen/periodic_general/DensityFunction.h deleted file mode 100644 index 96972a563..000000000 --- a/grid_gen/periodic_general/DensityFunction.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef _DensityFunctionH -#define _DensityFunctionH -#include "Point.h" - -using namespace std; - -class DensityFunction -{ - private: - double minX, maxX, minY, maxY; - double f(double x, double y); - double AnalyticDensityFunction(double x, double y); - double DataDensityFunction(double x, double y); - double *xPosDG, *yPosDG, *densityDG; // The x (1d), y (1d), and density (2d) values of the data density function (regular grid) - double dxDG, dyDG; // grid spacing on the regular data density grid - int nxDG, nyDG; // number of cells on regular data density grid - void read_density_netcdf(double **xPosDG, double **yPosDG, double **densityDG, int dxDG, int dyDG); - double UniformValue(double x, double y); - double BilinearInterp(double x, double y); - int use_data_density; - public: - DensityFunction(double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY); - ~DensityFunction(); - double evaluate(Point& p); - Point * randomPoint(); - void randomPoint(Point& p); -}; - - -#endif diff --git a/grid_gen/periodic_general/Makefile b/grid_gen/periodic_general/Makefile deleted file mode 100644 index 463ba6d0e..000000000 --- 
a/grid_gen/periodic_general/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -.SUFFIXES: .cxx .o - -OMP = -CXX = g++ -CXXFLAGS = -O3 $(OMP) -I$(NETCDF)/include -I/usr/include/ - -OBJS = Point.o Triangle.o DensityFunction.o PointSet.o - -all: libfortune.a - ( $(MAKE) periodic_general ) - ( $(MAKE) mkgrid ) - -libfortune.a: - ( cd fortune; $(MAKE) all "CC=$(CXX)" "CFLAGS=$(CXXFLAGS)") - ( ln -s fortune/libfortune.a . ) - -periodic_general: main.o $(OBJS) - $(CXX) -O3 $(OMP) -o periodic_general main.o $(OBJS) -L$(NETCDF)/lib -L. -lnetcdf -lfortune - -mkgrid: mkgrid.o $(OBJS) - $(CXX) -O3 $(OMP) -o mkgrid mkgrid.o $(OBJS) -L$(NETCDF)/lib -L. -lnetcdf -lfortune - -clean: - ( cd fortune; $(MAKE) clean ) - rm -f *.o periodic_general libfortune.a - -.cxx.o: - $(CXX) $(CXXFLAGS) -c $< -I./fortune diff --git a/grid_gen/periodic_general/Point.cxx b/grid_gen/periodic_general/Point.cxx deleted file mode 100644 index 9e462962b..000000000 --- a/grid_gen/periodic_general/Point.cxx +++ /dev/null @@ -1,157 +0,0 @@ -#include "Point.h" - -Point::Point() -{ - x = 0.0; - y = 0.0; - boundary_point = 0; - num = 0; -} - - -Point::Point(double x, double y, int boundary_point) -{ - this->x = x; - this->y = y; - this->boundary_point = boundary_point; -} - - -Point::~Point() -{ - -} - - -void Point::setX(double x) -{ - this->x = x; -} - - -void Point::setY(double y) -{ - this->y = y; -} - - -void Point::setXY(double x, double y) -{ - this->x = x; - this->y = y; -} - - -void Point::setBoundaryPoint(int boundary_point) -{ - this->boundary_point = boundary_point; -} - - -void Point::setNum(int n) -{ - num = n; -} - - -double Point::getX() const -{ - return x; -} - - -double Point::getY() const -{ - return y; -} - - -double Point::distance(Point& p) -{ - double xd, yd; - - xd = p.getX() - x; - yd = p.getY() - y; - return sqrt(xd*xd + yd*yd); -} - - -int Point::isBoundaryPoint() const -{ - return boundary_point; -} - - -int Point::getNum() const -{ - return num; -} - - -Point Point::operator+(Point p) -{ - 
Point retval; - - retval.x = x + p.x; - retval.y = y + p.y; - retval.boundary_point = boundary_point; - retval.num = num; - - return retval; -} - - -Point Point::operator-(Point p) -{ - Point retval; - - retval.x = x - p.x; - retval.y = y - p.y; - retval.boundary_point = boundary_point; - retval.num = num; - - return retval; -} - - -Point Point::operator*(double s) -{ - Point retval; - - retval.x = s * x; - retval.y = s * y; - retval.boundary_point = boundary_point; - retval.num = num; - - return retval; -} - - -ostream& operator<<(ostream& output, const Point& p) -{ - output << p.x << " " << p.y; - return output; -} - - -bool operator<(Point const& lhs, Point const& rhs) -{ - double a[2], b[2]; - - a[0] = lhs.x; - a[1] = lhs.y; - - b[0] = rhs.x; - b[1] = rhs.y; - - if (a[0] < b[0]) { - return true; - } - else if (a[0] == b[0]) { - if (a[1] < b[1]) { - return true; - } - } - - return false; -} diff --git a/grid_gen/periodic_general/Point.h b/grid_gen/periodic_general/Point.h deleted file mode 100644 index 948cfa9e5..000000000 --- a/grid_gen/periodic_general/Point.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef _PointH -#define _PointH -#include -#include -using namespace std; - -class Point -{ - private: - double x, y; - int boundary_point; - int num; - public: - Point(); - Point(double x, double y, int boundary_point); - ~Point(); - void setX(double x); - void setY(double y); - void setXY(double x, double y); - void setBoundaryPoint(int boundary_point); - void setNum(int n); - double getX() const; - double getY() const; - double distance(Point& p); - int isBoundaryPoint() const; - int getNum() const; - Point operator+(Point p); - Point operator-(Point p); - Point operator*(double s); - friend ostream& operator<<(ostream& output, const Point& p); - friend bool operator<(Point const& lhs, Point const& rhs); -}; -#endif diff --git a/grid_gen/periodic_general/PointSet.cxx b/grid_gen/periodic_general/PointSet.cxx deleted file mode 100644 index c9aa85a58..000000000 --- 
a/grid_gen/periodic_general/PointSet.cxx +++ /dev/null @@ -1,576 +0,0 @@ -#include -#include -#include -#include -#include -#include "PointSet.h" -#include "DensityFunction.h" - -#define MIN(A,B) (B)<(A)?(B):(A) -#define MAX(A,B) (B)>(A)?(B):(A) - -void voronoi_main(PointSet *); - - -PointSet::PointSet() -{ - nPoints = 0; -} - - -PointSet::~PointSet() -{ - -} - - -void PointSet::makeMCPoints(int n, double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY) -{ - //Create Monte Carlo random point set - int i; - srand(2); // set the seed for reproducibility - double x, y; - Point * p; - DensityFunction density(X_PERIOD, Y_PERIOD, USE_DATA_DENSITY); - - for(i = 0; i < n; i++){ - p = density.randomPoint(); - - p->setNum(nPoints); - nPoints++; - points.push_back(p); - - } -} - - -int PointSet::initFromTextFile(double X_PERIOD, double Y_PERIOD, const char * filename) -{ - ifstream fin(filename); - double xloc, yloc; - Point * p; - ifstream new_edges("new_edges"); - - assert(fin.is_open()); - - fin >> xloc >> yloc; - do { - p = new Point(xloc, yloc, 0); - p->setNum(nPoints); - nPoints++; - points.push_back(p); - fin >> xloc >> yloc; - } while (!fin.eof()); -} - - -void PointSet::print() -{ - vector::iterator it; - - cout << "We have " << nPoints << " points" << endl; - - for (it = points.begin(); it != points.end(); it++) { - cout << **it << endl; - } -} - - -void PointSet::printToTextFile(const char * filename) -{ - ofstream fout(filename); - vector::iterator it; - - assert(fout.is_open()); - - for (it = points.begin(); it != points.end(); it++) { - fout << (*it)->getX() << " " << (*it)->getY() << " " << (*it)->isBoundaryPoint() << endl; - } -} - - -void PointSet::addPoint(double x, double y, int boundary_point) -{ - Point * p = new Point(x, y, boundary_point); - p->setNum(nPoints); - nPoints++; - - points.push_back(p); -} - - -void PointSet::addPoint(Point& p) -{ - Point * pp = new Point(p); - nPoints++; - - points.push_back(pp); -} - - -int PointSet::size() -{ - return 
nPoints; -} - - -vector* PointSet::getTriangulation() -{ - triangulation = new vector; - - voronoi_main(this); - - return triangulation; -} - - -vector * PointSet::getVoronoiDiagram() -{ - vector * t; - vector::iterator it; - vector * voronoiCorners = new vector[nPoints]; - Point p; - int i, n; - int nobtuse; - - double PI = 2.0 * acos(0.0); - - // 1) Get a triangulation - t = PointSet::getTriangulation(); - - // 2) For each triangle, compute the associated Voronoi point - // Add this point to the list of Voronoi corner for each of the triangle's vertices - nobtuse = 0; - for (it = triangulation->begin(); it != triangulation->end(); it++) { - if (fabs(angle(it->getVertex(0), it->getVertex(1), it->getVertex(2))) > PI/2.0) nobtuse++; - if (fabs(angle(it->getVertex(1), it->getVertex(2), it->getVertex(0))) > PI/2.0) nobtuse++; - if (fabs(angle(it->getVertex(2), it->getVertex(0), it->getVertex(1))) > PI/2.0) nobtuse++; - p = it->circumcenter(); - for (i=0; i<3; i++) { - n = it->getVertex(i).getNum(); - //assert(n >= 0 && n < nPoints); - voronoiCorners[n].push_back(p); - } - } - -cout << nobtuse << " obtuse angles\n"; - - delete t; - - // 3) For each point, order its list of Voronoi corners in ccw order - for (i=0; i * PointSet::getDelaunayAdjacency() -{ - vector * t; - vector::iterator it; - vector * adjacencyList = new vector[nPoints]; - Point p0, p1, p2; - int i, j, found, n0, n1, n2; - - t = PointSet::getTriangulation(); - - for (it = triangulation->begin(); it != triangulation->end(); it++) { - p0 = it->getVertex(0); - p1 = it->getVertex(1); - p2 = it->getVertex(2); - - n0 = p0.getNum(); - n1 = p1.getNum(); - n2 = p2.getNum(); - - found = 0; - for(j=0; j::iterator it; - - x = p.getX(); - y = p.getY(); - - minD = 1.e20; - for (it = points.begin(), idx=0; it != points.end(); it++, idx++) { - d = pow((*it)->getX() - x, 2.0) + pow((*it)->getY() - y, 2.0); - if (d < minD) {minD = d; minIdx = idx;} - } - - return minIdx; -} - - -Point* PointSet::operator[](int i) -{ - 
assert(i >= 0 && i < nPoints); - return points[i]; -} - - -double angle(Point o, Point p1, Point p2) -{ - double P1x, P1y, mP1; - double P2x, P2y, mP2; - double cos_angle; - - P1x = p1.getX() - o.getX(); - P1y = p1.getY() - o.getY(); - - mP1 = sqrt(P1x*P1x + P1y*P1y); - - P2x = p2.getX() - o.getX(); - P2y = p2.getY() - o.getY(); - - mP2 = sqrt(P2x*P2x + P2y*P2y); - - cos_angle = (P1x*P2x + P1y*P2y) / (mP1 * mP2); - - if (((P1x * P2y) - (P1y * P2x)) >= 0.0) - return acos(MAX(MIN(cos_angle,1.0),-1.0)); - else - return -acos(MAX(MIN(cos_angle,1.0),-1.0)); - - return 1.0; -} - - -void orderCCW(vector& vc, Point p) -{ - int i, j; - int vsize; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - vsize = vc.size(); - angles = new double[vsize]; - - angles[0] = 0.0; - for (i=1; i& vc, Point p, double x_period, double y_period) -{ - int i, j; - int vsize; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - vsize = vc.size(); - angles = new double[vsize]; - - for (i=0; i (x_period / 2.0) ) { - vc[i].setX( vc[i].getX() - x_period ); - } - else if ( (vc[i].getX() - p.getX()) < (-x_period / 2.0) ) { - vc[i].setX( vc[i].getX() + x_period ); - } - - if ( (vc[i].getY() - p.getY()) > (y_period / 2.0) ) { - vc[i].setY( vc[i].getY() - y_period ); - } - else if ( (vc[i].getY() - p.getY()) < (-y_period / 2.0) ) { - vc[i].setY( vc[i].getY() + y_period ); - } - } - - angles[0] = 0.0; - for (i=1; i& vc1, vector& vc2, Point p, double x_period, double y_period) -{ - int i, j; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - if (vc1.size() != vc2.size()) { - cerr << "Error: In orderCCW_normalize2, input vectors have different size." 
<< endl; - return; - } - - angles = new double[vc1.size()]; - - - /* Normalize points in vc1 */ - for (i=0; i (x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() - x_period ); - } - else if ( (vc1[i].getX() - p.getX()) < (-x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() + x_period ); - } - - if ( (vc1[i].getY() - p.getY()) > (y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() - y_period ); - } - else if ( (vc1[i].getY() - p.getY()) < (-y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() + y_period ); - } - } - - - /* Normalize points in vc2 */ - for (i=0; i (x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() - x_period ); - } - else if ( (vc1[i].getX() - p.getX()) < (-x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() + x_period ); - } - - if ( (vc1[i].getY() - p.getY()) > (y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() - y_period ); - } - else if ( (vc1[i].getY() - p.getY()) < (-y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() + y_period ); - } - } - - - /* Order points in vc1 */ - angles[0] = 0.0; - for (i=1; i& vc, Point p) -{ - int i, j; - int vsize; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - vsize = vc.size(); - angles = new double[vsize]; - - angles[0] = 0.0; - for (i=1; i points; - vector * triangulation; - - public: - PointSet(); - ~PointSet(); - int initFromTextFile(double X_PERIOD, double Y_PERIOD, const char *); - void makeMCPoints(int n, double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY); - void print(); - void printToTextFile(const char *); - void addPoint(double x, double y, int boundary_point); - void addPoint(Point& p); - int size(); - vector* getTriangulation(); - vector * getVoronoiDiagram(); - vector * getDelaunayAdjacency(); - int nearestPoint(Point& p); - Point* operator[](int i); - friend void readsites(PointSet * p); - friend void out_triple(PointSet * p, Site * s1, Site * s2, Site * s3); -}; - -double angle(Point o, Point p1, Point p2); -void orderCCW(vector& vc, Point p); -void 
orderCCW_normalize(vector& vc, Point p, double x_period, double y_period); -void orderCCW_normalize2(vector& vc1, vector& vc2, Point p, double x_period, double y_period); -double poly_area(vector& vc); -void orderCCW_print(vector& vc, Point p); -void periodic_normalize(vector& vc, double x_period, double y_period); -#endif diff --git a/grid_gen/periodic_general/Triangle.cxx b/grid_gen/periodic_general/Triangle.cxx deleted file mode 100644 index ddf41da94..000000000 --- a/grid_gen/periodic_general/Triangle.cxx +++ /dev/null @@ -1,351 +0,0 @@ -#include -#include -#include "Triangle.h" - -Triangle::Triangle() -{ - points[0] = Point(0.0, 0.0, 0); - points[1] = Point(0.0, 0.0, 0); - points[2] = Point(0.0, 0.0, 0); -} - -Triangle::Triangle(Point a, Point b, Point c) -{ - points[0] = a; - points[1] = b; - points[2] = c; -} - - -Triangle::~Triangle() -{ - -} - - -void Triangle::setVertex(int i, Point p) -{ - assert(i >= 0 && i <= 2); - points[i] = p; -} - - -Point Triangle::getVertex(int i) const -{ - assert(i >= 0 && i <= 2); - return points[i]; -} - - -double Triangle::area() -{ - double a, b, c, s, R; - - // Compute side lengths - a = sqrt(pow(points[0].getX() - points[1].getX(),2.0) + pow(points[0].getY() - points[1].getY(),2.0)); - b = sqrt(pow(points[1].getX() - points[2].getX(),2.0) + pow(points[1].getY() - points[2].getY(),2.0)); - c = sqrt(pow(points[0].getX() - points[2].getX(),2.0) + pow(points[0].getY() - points[2].getY(),2.0)); - - // Compute semiperimiter - s = (a + b + c) / 2.0; - - // Compute area - return sqrt(s*(a + b - s)*(a + c - s)*(b + c - s)); -} - - -Point Triangle::centroid() -{ - Point p; - - p.setX((points[0].getX() + points[1].getX() + points[2].getX()) * 0.33333333); - p.setY((points[0].getY() + points[1].getY() + points[2].getY()) * 0.33333333); - - return p; -} - - -void Triangle::divide_segment(Point p1, Point p2, Point list[], int n) -{ - int i; - Point vec; - - list[0] = p1; - list[n-1] = p2; - - vec.setXY(p2.getX() - p1.getX(), p2.getY() 
- p1.getY()); - - for(i=1; i=2 - int numTri = pow(GLEV - 1, 2); // Number of triangles created by subdividing based on GLEV - int i, j, k; - double density, total_weight; - Point o, c; - Point line[GLEV][GLEV]; - Point p1p2[GLEV]; - Point p1p3[GLEV]; - Point p[numTri][3]; - Triangle t(o,o,o); // Initially, we don't care what t is - - divide_segment(points[0], points[1], p1p2, GLEV); - divide_segment(points[0], points[2], p1p3, GLEV); - - line[0][0] = points[0]; - line[1][0] = p1p2[1]; - line[1][1] = p1p3[1]; - - for (i=2; i=2; i--) { - for(j=1; j xmax) - ngreater++; - } - - /* - * If at least two corners of the triangle are below the lower bound, shift all corners - */ - if (nless > 1) { - for(int j=0; j<3; j++) { - x = points[j].getX(); - points[j].setX(x + xmax - xmin); - } - } - /* - * Else if at least two corners of the triangle are more the upper bound, shift all corners - */ - else if (ngreater > 1) { - for(int j=0; j<3; j++) { - x = points[j].getX(); - points[j].setX(x - xmax + xmin); - } - } - - - /* - * Scan through y-coordinates and count the number that are at most the lower bound - * and that are greater than the upper bound - */ - nless = ngreater = 0; - for(int j=0; j<3; j++) { - y = points[j].getY(); - - if (y <= ymin) - nless++; - else if (y > ymax) - ngreater++; - } - - /* - * If at least two corners of the triangle are below the lower bound, shift all corners - */ - if (nless > 1) { - for(int j=0; j<3; j++) { - y = points[j].getY(); - points[j].setY(y + ymax - ymin); - } - } - /* - * Else if at least two corners of the triangle are more the upper bound, shift all corners - */ - else if (ngreater > 1) { - for(int j=0; j<3; j++) { - y = points[j].getY(); - points[j].setY(y - ymax + ymin); - } - } -} - - -double Triangle::det(double m[3][3]) -{ - return m[0][0] * (m[1][1]*m[2][2] - m[1][2]*m[2][1]) - m[0][1] * (m[1][0]*m[2][2] - m[1][2]*m[2][0]) + m[0][2] * (m[1][0]*m[2][1] - m[1][1]*m[2][0]); -} - - -bool operator==(Triangle& lhs, Triangle& rhs) -{ 
- int a[3], b[3]; - int j; - - a[0] = lhs.getVertex(0).getNum(); - a[1] = lhs.getVertex(1).getNum(); - a[2] = lhs.getVertex(2).getNum(); - - if (a[1] < a[0]) { j = a[1]; a[1] = a[0]; a[0] = j; } - if (a[2] < a[0]) { j = a[2]; a[2] = a[0]; a[0] = j; } - if (a[2] < a[1]) { j = a[2]; a[2] = a[1]; a[1] = j; } - - b[0] = rhs.getVertex(0).getNum(); - b[1] = rhs.getVertex(1).getNum(); - b[2] = rhs.getVertex(2).getNum(); - - if (b[1] < b[0]) { j = b[1]; b[1] = b[0]; b[0] = j; } - if (b[2] < b[0]) { j = b[2]; b[2] = b[0]; b[0] = j; } - if (b[2] < b[1]) { j = b[2]; b[2] = b[1]; b[1] = j; } - - if (a[0] == b[0] && a[1] == b[1] && a[2] == b[2]) - return true; - - return false; -} - - -bool operator<(Triangle const& lhs, Triangle const& rhs) -{ - int a[3], b[3]; - int j; - - a[0] = lhs.points[0].getNum(); - a[1] = lhs.points[1].getNum(); - a[2] = lhs.points[2].getNum(); - - if (a[1] < a[0]) { j = a[1]; a[1] = a[0]; a[0] = j; } - if (a[2] < a[0]) { j = a[2]; a[2] = a[0]; a[0] = j; } - if (a[2] < a[1]) { j = a[2]; a[2] = a[1]; a[1] = j; } - - b[0] = rhs.points[0].getNum(); - b[1] = rhs.points[1].getNum(); - b[2] = rhs.points[2].getNum(); - - if (b[1] < b[0]) { j = b[1]; b[1] = b[0]; b[0] = j; } - if (b[2] < b[0]) { j = b[2]; b[2] = b[0]; b[0] = j; } - if (b[2] < b[1]) { j = b[2]; b[2] = b[1]; b[1] = j; } - - if (a[0] < b[0]) { - return true; - } - else if (a[0] == b[0]) { - if (a[1] < b[1]) { - return true; - } - else if (a[1] == b[1]) { - if (a[2] < b[2]) { - return true; - } - } - } - - return false; -} diff --git a/grid_gen/periodic_general/Triangle.h b/grid_gen/periodic_general/Triangle.h deleted file mode 100644 index 089bb3849..000000000 --- a/grid_gen/periodic_general/Triangle.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef _TriangleH -#define _TriangleH -#include -#include "Point.h" -#include "DensityFunction.h" -using namespace std; - -class Triangle -{ - private: - Point points[3]; - double det(double m[3][3]); - void divide_segment(Point p1, Point p2, Point list[], int n); - 
- public: - Triangle(); - Triangle(Point a, Point b, Point c); - ~Triangle(); - void setVertex(int i, Point p); - Point getVertex(int i) const; - double area(); - Point centroid(); - Point centroid(DensityFunction& d, double * mass); - Point circumcenter(); - void normalizeVertices(double xmin, double xmax, double ymin, double ymax); - friend bool operator==(Triangle& lhs, Triangle& rhs); - friend bool operator<(Triangle const& lhs, Triangle const& rhs); -}; - -#endif diff --git a/grid_gen/periodic_general/cells_hex.ncl b/grid_gen/periodic_general/cells_hex.ncl deleted file mode 100644 index b1db3d299..000000000 --- a/grid_gen/periodic_general/cells_hex.ncl +++ /dev/null @@ -1,186 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - - plotfield = "area" - level = 5 - winds = False - nrows = 100 - ncols = 100 - maxedges = 9 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"wh-bl-gr-ye-re") - - f = addfile("grid.nc","r") - - xCell = f->xCell(:) - yCell = f->yCell(:) - zCell = f->zCell(:) - xEdge = f->xEdge(:) - yEdge = f->yEdge(:) - zEdge = f->zEdge(:) - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - verticesOnCell = f->verticesOnCell(:,:) - edgesOnCell = f->edgesOnCell(:,:) - edgesOnEdge = f->edgesOnEdge(:,:) - verticesOnEdge = f->verticesOnEdge(:,:) - cellsOnEdge = f->cellsOnEdge(:,:) - cellsOnVertex = f->cellsOnVertex(:,:) - edgesOnVertex = f->edgesOnVertex(:,:) - nEdgesOnCell = f->nEdgesOnCell(:) - - res = True - - t = stringtointeger(getenv("T")) - - xpoly = new((/maxedges/), "double") - ypoly = new((/maxedges/), "double") - - xcb = new((/4/), "float") - ycb = new((/4/), "float") - - pres = True - pres@gsnFrame = False - pres@xyLineColor = "Background" - plot = gsn_xy(wks,xCell,yCell,pres) - - if (plotfield .eq. "area") then - fld = f->areaCell(:) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. 
"w") then - fld = f->w(t,:,level) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "t") then - fld = f->theta(t,:,level) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "qr") then - fld = f->qr(t,:,level) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,0) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "vorticity") then - fld = f->vorticity(t,:,0) - minfld = min(fld) - maxfld = max(fld) - end if - scalefac = 198.0/(maxfld - minfld) - - if (plotfield .eq. "vorticity") then - do iRow=1,nrows-2 - do iCol=1,ncols-2 - iCell = iRow*ncols+iCol - do iVertex=2*iCell,2*iCell+1 - do i=0,2 - xpoly(i) = xCell(cellsOnVertex(iVertex,i)-1) - ypoly(i) = yCell(cellsOnVertex(iVertex,i)-1) - res@gsFillColor = doubletointeger((fld(iVertex)-minfld)*scalefac)+2 - end do - gsn_polygon(wks,plot,xpoly,ypoly,res); - end do - end do - end do - end if - - if (plotfield .eq. "area" .or. plotfield .eq. "ke" .or. plotfield .eq. "t" .or. plotfield .eq. "w" .or. plotfield .eq. 
"qr") then - do iRow=1,nrows-2 - do iCol=1,ncols-2 - iCell = iRow*ncols+iCol - do i=0,nEdgesOnCell(iCell)-1 - xpoly(i) = xVertex(verticesOnCell(iCell,i)-1) - ypoly(i) = yVertex(verticesOnCell(iCell,i)-1) - end do - do i=nEdgesOnCell(iCell),maxedges-1 - xpoly(i) = xpoly(0) - ypoly(i) = xpoly(0) - end do - gsn_polyline(wks,plot,xpoly(0:nEdgesOnCell(iCell)-1),ypoly(0:nEdgesOnCell(iCell)-1),res); - end do - end do - do iCell=10000,16083 - do i=0,nEdgesOnCell(iCell)-1 - xpoly(i) = xVertex(verticesOnCell(iCell,i)-1) - ypoly(i) = yVertex(verticesOnCell(iCell,i)-1) - end do - do i=nEdgesOnCell(iCell),maxedges-1 - xpoly(i) = xpoly(0) - ypoly(i) = xpoly(0) - end do - gsn_polyline(wks,plot,xpoly(0:nEdgesOnCell(iCell)-1),ypoly(0:nEdgesOnCell(iCell)-1),res); - end do - end if - - if (winds) then - u = 2.*f->u(t,:,level) - v = 2.*f->v(t,:,level) - alpha = f->angleEdge(:) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - xwind = new(dimsizes(u),float) - ywind = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - xwind(i) = doubletofloat(xEdge(i)) - ywind(i) = doubletofloat(yEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvect(wks, xwind, ywind, u_earth, v_earth) - end if - - ; - ; Draw label bar - ; - tres = True - tres@txAngleF = 90.0 - tres@txFontHeightF = 0.015 - do i=2,200 - xcb(0) = 0.1 + i*0.8/198 - ycb(0) = 0.1 - - xcb(1) = 0.1 + (i+1)*0.8/198 - ycb(1) = 0.1 - - xcb(2) = 0.1 + (i+1)*0.8/198 - ycb(2) = 0.15 - - xcb(3) = 0.1 + i*0.8/198 - ycb(3) = 0.15 - - res@gsFillColor = i - - gsn_polygon_ndc(wks,xcb,ycb,res); - - j = (i-2) % 20 - if ((j .eq. 0) .or. (i .eq. 
200)) then - ff = minfld + (i-2) / scalefac - label = sprintf("%7.3g", ff) - gsn_text_ndc(wks, label, xcb(0), 0.05, tres) - end if - - end do - - frame(wks) - -end - diff --git a/grid_gen/periodic_general/centroids.txt b/grid_gen/periodic_general/centroids.txt deleted file mode 100644 index 62e4e270b..000000000 --- a/grid_gen/periodic_general/centroids.txt +++ /dev/null @@ -1,1600 +0,0 @@ - 0.5 0.866025403784439 - 1.5 0.866025403784439 - 2.5 0.866025403784439 - 3.5 0.866025403784439 - 4.5 0.866025403784439 - 5.5 0.866025403784439 - 6.5 0.866025403784439 - 7.5 0.866025403784439 - 8.5 0.866025403784439 - 9.5 0.866025403784439 - 10.5 0.866025403784439 - 11.5 0.866025403784439 - 12.5 0.866025403784439 - 13.5 0.866025403784439 - 14.5 0.866025403784439 - 15.5 0.866025403784439 - 16.5 0.866025403784439 - 17.5 0.866025403784439 - 18.5 0.866025403784439 - 19.5 0.866025403784439 - 20.5 0.866025403784439 - 21.5 0.866025403784439 - 22.5 0.866025403784439 - 23.5 0.866025403784439 - 24.5 0.866025403784439 - 25.5 0.866025403784439 - 26.5 0.866025403784439 - 27.5 0.866025403784439 - 28.5 0.866025403784439 - 29.5 0.866025403784439 - 30.5 0.866025403784439 - 31.5 0.866025403784439 - 32.5 0.866025403784439 - 33.5 0.866025403784439 - 34.5 0.866025403784439 - 35.5 0.866025403784439 - 36.5 0.866025403784439 - 37.5 0.866025403784439 - 38.5 0.866025403784439 - 39.5 0.866025403784439 - 1 1.73205080756888 - 2 1.73205080756888 - 3 1.73205080756888 - 4 1.73205080756888 - 5 1.73205080756888 - 6 1.73205080756888 - 7 1.73205080756888 - 8 1.73205080756888 - 9 1.73205080756888 - 10 1.73205080756888 - 11 1.73205080756888 - 12 1.73205080756888 - 13 1.73205080756888 - 14 1.73205080756888 - 15 1.73205080756888 - 16 1.73205080756888 - 17 1.73205080756888 - 18 1.73205080756888 - 19 1.73205080756888 - 20 1.73205080756888 - 21 1.73205080756888 - 22 1.73205080756888 - 23 1.73205080756888 - 24 1.73205080756888 - 25 1.73205080756888 - 26 1.73205080756888 - 27 1.73205080756888 - 28 1.73205080756888 - 29 
1.73205080756888 - 30 1.73205080756888 - 31 1.73205080756888 - 32 1.73205080756888 - 33 1.73205080756888 - 34 1.73205080756888 - 35 1.73205080756888 - 36 1.73205080756888 - 37 1.73205080756888 - 38 1.73205080756888 - 39 1.73205080756888 - 40 1.73205080756888 - 0.5 2.59807621135332 - 1.5 2.59807621135332 - 2.5 2.59807621135332 - 3.5 2.59807621135332 - 4.5 2.59807621135332 - 5.5 2.59807621135332 - 6.5 2.59807621135332 - 7.5 2.59807621135332 - 8.5 2.59807621135332 - 9.5 2.59807621135332 - 10.5 2.59807621135332 - 11.5 2.59807621135332 - 12.5 2.59807621135332 - 13.5 2.59807621135332 - 14.5 2.59807621135332 - 15.5 2.59807621135332 - 16.5 2.59807621135332 - 17.5 2.59807621135332 - 18.5 2.59807621135332 - 19.5 2.59807621135332 - 20.5 2.59807621135332 - 21.5 2.59807621135332 - 22.5 2.59807621135332 - 23.5 2.59807621135332 - 24.5 2.59807621135332 - 25.5 2.59807621135332 - 26.5 2.59807621135332 - 27.5 2.59807621135332 - 28.5 2.59807621135332 - 29.5 2.59807621135332 - 30.5 2.59807621135332 - 31.5 2.59807621135332 - 32.5 2.59807621135332 - 33.5 2.59807621135332 - 34.5 2.59807621135332 - 35.5 2.59807621135332 - 36.5 2.59807621135332 - 37.5 2.59807621135332 - 38.5 2.59807621135332 - 39.5 2.59807621135332 - 1 3.46410161513775 - 2 3.46410161513775 - 3 3.46410161513775 - 4 3.46410161513775 - 5 3.46410161513775 - 6 3.46410161513775 - 7 3.46410161513775 - 8 3.46410161513775 - 9 3.46410161513775 - 10 3.46410161513775 - 11 3.46410161513775 - 12 3.46410161513775 - 13 3.46410161513775 - 14 3.46410161513775 - 15 3.46410161513775 - 16 3.46410161513775 - 17 3.46410161513775 - 18 3.46410161513775 - 19 3.46410161513775 - 20 3.46410161513775 - 21 3.46410161513775 - 22 3.46410161513775 - 23 3.46410161513775 - 24 3.46410161513775 - 25 3.46410161513775 - 26 3.46410161513775 - 27 3.46410161513775 - 28 3.46410161513775 - 29 3.46410161513775 - 30 3.46410161513775 - 31 3.46410161513775 - 32 3.46410161513775 - 33 3.46410161513775 - 34 3.46410161513775 - 35 3.46410161513775 - 36 3.46410161513775 - 37 
3.46410161513775 - 38 3.46410161513775 - 39 3.46410161513775 - 40 3.46410161513775 - 0.5 4.33012701892219 - 1.5 4.33012701892219 - 2.5 4.33012701892219 - 3.5 4.33012701892219 - 4.5 4.33012701892219 - 5.5 4.33012701892219 - 6.5 4.33012701892219 - 7.5 4.33012701892219 - 8.5 4.33012701892219 - 9.5 4.33012701892219 - 10.5 4.33012701892219 - 11.5 4.33012701892219 - 12.5 4.33012701892219 - 13.5 4.33012701892219 - 14.5 4.33012701892219 - 15.5 4.33012701892219 - 16.5 4.33012701892219 - 17.5 4.33012701892219 - 18.5 4.33012701892219 - 19.5 4.33012701892219 - 20.5 4.33012701892219 - 21.5 4.33012701892219 - 22.5 4.33012701892219 - 23.5 4.33012701892219 - 24.5 4.33012701892219 - 25.5 4.33012701892219 - 26.5 4.33012701892219 - 27.5 4.33012701892219 - 28.5 4.33012701892219 - 29.5 4.33012701892219 - 30.5 4.33012701892219 - 31.5 4.33012701892219 - 32.5 4.33012701892219 - 33.5 4.33012701892219 - 34.5 4.33012701892219 - 35.5 4.33012701892219 - 36.5 4.33012701892219 - 37.5 4.33012701892219 - 38.5 4.33012701892219 - 39.5 4.33012701892219 - 1 5.19615242270663 - 2 5.19615242270663 - 3 5.19615242270663 - 4 5.19615242270663 - 5 5.19615242270663 - 6 5.19615242270663 - 7 5.19615242270663 - 8 5.19615242270663 - 9 5.19615242270663 - 10 5.19615242270663 - 11 5.19615242270663 - 12 5.19615242270663 - 13 5.19615242270663 - 14 5.19615242270663 - 15 5.19615242270663 - 16 5.19615242270663 - 17 5.19615242270663 - 18 5.19615242270663 - 19 5.19615242270663 - 20 5.19615242270663 - 21 5.19615242270663 - 22 5.19615242270663 - 23 5.19615242270663 - 24 5.19615242270663 - 25 5.19615242270663 - 26 5.19615242270663 - 27 5.19615242270663 - 28 5.19615242270663 - 29 5.19615242270663 - 30 5.19615242270663 - 31 5.19615242270663 - 32 5.19615242270663 - 33 5.19615242270663 - 34 5.19615242270663 - 35 5.19615242270663 - 36 5.19615242270663 - 37 5.19615242270663 - 38 5.19615242270663 - 39 5.19615242270663 - 40 5.19615242270663 - 0.5 6.06217782649107 - 1.5 6.06217782649107 - 2.5 6.06217782649107 - 3.5 6.06217782649107 - 
4.5 6.06217782649107 - 5.5 6.06217782649107 - 6.5 6.06217782649107 - 7.5 6.06217782649107 - 8.5 6.06217782649107 - 9.5 6.06217782649107 - 10.5 6.06217782649107 - 11.5 6.06217782649107 - 12.5 6.06217782649107 - 13.5 6.06217782649107 - 14.5 6.06217782649107 - 15.5 6.06217782649107 - 16.5 6.06217782649107 - 17.5 6.06217782649107 - 18.5 6.06217782649107 - 19.5 6.06217782649107 - 20.5 6.06217782649107 - 21.5 6.06217782649107 - 22.5 6.06217782649107 - 23.5 6.06217782649107 - 24.5 6.06217782649107 - 25.5 6.06217782649107 - 26.5 6.06217782649107 - 27.5 6.06217782649107 - 28.5 6.06217782649107 - 29.5 6.06217782649107 - 30.5 6.06217782649107 - 31.5 6.06217782649107 - 32.5 6.06217782649107 - 33.5 6.06217782649107 - 34.5 6.06217782649107 - 35.5 6.06217782649107 - 36.5 6.06217782649107 - 37.5 6.06217782649107 - 38.5 6.06217782649107 - 39.5 6.06217782649107 - 1 6.92820323027551 - 2 6.92820323027551 - 3 6.92820323027551 - 4 6.92820323027551 - 5 6.92820323027551 - 6 6.92820323027551 - 7 6.92820323027551 - 8 6.92820323027551 - 9 6.92820323027551 - 10 6.92820323027551 - 11 6.92820323027551 - 12 6.92820323027551 - 13 6.92820323027551 - 14 6.92820323027551 - 15 6.92820323027551 - 16 6.92820323027551 - 17 6.92820323027551 - 18 6.92820323027551 - 19 6.92820323027551 - 20 6.92820323027551 - 21 6.92820323027551 - 22 6.92820323027551 - 23 6.92820323027551 - 24 6.92820323027551 - 25 6.92820323027551 - 26 6.92820323027551 - 27 6.92820323027551 - 28 6.92820323027551 - 29 6.92820323027551 - 30 6.92820323027551 - 31 6.92820323027551 - 32 6.92820323027551 - 33 6.92820323027551 - 34 6.92820323027551 - 35 6.92820323027551 - 36 6.92820323027551 - 37 6.92820323027551 - 38 6.92820323027551 - 39 6.92820323027551 - 40 6.92820323027551 - 0.5 7.79422863405995 - 1.5 7.79422863405995 - 2.5 7.79422863405995 - 3.5 7.79422863405995 - 4.5 7.79422863405995 - 5.5 7.79422863405995 - 6.5 7.79422863405995 - 7.5 7.79422863405995 - 8.5 7.79422863405995 - 9.5 7.79422863405995 - 10.5 7.79422863405995 - 11.5 
7.79422863405995 - 12.5 7.79422863405995 - 13.5 7.79422863405995 - 14.5 7.79422863405995 - 15.5 7.79422863405995 - 16.5 7.79422863405995 - 17.5 7.79422863405995 - 18.5 7.79422863405995 - 19.5 7.79422863405995 - 20.5 7.79422863405995 - 21.5 7.79422863405995 - 22.5 7.79422863405995 - 23.5 7.79422863405995 - 24.5 7.79422863405995 - 25.5 7.79422863405995 - 26.5 7.79422863405995 - 27.5 7.79422863405995 - 28.5 7.79422863405995 - 29.5 7.79422863405995 - 30.5 7.79422863405995 - 31.5 7.79422863405995 - 32.5 7.79422863405995 - 33.5 7.79422863405995 - 34.5 7.79422863405995 - 35.5 7.79422863405995 - 36.5 7.79422863405995 - 37.5 7.79422863405995 - 38.5 7.79422863405995 - 39.5 7.79422863405995 - 1 8.66025403784439 - 2 8.66025403784439 - 3 8.66025403784439 - 4 8.66025403784439 - 5 8.66025403784439 - 6 8.66025403784439 - 7 8.66025403784439 - 8 8.66025403784439 - 9 8.66025403784439 - 10 8.66025403784439 - 11 8.66025403784439 - 12 8.66025403784439 - 13 8.66025403784439 - 14 8.66025403784439 - 15 8.66025403784439 - 16 8.66025403784439 - 17 8.66025403784439 - 18 8.66025403784439 - 19 8.66025403784439 - 20 8.66025403784439 - 21 8.66025403784439 - 22 8.66025403784439 - 23 8.66025403784439 - 24 8.66025403784439 - 25 8.66025403784439 - 26 8.66025403784439 - 27 8.66025403784439 - 28 8.66025403784439 - 29 8.66025403784439 - 30 8.66025403784439 - 31 8.66025403784439 - 32 8.66025403784439 - 33 8.66025403784439 - 34 8.66025403784439 - 35 8.66025403784439 - 36 8.66025403784439 - 37 8.66025403784439 - 38 8.66025403784439 - 39 8.66025403784439 - 40 8.66025403784439 - 0.5 9.52627944162882 - 1.5 9.52627944162882 - 2.5 9.52627944162882 - 3.5 9.52627944162882 - 4.5 9.52627944162882 - 5.5 9.52627944162882 - 6.5 9.52627944162882 - 7.5 9.52627944162882 - 8.5 9.52627944162882 - 9.5 9.52627944162882 - 10.5 9.52627944162882 - 11.5 9.52627944162882 - 12.5 9.52627944162882 - 13.5 9.52627944162882 - 14.5 9.52627944162882 - 15.5 9.52627944162882 - 16.5 9.52627944162882 - 17.5 9.52627944162882 - 18.5 
9.52627944162882 - 19.5 9.52627944162882 - 20.5 9.52627944162882 - 21.5 9.52627944162882 - 22.5 9.52627944162882 - 23.5 9.52627944162882 - 24.5 9.52627944162882 - 25.5 9.52627944162882 - 26.5 9.52627944162882 - 27.5 9.52627944162882 - 28.5 9.52627944162882 - 29.5 9.52627944162882 - 30.5 9.52627944162882 - 31.5 9.52627944162882 - 32.5 9.52627944162882 - 33.5 9.52627944162882 - 34.5 9.52627944162882 - 35.5 9.52627944162882 - 36.5 9.52627944162882 - 37.5 9.52627944162882 - 38.5 9.52627944162882 - 39.5 9.52627944162882 - 1 10.3923048454133 - 2 10.3923048454133 - 3 10.3923048454133 - 4 10.3923048454133 - 5 10.3923048454133 - 6 10.3923048454133 - 7 10.3923048454133 - 8 10.3923048454133 - 9 10.3923048454133 - 10 10.3923048454133 - 11 10.3923048454133 - 12 10.3923048454133 - 13 10.3923048454133 - 14 10.3923048454133 - 15 10.3923048454133 - 16 10.3923048454133 - 17 10.3923048454133 - 18 10.3923048454133 - 19 10.3923048454133 - 20 10.3923048454133 - 21 10.3923048454133 - 22 10.3923048454133 - 23 10.3923048454133 - 24 10.3923048454133 - 25 10.3923048454133 - 26 10.3923048454133 - 27 10.3923048454133 - 28 10.3923048454133 - 29 10.3923048454133 - 30 10.3923048454133 - 31 10.3923048454133 - 32 10.3923048454133 - 33 10.3923048454133 - 34 10.3923048454133 - 35 10.3923048454133 - 36 10.3923048454133 - 37 10.3923048454133 - 38 10.3923048454133 - 39 10.3923048454133 - 40 10.3923048454133 - 0.5 11.2583302491977 - 1.5 11.2583302491977 - 2.5 11.2583302491977 - 3.5 11.2583302491977 - 4.5 11.2583302491977 - 5.5 11.2583302491977 - 6.5 11.2583302491977 - 7.5 11.2583302491977 - 8.5 11.2583302491977 - 9.5 11.2583302491977 - 10.5 11.2583302491977 - 11.5 11.2583302491977 - 12.5 11.2583302491977 - 13.5 11.2583302491977 - 14.5 11.2583302491977 - 15.5 11.2583302491977 - 16.5 11.2583302491977 - 17.5 11.2583302491977 - 18.5 11.2583302491977 - 19.5 11.2583302491977 - 20.5 11.2583302491977 - 21.5 11.2583302491977 - 22.5 11.2583302491977 - 23.5 11.2583302491977 - 24.5 11.2583302491977 - 25.5 
11.2583302491977 - 26.5 11.2583302491977 - 27.5 11.2583302491977 - 28.5 11.2583302491977 - 29.5 11.2583302491977 - 30.5 11.2583302491977 - 31.5 11.2583302491977 - 32.5 11.2583302491977 - 33.5 11.2583302491977 - 34.5 11.2583302491977 - 35.5 11.2583302491977 - 36.5 11.2583302491977 - 37.5 11.2583302491977 - 38.5 11.2583302491977 - 39.5 11.2583302491977 - 1 12.1243556529821 - 2 12.1243556529821 - 3 12.1243556529821 - 4 12.1243556529821 - 5 12.1243556529821 - 6 12.1243556529821 - 7 12.1243556529821 - 8 12.1243556529821 - 9 12.1243556529821 - 10 12.1243556529821 - 11 12.1243556529821 - 12 12.1243556529821 - 13 12.1243556529821 - 14 12.1243556529821 - 15 12.1243556529821 - 16 12.1243556529821 - 17 12.1243556529821 - 18 12.1243556529821 - 19 12.1243556529821 - 20 12.1243556529821 - 21 12.1243556529821 - 22 12.1243556529821 - 23 12.1243556529821 - 24 12.1243556529821 - 25 12.1243556529821 - 26 12.1243556529821 - 27 12.1243556529821 - 28 12.1243556529821 - 29 12.1243556529821 - 30 12.1243556529821 - 31 12.1243556529821 - 32 12.1243556529821 - 33 12.1243556529821 - 34 12.1243556529821 - 35 12.1243556529821 - 36 12.1243556529821 - 37 12.1243556529821 - 38 12.1243556529821 - 39 12.1243556529821 - 40 12.1243556529821 - 0.5 12.9903810567666 - 1.5 12.9903810567666 - 2.5 12.9903810567666 - 3.5 12.9903810567666 - 4.5 12.9903810567666 - 5.5 12.9903810567666 - 6.5 12.9903810567666 - 7.5 12.9903810567666 - 8.5 12.9903810567666 - 9.5 12.9903810567666 - 10.5 12.9903810567666 - 11.5 12.9903810567666 - 12.5 12.9903810567666 - 13.5 12.9903810567666 - 14.5 12.9903810567666 - 15.5 12.9903810567666 - 16.5 12.9903810567666 - 17.5 12.9903810567666 - 18.5 12.9903810567666 - 19.5 12.9903810567666 - 20.5 12.9903810567666 - 21.5 12.9903810567666 - 22.5 12.9903810567666 - 23.5 12.9903810567666 - 24.5 12.9903810567666 - 25.5 12.9903810567666 - 26.5 12.9903810567666 - 27.5 12.9903810567666 - 28.5 12.9903810567666 - 29.5 12.9903810567666 - 30.5 12.9903810567666 - 31.5 12.9903810567666 - 32.5 
12.9903810567666 - 33.5 12.9903810567666 - 34.5 12.9903810567666 - 35.5 12.9903810567666 - 36.5 12.9903810567666 - 37.5 12.9903810567666 - 38.5 12.9903810567666 - 39.5 12.9903810567666 - 1 13.856406460551 - 2 13.856406460551 - 3 13.856406460551 - 4 13.856406460551 - 5 13.856406460551 - 6 13.856406460551 - 7 13.856406460551 - 8 13.856406460551 - 9 13.856406460551 - 10 13.856406460551 - 11 13.856406460551 - 12 13.856406460551 - 13 13.856406460551 - 14 13.856406460551 - 15 13.856406460551 - 16 13.856406460551 - 17 13.856406460551 - 18 13.856406460551 - 19 13.856406460551 - 20 13.856406460551 - 21 13.856406460551 - 22 13.856406460551 - 23 13.856406460551 - 24 13.856406460551 - 25 13.856406460551 - 26 13.856406460551 - 27 13.856406460551 - 28 13.856406460551 - 29 13.856406460551 - 30 13.856406460551 - 31 13.856406460551 - 32 13.856406460551 - 33 13.856406460551 - 34 13.856406460551 - 35 13.856406460551 - 36 13.856406460551 - 37 13.856406460551 - 38 13.856406460551 - 39 13.856406460551 - 40 13.856406460551 - 0.5 14.7224318643355 - 1.5 14.7224318643355 - 2.5 14.7224318643355 - 3.5 14.7224318643355 - 4.5 14.7224318643355 - 5.5 14.7224318643355 - 6.5 14.7224318643355 - 7.5 14.7224318643355 - 8.5 14.7224318643355 - 9.5 14.7224318643355 - 10.5 14.7224318643355 - 11.5 14.7224318643355 - 12.5 14.7224318643355 - 13.5 14.7224318643355 - 14.5 14.7224318643355 - 15.5 14.7224318643355 - 16.5 14.7224318643355 - 17.5 14.7224318643355 - 18.5 14.7224318643355 - 19.5 14.7224318643355 - 20.5 14.7224318643355 - 21.5 14.7224318643355 - 22.5 14.7224318643355 - 23.5 14.7224318643355 - 24.5 14.7224318643355 - 25.5 14.7224318643355 - 26.5 14.7224318643355 - 27.5 14.7224318643355 - 28.5 14.7224318643355 - 29.5 14.7224318643355 - 30.5 14.7224318643355 - 31.5 14.7224318643355 - 32.5 14.7224318643355 - 33.5 14.7224318643355 - 34.5 14.7224318643355 - 35.5 14.7224318643355 - 36.5 14.7224318643355 - 37.5 14.7224318643355 - 38.5 14.7224318643355 - 39.5 14.7224318643355 - 1 15.5884572681199 - 2 
15.5884572681199 - 3 15.5884572681199 - 4 15.5884572681199 - 5 15.5884572681199 - 6 15.5884572681199 - 7 15.5884572681199 - 8 15.5884572681199 - 9 15.5884572681199 - 10 15.5884572681199 - 11 15.5884572681199 - 12 15.5884572681199 - 13 15.5884572681199 - 14 15.5884572681199 - 15 15.5884572681199 - 16 15.5884572681199 - 17 15.5884572681199 - 18 15.5884572681199 - 19 15.5884572681199 - 20 15.5884572681199 - 21 15.5884572681199 - 22 15.5884572681199 - 23 15.5884572681199 - 24 15.5884572681199 - 25 15.5884572681199 - 26 15.5884572681199 - 27 15.5884572681199 - 28 15.5884572681199 - 29 15.5884572681199 - 30 15.5884572681199 - 31 15.5884572681199 - 32 15.5884572681199 - 33 15.5884572681199 - 34 15.5884572681199 - 35 15.5884572681199 - 36 15.5884572681199 - 37 15.5884572681199 - 38 15.5884572681199 - 39 15.5884572681199 - 40 15.5884572681199 - 0.5 16.4544826719043 - 1.5 16.4544826719043 - 2.5 16.4544826719043 - 3.5 16.4544826719043 - 4.5 16.4544826719043 - 5.5 16.4544826719043 - 6.5 16.4544826719043 - 7.5 16.4544826719043 - 8.5 16.4544826719043 - 9.5 16.4544826719043 - 10.5 16.4544826719043 - 11.5 16.4544826719043 - 12.5 16.4544826719043 - 13.5 16.4544826719043 - 14.5 16.4544826719043 - 15.5 16.4544826719043 - 16.5 16.4544826719043 - 17.5 16.4544826719043 - 18.5 16.4544826719043 - 19.5 16.4544826719043 - 20.5 16.4544826719043 - 21.5 16.4544826719043 - 22.5 16.4544826719043 - 23.5 16.4544826719043 - 24.5 16.4544826719043 - 25.5 16.4544826719043 - 26.5 16.4544826719043 - 27.5 16.4544826719043 - 28.5 16.4544826719043 - 29.5 16.4544826719043 - 30.5 16.4544826719043 - 31.5 16.4544826719043 - 32.5 16.4544826719043 - 33.5 16.4544826719043 - 34.5 16.4544826719043 - 35.5 16.4544826719043 - 36.5 16.4544826719043 - 37.5 16.4544826719043 - 38.5 16.4544826719043 - 39.5 16.4544826719043 - 1 17.3205080756888 - 2 17.3205080756888 - 3 17.3205080756888 - 4 17.3205080756888 - 5 17.3205080756888 - 6 17.3205080756888 - 7 17.3205080756888 - 8 17.3205080756888 - 9 17.3205080756888 - 10 
17.3205080756888 - 11 17.3205080756888 - 12 17.3205080756888 - 13 17.3205080756888 - 14 17.3205080756888 - 15 17.3205080756888 - 16 17.3205080756888 - 17 17.3205080756888 - 18 17.3205080756888 - 19 17.3205080756888 - 20 17.3205080756888 - 21 17.3205080756888 - 22 17.3205080756888 - 23 17.3205080756888 - 24 17.3205080756888 - 25 17.3205080756888 - 26 17.3205080756888 - 27 17.3205080756888 - 28 17.3205080756888 - 29 17.3205080756888 - 30 17.3205080756888 - 31 17.3205080756888 - 32 17.3205080756888 - 33 17.3205080756888 - 34 17.3205080756888 - 35 17.3205080756888 - 36 17.3205080756888 - 37 17.3205080756888 - 38 17.3205080756888 - 39 17.3205080756888 - 40 17.3205080756888 - 0.5 18.1865334794732 - 1.5 18.1865334794732 - 2.5 18.1865334794732 - 3.5 18.1865334794732 - 4.5 18.1865334794732 - 5.5 18.1865334794732 - 6.5 18.1865334794732 - 7.5 18.1865334794732 - 8.5 18.1865334794732 - 9.5 18.1865334794732 - 10.5 18.1865334794732 - 11.5 18.1865334794732 - 12.5 18.1865334794732 - 13.5 18.1865334794732 - 14.5 18.1865334794732 - 15.5 18.1865334794732 - 16.5 18.1865334794732 - 17.5 18.1865334794732 - 18.5 18.1865334794732 - 19.5 18.1865334794732 - 20.5 18.1865334794732 - 21.5 18.1865334794732 - 22.5 18.1865334794732 - 23.5 18.1865334794732 - 24.5 18.1865334794732 - 25.5 18.1865334794732 - 26.5 18.1865334794732 - 27.5 18.1865334794732 - 28.5 18.1865334794732 - 29.5 18.1865334794732 - 30.5 18.1865334794732 - 31.5 18.1865334794732 - 32.5 18.1865334794732 - 33.5 18.1865334794732 - 34.5 18.1865334794732 - 35.5 18.1865334794732 - 36.5 18.1865334794732 - 37.5 18.1865334794732 - 38.5 18.1865334794732 - 39.5 18.1865334794732 - 1 19.0525588832576 - 2 19.0525588832576 - 3 19.0525588832576 - 4 19.0525588832576 - 5 19.0525588832576 - 6 19.0525588832576 - 7 19.0525588832576 - 8 19.0525588832576 - 9 19.0525588832576 - 10 19.0525588832576 - 11 19.0525588832576 - 12 19.0525588832576 - 13 19.0525588832576 - 14 19.0525588832576 - 15 19.0525588832576 - 16 19.0525588832576 - 17 19.0525588832576 - 18 
19.0525588832576 - 19 19.0525588832576 - 20 19.0525588832576 - 21 19.0525588832576 - 22 19.0525588832576 - 23 19.0525588832576 - 24 19.0525588832576 - 25 19.0525588832576 - 26 19.0525588832576 - 27 19.0525588832576 - 28 19.0525588832576 - 29 19.0525588832576 - 30 19.0525588832576 - 31 19.0525588832576 - 32 19.0525588832576 - 33 19.0525588832576 - 34 19.0525588832576 - 35 19.0525588832576 - 36 19.0525588832576 - 37 19.0525588832576 - 38 19.0525588832576 - 39 19.0525588832576 - 40 19.0525588832576 - 0.5 19.9185842870421 - 1.5 19.9185842870421 - 2.5 19.9185842870421 - 3.5 19.9185842870421 - 4.5 19.9185842870421 - 5.5 19.9185842870421 - 6.5 19.9185842870421 - 7.5 19.9185842870421 - 8.5 19.9185842870421 - 9.5 19.9185842870421 - 10.5 19.9185842870421 - 11.5 19.9185842870421 - 12.5 19.9185842870421 - 13.5 19.9185842870421 - 14.5 19.9185842870421 - 15.5 19.9185842870421 - 16.5 19.9185842870421 - 17.5 19.9185842870421 - 18.5 19.9185842870421 - 19.5 19.9185842870421 - 20.5 19.9185842870421 - 21.5 19.9185842870421 - 22.5 19.9185842870421 - 23.5 19.9185842870421 - 24.5 19.9185842870421 - 25.5 19.9185842870421 - 26.5 19.9185842870421 - 27.5 19.9185842870421 - 28.5 19.9185842870421 - 29.5 19.9185842870421 - 30.5 19.9185842870421 - 31.5 19.9185842870421 - 32.5 19.9185842870421 - 33.5 19.9185842870421 - 34.5 19.9185842870421 - 35.5 19.9185842870421 - 36.5 19.9185842870421 - 37.5 19.9185842870421 - 38.5 19.9185842870421 - 39.5 19.9185842870421 - 1 20.7846096908265 - 2 20.7846096908265 - 3 20.7846096908265 - 4 20.7846096908265 - 5 20.7846096908265 - 6 20.7846096908265 - 7 20.7846096908265 - 8 20.7846096908265 - 9 20.7846096908265 - 10 20.7846096908265 - 11 20.7846096908265 - 12 20.7846096908265 - 13 20.7846096908265 - 14 20.7846096908265 - 15 20.7846096908265 - 16 20.7846096908265 - 17 20.7846096908265 - 18 20.7846096908265 - 19 20.7846096908265 - 20 20.7846096908265 - 21 20.7846096908265 - 22 20.7846096908265 - 23 20.7846096908265 - 24 20.7846096908265 - 25 20.7846096908265 - 26 
20.7846096908265 - 27 20.7846096908265 - 28 20.7846096908265 - 29 20.7846096908265 - 30 20.7846096908265 - 31 20.7846096908265 - 32 20.7846096908265 - 33 20.7846096908265 - 34 20.7846096908265 - 35 20.7846096908265 - 36 20.7846096908265 - 37 20.7846096908265 - 38 20.7846096908265 - 39 20.7846096908265 - 40 20.7846096908265 - 0.5 21.650635094611 - 1.5 21.650635094611 - 2.5 21.650635094611 - 3.5 21.650635094611 - 4.5 21.650635094611 - 5.5 21.650635094611 - 6.5 21.650635094611 - 7.5 21.650635094611 - 8.5 21.650635094611 - 9.5 21.650635094611 - 10.5 21.650635094611 - 11.5 21.650635094611 - 12.5 21.650635094611 - 13.5 21.650635094611 - 14.5 21.650635094611 - 15.5 21.650635094611 - 16.5 21.650635094611 - 17.5 21.650635094611 - 18.5 21.650635094611 - 19.5 21.650635094611 - 20.5 21.650635094611 - 21.5 21.650635094611 - 22.5 21.650635094611 - 23.5 21.650635094611 - 24.5 21.650635094611 - 25.5 21.650635094611 - 26.5 21.650635094611 - 27.5 21.650635094611 - 28.5 21.650635094611 - 29.5 21.650635094611 - 30.5 21.650635094611 - 31.5 21.650635094611 - 32.5 21.650635094611 - 33.5 21.650635094611 - 34.5 21.650635094611 - 35.5 21.650635094611 - 36.5 21.650635094611 - 37.5 21.650635094611 - 38.5 21.650635094611 - 39.5 21.650635094611 - 1 22.5166604983954 - 2 22.5166604983954 - 3 22.5166604983954 - 4 22.5166604983954 - 5 22.5166604983954 - 6 22.5166604983954 - 7 22.5166604983954 - 8 22.5166604983954 - 9 22.5166604983954 - 10 22.5166604983954 - 11 22.5166604983954 - 12 22.5166604983954 - 13 22.5166604983954 - 14 22.5166604983954 - 15 22.5166604983954 - 16 22.5166604983954 - 17 22.5166604983954 - 18 22.5166604983954 - 19 22.5166604983954 - 20 22.5166604983954 - 21 22.5166604983954 - 22 22.5166604983954 - 23 22.5166604983954 - 24 22.5166604983954 - 25 22.5166604983954 - 26 22.5166604983954 - 27 22.5166604983954 - 28 22.5166604983954 - 29 22.5166604983954 - 30 22.5166604983954 - 31 22.5166604983954 - 32 22.5166604983954 - 33 22.5166604983954 - 34 22.5166604983954 - 35 22.5166604983954 - 
36 22.5166604983954 - 37 22.5166604983954 - 38 22.5166604983954 - 39 22.5166604983954 - 40 22.5166604983954 - 0.5 23.3826859021798 - 1.5 23.3826859021798 - 2.5 23.3826859021798 - 3.5 23.3826859021798 - 4.5 23.3826859021798 - 5.5 23.3826859021798 - 6.5 23.3826859021798 - 7.5 23.3826859021798 - 8.5 23.3826859021798 - 9.5 23.3826859021798 - 10.5 23.3826859021798 - 11.5 23.3826859021798 - 12.5 23.3826859021798 - 13.5 23.3826859021798 - 14.5 23.3826859021798 - 15.5 23.3826859021798 - 16.5 23.3826859021798 - 17.5 23.3826859021798 - 18.5 23.3826859021798 - 19.5 23.3826859021798 - 20.5 23.3826859021798 - 21.5 23.3826859021798 - 22.5 23.3826859021798 - 23.5 23.3826859021798 - 24.5 23.3826859021798 - 25.5 23.3826859021798 - 26.5 23.3826859021798 - 27.5 23.3826859021798 - 28.5 23.3826859021798 - 29.5 23.3826859021798 - 30.5 23.3826859021798 - 31.5 23.3826859021798 - 32.5 23.3826859021798 - 33.5 23.3826859021798 - 34.5 23.3826859021798 - 35.5 23.3826859021798 - 36.5 23.3826859021798 - 37.5 23.3826859021798 - 38.5 23.3826859021798 - 39.5 23.3826859021798 - 1 24.2487113059643 - 2 24.2487113059643 - 3 24.2487113059643 - 4 24.2487113059643 - 5 24.2487113059643 - 6 24.2487113059643 - 7 24.2487113059643 - 8 24.2487113059643 - 9 24.2487113059643 - 10 24.2487113059643 - 11 24.2487113059643 - 12 24.2487113059643 - 13 24.2487113059643 - 14 24.2487113059643 - 15 24.2487113059643 - 16 24.2487113059643 - 17 24.2487113059643 - 18 24.2487113059643 - 19 24.2487113059643 - 20 24.2487113059643 - 21 24.2487113059643 - 22 24.2487113059643 - 23 24.2487113059643 - 24 24.2487113059643 - 25 24.2487113059643 - 26 24.2487113059643 - 27 24.2487113059643 - 28 24.2487113059643 - 29 24.2487113059643 - 30 24.2487113059643 - 31 24.2487113059643 - 32 24.2487113059643 - 33 24.2487113059643 - 34 24.2487113059643 - 35 24.2487113059643 - 36 24.2487113059643 - 37 24.2487113059643 - 38 24.2487113059643 - 39 24.2487113059643 - 40 24.2487113059643 - 0.5 25.1147367097487 - 1.5 25.1147367097487 - 2.5 25.1147367097487 - 
3.5 25.1147367097487 - 4.5 25.1147367097487 - 5.5 25.1147367097487 - 6.5 25.1147367097487 - 7.5 25.1147367097487 - 8.5 25.1147367097487 - 9.5 25.1147367097487 - 10.5 25.1147367097487 - 11.5 25.1147367097487 - 12.5 25.1147367097487 - 13.5 25.1147367097487 - 14.5 25.1147367097487 - 15.5 25.1147367097487 - 16.5 25.1147367097487 - 17.5 25.1147367097487 - 18.5 25.1147367097487 - 19.5 25.1147367097487 - 20.5 25.1147367097487 - 21.5 25.1147367097487 - 22.5 25.1147367097487 - 23.5 25.1147367097487 - 24.5 25.1147367097487 - 25.5 25.1147367097487 - 26.5 25.1147367097487 - 27.5 25.1147367097487 - 28.5 25.1147367097487 - 29.5 25.1147367097487 - 30.5 25.1147367097487 - 31.5 25.1147367097487 - 32.5 25.1147367097487 - 33.5 25.1147367097487 - 34.5 25.1147367097487 - 35.5 25.1147367097487 - 36.5 25.1147367097487 - 37.5 25.1147367097487 - 38.5 25.1147367097487 - 39.5 25.1147367097487 - 1 25.9807621135332 - 2 25.9807621135332 - 3 25.9807621135332 - 4 25.9807621135332 - 5 25.9807621135332 - 6 25.9807621135332 - 7 25.9807621135332 - 8 25.9807621135332 - 9 25.9807621135332 - 10 25.9807621135332 - 11 25.9807621135332 - 12 25.9807621135332 - 13 25.9807621135332 - 14 25.9807621135332 - 15 25.9807621135332 - 16 25.9807621135332 - 17 25.9807621135332 - 18 25.9807621135332 - 19 25.9807621135332 - 20 25.9807621135332 - 21 25.9807621135332 - 22 25.9807621135332 - 23 25.9807621135332 - 24 25.9807621135332 - 25 25.9807621135332 - 26 25.9807621135332 - 27 25.9807621135332 - 28 25.9807621135332 - 29 25.9807621135332 - 30 25.9807621135332 - 31 25.9807621135332 - 32 25.9807621135332 - 33 25.9807621135332 - 34 25.9807621135332 - 35 25.9807621135332 - 36 25.9807621135332 - 37 25.9807621135332 - 38 25.9807621135332 - 39 25.9807621135332 - 40 25.9807621135332 - 0.5 26.8467875173176 - 1.5 26.8467875173176 - 2.5 26.8467875173176 - 3.5 26.8467875173176 - 4.5 26.8467875173176 - 5.5 26.8467875173176 - 6.5 26.8467875173176 - 7.5 26.8467875173176 - 8.5 26.8467875173176 - 9.5 26.8467875173176 - 10.5 
26.8467875173176 - 11.5 26.8467875173176 - 12.5 26.8467875173176 - 13.5 26.8467875173176 - 14.5 26.8467875173176 - 15.5 26.8467875173176 - 16.5 26.8467875173176 - 17.5 26.8467875173176 - 18.5 26.8467875173176 - 19.5 26.8467875173176 - 20.5 26.8467875173176 - 21.5 26.8467875173176 - 22.5 26.8467875173176 - 23.5 26.8467875173176 - 24.5 26.8467875173176 - 25.5 26.8467875173176 - 26.5 26.8467875173176 - 27.5 26.8467875173176 - 28.5 26.8467875173176 - 29.5 26.8467875173176 - 30.5 26.8467875173176 - 31.5 26.8467875173176 - 32.5 26.8467875173176 - 33.5 26.8467875173176 - 34.5 26.8467875173176 - 35.5 26.8467875173176 - 36.5 26.8467875173176 - 37.5 26.8467875173176 - 38.5 26.8467875173176 - 39.5 26.8467875173176 - 1 27.712812921102 - 2 27.712812921102 - 3 27.712812921102 - 4 27.712812921102 - 5 27.712812921102 - 6 27.712812921102 - 7 27.712812921102 - 8 27.712812921102 - 9 27.712812921102 - 10 27.712812921102 - 11 27.712812921102 - 12 27.712812921102 - 13 27.712812921102 - 14 27.712812921102 - 15 27.712812921102 - 16 27.712812921102 - 17 27.712812921102 - 18 27.712812921102 - 19 27.712812921102 - 20 27.712812921102 - 21 27.712812921102 - 22 27.712812921102 - 23 27.712812921102 - 24 27.712812921102 - 25 27.712812921102 - 26 27.712812921102 - 27 27.712812921102 - 28 27.712812921102 - 29 27.712812921102 - 30 27.712812921102 - 31 27.712812921102 - 32 27.712812921102 - 33 27.712812921102 - 34 27.712812921102 - 35 27.712812921102 - 36 27.712812921102 - 37 27.712812921102 - 38 27.712812921102 - 39 27.712812921102 - 40 27.712812921102 - 0.5 28.5788383248865 - 1.5 28.5788383248865 - 2.5 28.5788383248865 - 3.5 28.5788383248865 - 4.5 28.5788383248865 - 5.5 28.5788383248865 - 6.5 28.5788383248865 - 7.5 28.5788383248865 - 8.5 28.5788383248865 - 9.5 28.5788383248865 - 10.5 28.5788383248865 - 11.5 28.5788383248865 - 12.5 28.5788383248865 - 13.5 28.5788383248865 - 14.5 28.5788383248865 - 15.5 28.5788383248865 - 16.5 28.5788383248865 - 17.5 28.5788383248865 - 18.5 28.5788383248865 - 19.5 
28.5788383248865 - 20.5 28.5788383248865 - 21.5 28.5788383248865 - 22.5 28.5788383248865 - 23.5 28.5788383248865 - 24.5 28.5788383248865 - 25.5 28.5788383248865 - 26.5 28.5788383248865 - 27.5 28.5788383248865 - 28.5 28.5788383248865 - 29.5 28.5788383248865 - 30.5 28.5788383248865 - 31.5 28.5788383248865 - 32.5 28.5788383248865 - 33.5 28.5788383248865 - 34.5 28.5788383248865 - 35.5 28.5788383248865 - 36.5 28.5788383248865 - 37.5 28.5788383248865 - 38.5 28.5788383248865 - 39.5 28.5788383248865 - 1 29.4448637286709 - 2 29.4448637286709 - 3 29.4448637286709 - 4 29.4448637286709 - 5 29.4448637286709 - 6 29.4448637286709 - 7 29.4448637286709 - 8 29.4448637286709 - 9 29.4448637286709 - 10 29.4448637286709 - 11 29.4448637286709 - 12 29.4448637286709 - 13 29.4448637286709 - 14 29.4448637286709 - 15 29.4448637286709 - 16 29.4448637286709 - 17 29.4448637286709 - 18 29.4448637286709 - 19 29.4448637286709 - 20 29.4448637286709 - 21 29.4448637286709 - 22 29.4448637286709 - 23 29.4448637286709 - 24 29.4448637286709 - 25 29.4448637286709 - 26 29.4448637286709 - 27 29.4448637286709 - 28 29.4448637286709 - 29 29.4448637286709 - 30 29.4448637286709 - 31 29.4448637286709 - 32 29.4448637286709 - 33 29.4448637286709 - 34 29.4448637286709 - 35 29.4448637286709 - 36 29.4448637286709 - 37 29.4448637286709 - 38 29.4448637286709 - 39 29.4448637286709 - 40 29.4448637286709 - 0.5 30.3108891324554 - 1.5 30.3108891324554 - 2.5 30.3108891324554 - 3.5 30.3108891324554 - 4.5 30.3108891324554 - 5.5 30.3108891324554 - 6.5 30.3108891324554 - 7.5 30.3108891324554 - 8.5 30.3108891324554 - 9.5 30.3108891324554 - 10.5 30.3108891324554 - 11.5 30.3108891324554 - 12.5 30.3108891324554 - 13.5 30.3108891324554 - 14.5 30.3108891324554 - 15.5 30.3108891324554 - 16.5 30.3108891324554 - 17.5 30.3108891324554 - 18.5 30.3108891324554 - 19.5 30.3108891324554 - 20.5 30.3108891324554 - 21.5 30.3108891324554 - 22.5 30.3108891324554 - 23.5 30.3108891324554 - 24.5 30.3108891324554 - 25.5 30.3108891324554 - 26.5 
30.3108891324554 - 27.5 30.3108891324554 - 28.5 30.3108891324554 - 29.5 30.3108891324554 - 30.5 30.3108891324554 - 31.5 30.3108891324554 - 32.5 30.3108891324554 - 33.5 30.3108891324554 - 34.5 30.3108891324554 - 35.5 30.3108891324554 - 36.5 30.3108891324554 - 37.5 30.3108891324554 - 38.5 30.3108891324554 - 39.5 30.3108891324554 - 1 31.1769145362398 - 2 31.1769145362398 - 3 31.1769145362398 - 4 31.1769145362398 - 5 31.1769145362398 - 6 31.1769145362398 - 7 31.1769145362398 - 8 31.1769145362398 - 9 31.1769145362398 - 10 31.1769145362398 - 11 31.1769145362398 - 12 31.1769145362398 - 13 31.1769145362398 - 14 31.1769145362398 - 15 31.1769145362398 - 16 31.1769145362398 - 17 31.1769145362398 - 18 31.1769145362398 - 19 31.1769145362398 - 20 31.1769145362398 - 21 31.1769145362398 - 22 31.1769145362398 - 23 31.1769145362398 - 24 31.1769145362398 - 25 31.1769145362398 - 26 31.1769145362398 - 27 31.1769145362398 - 28 31.1769145362398 - 29 31.1769145362398 - 30 31.1769145362398 - 31 31.1769145362398 - 32 31.1769145362398 - 33 31.1769145362398 - 34 31.1769145362398 - 35 31.1769145362398 - 36 31.1769145362398 - 37 31.1769145362398 - 38 31.1769145362398 - 39 31.1769145362398 - 40 31.1769145362398 - 0.5 32.0429399400242 - 1.5 32.0429399400242 - 2.5 32.0429399400242 - 3.5 32.0429399400242 - 4.5 32.0429399400242 - 5.5 32.0429399400242 - 6.5 32.0429399400242 - 7.5 32.0429399400242 - 8.5 32.0429399400242 - 9.5 32.0429399400242 - 10.5 32.0429399400242 - 11.5 32.0429399400242 - 12.5 32.0429399400242 - 13.5 32.0429399400242 - 14.5 32.0429399400242 - 15.5 32.0429399400242 - 16.5 32.0429399400242 - 17.5 32.0429399400242 - 18.5 32.0429399400242 - 19.5 32.0429399400242 - 20.5 32.0429399400242 - 21.5 32.0429399400242 - 22.5 32.0429399400242 - 23.5 32.0429399400242 - 24.5 32.0429399400242 - 25.5 32.0429399400242 - 26.5 32.0429399400242 - 27.5 32.0429399400242 - 28.5 32.0429399400242 - 29.5 32.0429399400242 - 30.5 32.0429399400242 - 31.5 32.0429399400242 - 32.5 32.0429399400242 - 33.5 
32.0429399400242 - 34.5 32.0429399400242 - 35.5 32.0429399400242 - 36.5 32.0429399400242 - 37.5 32.0429399400242 - 38.5 32.0429399400242 - 39.5 32.0429399400242 - 1 32.9089653438087 - 2 32.9089653438087 - 3 32.9089653438087 - 4 32.9089653438087 - 5 32.9089653438087 - 6 32.9089653438087 - 7 32.9089653438087 - 8 32.9089653438087 - 9 32.9089653438087 - 10 32.9089653438087 - 11 32.9089653438087 - 12 32.9089653438087 - 13 32.9089653438087 - 14 32.9089653438087 - 15 32.9089653438087 - 16 32.9089653438087 - 17 32.9089653438087 - 18 32.9089653438087 - 19 32.9089653438087 - 20 32.9089653438087 - 21 32.9089653438087 - 22 32.9089653438087 - 23 32.9089653438087 - 24 32.9089653438087 - 25 32.9089653438087 - 26 32.9089653438087 - 27 32.9089653438087 - 28 32.9089653438087 - 29 32.9089653438087 - 30 32.9089653438087 - 31 32.9089653438087 - 32 32.9089653438087 - 33 32.9089653438087 - 34 32.9089653438087 - 35 32.9089653438087 - 36 32.9089653438087 - 37 32.9089653438087 - 38 32.9089653438087 - 39 32.9089653438087 - 40 32.9089653438087 - 0.5 33.7749907475931 - 1.5 33.7749907475931 - 2.5 33.7749907475931 - 3.5 33.7749907475931 - 4.5 33.7749907475931 - 5.5 33.7749907475931 - 6.5 33.7749907475931 - 7.5 33.7749907475931 - 8.5 33.7749907475931 - 9.5 33.7749907475931 - 10.5 33.7749907475931 - 11.5 33.7749907475931 - 12.5 33.7749907475931 - 13.5 33.7749907475931 - 14.5 33.7749907475931 - 15.5 33.7749907475931 - 16.5 33.7749907475931 - 17.5 33.7749907475931 - 18.5 33.7749907475931 - 19.5 33.7749907475931 - 20.5 33.7749907475931 - 21.5 33.7749907475931 - 22.5 33.7749907475931 - 23.5 33.7749907475931 - 24.5 33.7749907475931 - 25.5 33.7749907475931 - 26.5 33.7749907475931 - 27.5 33.7749907475931 - 28.5 33.7749907475931 - 29.5 33.7749907475931 - 30.5 33.7749907475931 - 31.5 33.7749907475931 - 32.5 33.7749907475931 - 33.5 33.7749907475931 - 34.5 33.7749907475931 - 35.5 33.7749907475931 - 36.5 33.7749907475931 - 37.5 33.7749907475931 - 38.5 33.7749907475931 - 39.5 33.7749907475931 - 1 
34.6410161513775 - 2 34.6410161513775 - 3 34.6410161513775 - 4 34.6410161513775 - 5 34.6410161513775 - 6 34.6410161513775 - 7 34.6410161513775 - 8 34.6410161513775 - 9 34.6410161513775 - 10 34.6410161513775 - 11 34.6410161513775 - 12 34.6410161513775 - 13 34.6410161513775 - 14 34.6410161513775 - 15 34.6410161513775 - 16 34.6410161513775 - 17 34.6410161513775 - 18 34.6410161513775 - 19 34.6410161513775 - 20 34.6410161513775 - 21 34.6410161513775 - 22 34.6410161513775 - 23 34.6410161513775 - 24 34.6410161513775 - 25 34.6410161513775 - 26 34.6410161513775 - 27 34.6410161513775 - 28 34.6410161513775 - 29 34.6410161513775 - 30 34.6410161513775 - 31 34.6410161513775 - 32 34.6410161513775 - 33 34.6410161513775 - 34 34.6410161513775 - 35 34.6410161513775 - 36 34.6410161513775 - 37 34.6410161513775 - 38 34.6410161513775 - 39 34.6410161513775 - 40 34.6410161513775 diff --git a/grid_gen/periodic_general/fortune/Makefile b/grid_gen/periodic_general/fortune/Makefile deleted file mode 100644 index 31b2e0bee..000000000 --- a/grid_gen/periodic_general/fortune/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.SUFFIXES: .c .o - - -OBJS = edgelist.o geometry.o heap.o memory.o output.o voronoi.o voronoi_main.o - -all: $(OBJS) - ar -ru libfortune.a $(OBJS) - -clean: - rm -f *.o libfortune.a - -.c.o: - $(CC) $(CFLAGS) -c $< -I../ diff --git a/grid_gen/periodic_general/fortune/edgelist.c b/grid_gen/periodic_general/fortune/edgelist.c deleted file mode 100644 index 95373b988..000000000 --- a/grid_gen/periodic_general/fortune/edgelist.c +++ /dev/null @@ -1,188 +0,0 @@ - -/*** EDGELIST.C ***/ - -#include "vdefs.h" - -int ELhashsize ; -Site * bottomsite ; -Freelist hfl ; -Halfedge * ELleftend, * ELrightend, **ELhash ; - -int ntry, totalsearch ; - -void -ELinitialize(void) -{ - int i ; - - freeinit(&hfl, sizeof(Halfedge)) ; - ELhashsize = 2 * sqrt_nsites ; - ELhash = (Halfedge **)myalloc( sizeof(*ELhash) * ELhashsize) ; - for (i = 0 ; i < ELhashsize ; i++) - { - ELhash[i] = (Halfedge *)NULL ; - } - 
ELleftend = HEcreate((Edge *)NULL, 0) ; - ELrightend = HEcreate((Edge *)NULL, 0) ; - ELleftend->ELleft = (Halfedge *)NULL ; - ELleftend->ELright = ELrightend ; - ELrightend->ELleft = ELleftend ; - ELrightend->ELright = (Halfedge *)NULL ; - ELhash[0] = ELleftend ; - ELhash[ELhashsize-1] = ELrightend ; -} - -Halfedge * -HEcreate(Edge * e, int pm) -{ - Halfedge * answer ; - - answer = (Halfedge *)getfree(&hfl) ; - answer->ELedge = e ; - answer->ELpm = pm ; - answer->PQnext = (Halfedge *)NULL ; - answer->vertex = (Site *)NULL ; - answer->ELrefcnt = 0 ; - return (answer) ; -} - -void -ELinsert(Halfedge * lb, Halfedge * nnew) -{ - nnew->ELleft = lb ; - nnew->ELright = lb->ELright ; - (lb->ELright)->ELleft = nnew ; - lb->ELright = nnew ; -} - -/* Get entry from hash table, pruning any deleted nodes */ - -Halfedge * -ELgethash(int b) -{ - Halfedge * he ; - - if ((b < 0) || (b >= ELhashsize)) - { - return ((Halfedge *)NULL) ; - } - he = ELhash[b] ; - if ((he == (Halfedge *)NULL) || (he->ELedge != (Edge *)DELETED)) - { - return (he) ; - } - /* Hash table points to deleted half edge. Patch as necessary. 
*/ - ELhash[b] = (Halfedge *)NULL ; - if ((--(he->ELrefcnt)) == 0) - { - makefree((Freenode *)he, (Freelist *)&hfl) ; - } - return ((Halfedge *)NULL) ; -} - -Halfedge * -ELleftbnd(VPoint * p) -{ - int i, bucket ; - Halfedge * he ; - - /* Use hash table to get close to desired halfedge */ - bucket = (p->x - xmin) / deltax * ELhashsize ; - if (bucket < 0) - { - bucket = 0 ; - } - if (bucket >= ELhashsize) - { - bucket = ELhashsize - 1 ; - } - he = ELgethash(bucket) ; - if (he == (Halfedge *)NULL) - { - for (i = 1 ; 1 ; i++) - { - if ((he = ELgethash(bucket-i)) != (Halfedge *)NULL) - { - break ; - } - if ((he = ELgethash(bucket+i)) != (Halfedge *)NULL) - { - break ; - } - } - totalsearch += i ; - } - ntry++ ; - /* Now search linear list of halfedges for the corect one */ - if (he == ELleftend || (he != ELrightend && right_of(he,p))) - { - do { - he = he->ELright ; - } while (he != ELrightend && right_of(he,p)) ; - he = he->ELleft ; - } - else - { - do { - he = he->ELleft ; - } while (he != ELleftend && !right_of(he,p)) ; - } - /*** Update hash table and reference counts ***/ - if ((bucket > 0) && (bucket < ELhashsize-1)) - { - if (ELhash[bucket] != (Halfedge *)NULL) - { - (ELhash[bucket]->ELrefcnt)-- ; - } - ELhash[bucket] = he ; - (ELhash[bucket]->ELrefcnt)++ ; - } - return (he) ; -} - -/*** This delete routine can't reclaim node, since pointers from hash - : table may be present. - ***/ - -void -ELdelete(Halfedge * he) -{ - (he->ELleft)->ELright = he->ELright ; - (he->ELright)->ELleft = he->ELleft ; - he->ELedge = (Edge *)DELETED ; -} - -Halfedge * -ELright(Halfedge * he) -{ - return (he->ELright) ; -} - -Halfedge * -ELleft(Halfedge * he) -{ - return (he->ELleft) ; -} - -Site * -leftreg(Halfedge * he) -{ - if (he->ELedge == (Edge *)NULL) - { - return(bottomsite) ; - } - return (he->ELpm == le ? 
he->ELedge->reg[le] : - he->ELedge->reg[re]) ; -} - -Site * -rightreg(Halfedge * he) -{ - if (he->ELedge == (Edge *)NULL) - { - return(bottomsite) ; - } - return (he->ELpm == le ? he->ELedge->reg[re] : - he->ELedge->reg[le]) ; -} - diff --git a/grid_gen/periodic_general/fortune/geometry.c b/grid_gen/periodic_general/fortune/geometry.c deleted file mode 100644 index fb06e2923..000000000 --- a/grid_gen/periodic_general/fortune/geometry.c +++ /dev/null @@ -1,220 +0,0 @@ - -/*** GEOMETRY.C ***/ - -#include -#include "vdefs.h" - -double deltax, deltay ; -int nedges, sqrt_nsites, nvertices ; -Freelist efl ; - -void -geominit(void) -{ - freeinit(&efl, sizeof(Edge)) ; - nvertices = nedges = 0 ; - sqrt_nsites = sqrt((double)(nsites+4)) ; - deltay = ymax - ymin ; - deltax = xmax - xmin ; -} - -Edge * -bisect(Site * s1, Site * s2) -{ - double dx, dy, adx, ady ; - Edge * newedge ; - - newedge = (Edge *)getfree(&efl) ; - newedge->reg[0] = s1 ; - newedge->reg[1] = s2 ; - ref(s1) ; - ref(s2) ; - newedge->ep[0] = newedge->ep[1] = (Site *)NULL ; - dx = s2->coord.x - s1->coord.x ; - dy = s2->coord.y - s1->coord.y ; - adx = dx>0 ? dx : -dx ; - ady = dy>0 ? 
dy : -dy ; - newedge->c = s1->coord.x * dx + s1->coord.y * dy + (dx*dx + dy*dy) * 0.5 ; - if (adx > ady) - { - newedge->a = 1.0 ; - newedge->b = dy/dx ; - newedge->c /= dx ; - } - else - { - newedge->b = 1.0 ; - newedge->a = dx/dy ; - newedge->c /= dy ; - } - newedge->edgenbr = nedges ; - out_bisector(newedge) ; - nedges++ ; - return (newedge) ; -} - -Site * -intersect(Halfedge * el1, Halfedge * el2) -{ - Edge * e1, * e2, * e ; - Halfedge * el ; - double d, xint, yint ; - int right_of_site ; - Site * v ; - - e1 = el1->ELedge ; - e2 = el2->ELedge ; - if ((e1 == (Edge*)NULL) || (e2 == (Edge*)NULL)) - { - return ((Site *)NULL) ; - } - if (e1->reg[1] == e2->reg[1]) - { - return ((Site *)NULL) ; - } - d = (e1->a * e2->b) - (e1->b * e2->a) ; - if ((-1.0e-10 < d) && (d < 1.0e-10)) - { - return ((Site *)NULL) ; - } - xint = (e1->c * e2->b - e2->c * e1->b) / d ; - yint = (e2->c * e1->a - e1->c * e2->a) / d ; - if ((e1->reg[1]->coord.y < e2->reg[1]->coord.y) || - (e1->reg[1]->coord.y == e2->reg[1]->coord.y && - e1->reg[1]->coord.x < e2->reg[1]->coord.x)) - { - el = el1 ; - e = e1 ; - } - else - { - el = el2 ; - e = e2 ; - } - right_of_site = (xint >= e->reg[1]->coord.x) ; - if ((right_of_site && (el->ELpm == le)) || - (!right_of_site && (el->ELpm == re))) - { - return ((Site *)NULL) ; - } - v = (Site *)getfree(&sfl) ; - v->refcnt = 0 ; - v->coord.x = xint ; - v->coord.y = yint ; - return (v) ; -} - -/*** returns 1 if p is to right of halfedge e ***/ - -int -right_of(Halfedge * el, VPoint * p) -{ - Edge * e ; - Site * topsite ; - int right_of_site, above, fast ; - double dxp, dyp, dxs, t1, t2, t3, yl ; - - e = el->ELedge ; - topsite = e->reg[1] ; - right_of_site = (p->x > topsite->coord.x) ; - if (right_of_site && (el->ELpm == le)) - { - return (1) ; - } - if(!right_of_site && (el->ELpm == re)) - { - return (0) ; - } - if (e->a == 1.0) - { - dyp = p->y - topsite->coord.y ; - dxp = p->x - topsite->coord.x ; - fast = 0 ; - if ((!right_of_site & (e->b < 0.0)) || - (right_of_site 
& (e->b >= 0.0))) - { - fast = above = (dyp >= e->b*dxp) ; - } - else - { - above = ((p->x + p->y * e->b) > (e->c)) ; - if (e->b < 0.0) - { - above = !above ; - } - if (!above) - { - fast = 1 ; - } - } - if (!fast) - { - dxs = topsite->coord.x - (e->reg[0])->coord.x ; - above = (e->b * (dxp*dxp - dyp*dyp)) - < - (dxs * dyp * (1.0 + 2.0 * dxp / - dxs + e->b * e->b)) ; - if (e->b < 0.0) - { - above = !above ; - } - } - } - else /*** e->b == 1.0 ***/ - { - yl = e->c - e->a * p->x ; - t1 = p->y - yl ; - t2 = p->x - topsite->coord.x ; - t3 = yl - topsite->coord.y ; - above = ((t1*t1) > ((t2 * t2) + (t3 * t3))) ; - } - return (el->ELpm == le ? above : !above) ; -} - -void -endpoint(Edge * e, int lr, Site * s) -{ - e->ep[lr] = s ; - ref(s) ; - if (e->ep[re-lr] == (Site *)NULL) - { - return ; - } - out_ep(e) ; - deref(e->reg[le]) ; - deref(e->reg[re]) ; - makefree((Freenode *)e, (Freelist *) &efl) ; -} - -double -dist(Site * s, Site * t) -{ - double dx,dy ; - - dx = s->coord.x - t->coord.x ; - dy = s->coord.y - t->coord.y ; - return (sqrt(dx*dx + dy*dy)) ; -} - -void -makevertex(Site * v) -{ - v->sitenbr = nvertices++ ; - out_vertex(v) ; -} - -void -deref(Site * v) -{ - if (--(v->refcnt) == 0 ) - { - makefree((Freenode *)v, (Freelist *)&sfl) ; - } -} - -void -ref(Site * v) -{ - ++(v->refcnt) ; -} - diff --git a/grid_gen/periodic_general/fortune/heap.c b/grid_gen/periodic_general/fortune/heap.c deleted file mode 100644 index 87bfbbd14..000000000 --- a/grid_gen/periodic_general/fortune/heap.c +++ /dev/null @@ -1,118 +0,0 @@ - -/*** HEAP.C ***/ - - -#include "vdefs.h" - -int PQmin, PQcount, PQhashsize ; -Halfedge * PQhash ; - -void -PQinsert(Halfedge * he, Site * v, double offset) -{ - Halfedge * last, * next ; - - he->vertex = v ; - ref(v) ; - he->ystar = v->coord.y + offset ; - last = &PQhash[ PQbucket(he)] ; - while ((next = last->PQnext) != (Halfedge *)NULL && - (he->ystar > next->ystar || - (he->ystar == next->ystar && - v->coord.x > next->vertex->coord.x))) - { - last = 
next ; - } - he->PQnext = last->PQnext ; - last->PQnext = he ; - PQcount++ ; -} - -void -PQdelete(Halfedge * he) -{ - Halfedge * last; - - if(he -> vertex != (Site *) NULL) - { - last = &PQhash[PQbucket(he)] ; - while (last -> PQnext != he) - { - last = last->PQnext ; - } - last->PQnext = he->PQnext; - PQcount-- ; - deref(he->vertex) ; - he->vertex = (Site *)NULL ; - } -} - -int -PQbucket(Halfedge * he) -{ - int bucket ; - - - if (he->ystar < ymin) bucket = 0; - else if (he->ystar >= ymax) bucket = PQhashsize-1; - else bucket = (he->ystar - ymin)/deltay * PQhashsize; - if (bucket < 0) - { - bucket = 0 ; - } - if (bucket >= PQhashsize) - { - bucket = PQhashsize-1 ; - } - if (bucket < PQmin) - { - PQmin = bucket ; - } - return (bucket); -} - -int -PQempty(void) -{ - return (PQcount == 0) ; -} - - -VPoint -PQ_min(void) -{ - VPoint answer ; - - while (PQhash[PQmin].PQnext == (Halfedge *)NULL) - { - ++PQmin ; - } - answer.x = PQhash[PQmin].PQnext->vertex->coord.x ; - answer.y = PQhash[PQmin].PQnext->ystar ; - return (answer) ; -} - -Halfedge * -PQextractmin(void) -{ - Halfedge * curr ; - - curr = PQhash[PQmin].PQnext ; - PQhash[PQmin].PQnext = curr->PQnext ; - PQcount-- ; - return (curr) ; -} - -void -PQinitialize(void) -{ - int i ; - - PQcount = PQmin = 0 ; - PQhashsize = 4 * sqrt_nsites ; - PQhash = (Halfedge *)myalloc(PQhashsize * sizeof *PQhash) ; - for (i = 0 ; i < PQhashsize; i++) - { - PQhash[i].PQnext = (Halfedge *)NULL ; - } -} diff --git a/grid_gen/periodic_general/fortune/memory.c b/grid_gen/periodic_general/fortune/memory.c deleted file mode 100644 index 13a75d870..000000000 --- a/grid_gen/periodic_general/fortune/memory.c +++ /dev/null @@ -1,85 +0,0 @@ - -/*** MEMORY.C ***/ - -#include -#include /* malloc(), exit() */ - -#include "vdefs.h" - -extern int sqrt_nsites, siteidx ; -char** memory_map; -int nallocs = 0; - -void -freeinit(Freelist * fl, int size) -{ - fl->head = (Freenode *)NULL ; - fl->nodesize = size ; -} - -char * -getfree(Freelist * fl) -{ - 
int i ; - Freenode * t ; - if (fl->head == (Freenode *)NULL) - { - t = (Freenode *) myalloc(sqrt_nsites * fl->nodesize) ; - for(i = 0 ; i < sqrt_nsites ; i++) - { - makefree((Freenode *)((char *)t+i*fl->nodesize), fl) ; - } - } - t = fl->head ; - fl->head = (fl->head)->nextfree ; - return ((char *)t) ; -} - -void -makefree(Freenode * curr, Freelist * fl) -{ - curr->nextfree = fl->head ; - fl->head = curr ; -} - -int total_alloc; - -char * -myalloc(unsigned n) -{ - char * t ; - if ((t=(char*)malloc(n)) == (char *) 0) - { - fprintf(stderr,"Insufficient memory processing site %d (%d bytes in use)\n", - siteidx, total_alloc) ; - exit(0) ; - } - total_alloc += n ; - - if (nallocs % 20000 == 0) - { - if (nallocs == 0) - memory_map = (char **)malloc((nallocs+20000)*sizeof(char*)); - else - memory_map = (char **)realloc(memory_map,(nallocs+20000)*sizeof(char*)); - } - memory_map[nallocs++] = t; - return (t) ; -} - -void free_all(void) -{ - int i; - - for (i=0; i -#include "vdefs.h" - -extern int triangulate, plot, debug ; -extern double ymax, ymin, xmax, xmin ; - -double pxmin, pxmax, pymin, pymax, cradius; - -void -openpl(void) -{ -} - -#pragma argsused -void -line(double ax, double ay, double bx, double by) -{ -} - -#pragma argsused -void -circle(double ax, double ay, double radius) -{ -} - -#pragma argsused -void -range(double pxmin, double pxmax, double pymin, double pymax) -{ -} - -void -out_bisector(Edge * e) -{ - if (triangulate && plot && !debug) - { - line(e->reg[0]->coord.x, e->reg[0]->coord.y, - e->reg[1]->coord.x, e->reg[1]->coord.y) ; - } - if (!triangulate && !plot && !debug) - { - printf("l %f %f %f\n", e->a, e->b, e->c) ; - } - if (debug) - { - printf("line(%d) %gx+%gy=%g, bisecting %d %d\n", e->edgenbr, - e->a, e->b, e->c, e->reg[le]->sitenbr, e->reg[re]->sitenbr) ; - } -} - -void -out_ep(Edge * e) -{ - if (!triangulate && plot) - { - clip_line(e) ; - } - if (!triangulate && !plot) - { - printf("e %d", e->edgenbr); - printf(" %d ", e->ep[le] != (Site 
*)NULL ? e->ep[le]->sitenbr : -1) ; - printf("%d\n", e->ep[re] != (Site *)NULL ? e->ep[re]->sitenbr : -1) ; - } -} - -void -out_vertex(Site * v) -{ - if (!triangulate && !plot && !debug) - { - printf ("v %f %f\n", v->coord.x, v->coord.y) ; - } - if (debug) - { - printf("vertex(%d) at %f %f\n", v->sitenbr, v->coord.x, v->coord.y) ; - } -} - -void -out_site(Site * s) -{ - if (!triangulate && plot && !debug) - { - circle (s->coord.x, s->coord.y, cradius) ; - } - if (!triangulate && !plot && !debug) - { - printf("s %f %f\n", s->coord.x, s->coord.y) ; - } - if (debug) - { - printf("site (%d) at %f %f\n", s->sitenbr, s->coord.x, s->coord.y) ; - } -} - -void -out_triple(PointSet * p, Site * s1, Site * s2, Site * s3) -{ - Point a, b, c; - a.setX(s1->coord.x); - a.setY(s1->coord.y); - a.setNum(s1->sitenbr); - b.setX(s2->coord.x); - b.setY(s2->coord.y); - b.setNum(s2->sitenbr); - c.setX(s3->coord.x); - c.setY(s3->coord.y); - c.setNum(s3->sitenbr); - p->triangulation->push_back(Triangle(a, b, c)); -//cout << "in out_triple " << a << " " << b << endl; -//cout << "in out_triple " << b << " " << a << endl; -//cout << "in out_triple " << a << " " << c << endl; -//cout << "in out_triple " << c << " " << a << endl; -//cout << "in out_triple " << b << " " << c << endl; -//cout << "in out_triple " << c << " " << b << endl; -// printf("%d %d %d\n", s1->sitenbr, s2->sitenbr, s3->sitenbr) ; -} - -void -plotinit(void) -{ - double dx, dy, d ; - - dy = ymax - ymin ; - dx = xmax - xmin ; - d = ( dx > dy ? 
dx : dy) * 1.1 ; - pxmin = xmin - (d-dx) / 2.0 ; - pxmax = xmax + (d-dx) / 2.0 ; - pymin = ymin - (d-dy) / 2.0 ; - pymax = ymax + (d-dy) / 2.0 ; - cradius = (pxmax - pxmin) / 350.0 ; - openpl() ; - range(pxmin, pymin, pxmax, pymax) ; -} - -void -clip_line(Edge * e) -{ - Site * s1, * s2 ; - double x1, x2, y1, y2 ; - - if (e->a == 1.0 && e->b >= 0.0) - { - s1 = e->ep[1] ; - s2 = e->ep[0] ; - } - else - { - s1 = e->ep[0] ; - s2 = e->ep[1] ; - } - if (e->a == 1.0) - { - y1 = pymin ; - if (s1 != (Site *)NULL && s1->coord.y > pymin) - { - y1 = s1->coord.y ; - } - if (y1 > pymax) - { - return ; - } - x1 = e->c - e->b * y1 ; - y2 = pymax ; - if (s2 != (Site *)NULL && s2->coord.y < pymax) - { - y2 = s2->coord.y ; - } - if (y2 < pymin) - { - return ; - } - x2 = e->c - e->b * y2 ; - if (((x1 > pxmax) && (x2 > pxmax)) || ((x1 < pxmin) && (x2 < pxmin))) - { - return ; - } - if (x1 > pxmax) - { - x1 = pxmax ; - y1 = (e->c - x1) / e->b ; - } - if (x1 < pxmin) - { - x1 = pxmin ; - y1 = (e->c - x1) / e->b ; - } - if (x2 > pxmax) - { - x2 = pxmax ; - y2 = (e->c - x2) / e->b ; - } - if (x2 < pxmin) - { - x2 = pxmin ; - y2 = (e->c - x2) / e->b ; - } - } - else - { - x1 = pxmin ; - if (s1 != (Site *)NULL && s1->coord.x > pxmin) - { - x1 = s1->coord.x ; - } - if (x1 > pxmax) - { - return ; - } - y1 = e->c - e->a * x1 ; - x2 = pxmax ; - if (s2 != (Site *)NULL && s2->coord.x < pxmax) - { - x2 = s2->coord.x ; - } - if (x2 < pxmin) - { - return ; - } - y2 = e->c - e->a * x2 ; - if (((y1 > pymax) && (y2 > pymax)) || ((y1 < pymin) && (y2 pymax) - { - y1 = pymax ; - x1 = (e->c - y1) / e->a ; - } - if (y1 < pymin) - { - y1 = pymin ; - x1 = (e->c - y1) / e->a ; - } - if (y2 > pymax) - { - y2 = pymax ; - x2 = (e->c - y2) / e->a ; - } - if (y2 < pymin) - { - y2 = pymin ; - x2 = (e->c - y2) / e->a ; - } - } - line(x1,y1,x2,y2); -} - diff --git a/grid_gen/periodic_general/fortune/vdefs.h b/grid_gen/periodic_general/fortune/vdefs.h deleted file mode 100644 index 8a3590838..000000000 --- 
a/grid_gen/periodic_general/fortune/vdefs.h +++ /dev/null @@ -1,141 +0,0 @@ -#ifndef __VDEFS_H -#define __VDEFS_H - -#include "PointSet.h" - -#ifndef NULL -#define NULL 0 -#endif - -#define DELETED -2 - -/* -typedef struct tagFreenode - { - struct tagFreenode * nextfree; - } Freenode ; - - -typedef struct tagFreelist - { - Freenode * head; - int nodesize; - } Freelist ; - -typedef struct tagPoint - { - double x ; - double y ; - } VPoint ; - -// structure used both for sites and for vertices // - -typedef struct tagSite - { - VPoint coord ; - int sitenbr ; - int refcnt ; - } Site ; - - -typedef struct tagEdge - { - double a, b, c ; - Site * ep[2] ; - Site * reg[2] ; - int edgenbr ; - } Edge ; -*/ - -#define le 0 -#define re 1 - -typedef struct tagHalfedge - { - struct tagHalfedge * ELleft ; - struct tagHalfedge * ELright ; - Edge * ELedge ; - int ELrefcnt ; - char ELpm ; - Site * vertex ; - double ystar ; - struct tagHalfedge * PQnext ; - } Halfedge ; - -/* edgelist.c */ -void ELinitialize(void) ; -Halfedge * HEcreate(Edge *, int) ; -void ELinsert(Halfedge *, Halfedge *) ; -Halfedge * ELgethash(int) ; -Halfedge * ELleftbnd(VPoint *) ; -void ELdelete(Halfedge *) ; -Halfedge * ELright(Halfedge *) ; -Halfedge * ELleft(Halfedge *) ; -Site * leftreg(Halfedge *) ; -Site * rightreg(Halfedge *) ; -extern int ELhashsize ; -extern Site * bottomsite ; -extern Freelist hfl ; -extern Halfedge * ELleftend, * ELrightend, **ELhash ; - -/* geometry.c */ -void geominit(void) ; -Edge * bisect(Site *, Site *) ; -Site * intersect(Halfedge *, Halfedge *) ; -int right_of(Halfedge *, VPoint *) ; -void endpoint(Edge *, int, Site *) ; -double dist(Site *, Site *) ; -void makevertex(Site *) ; -void deref(Site *) ; -void ref(Site *) ; -extern double deltax, deltay ; -extern int nsites, nedges, sqrt_nsites, nvertices ; -extern Freelist sfl, efl ; - -/* heap.c */ -void PQinsert(Halfedge *, Site *, double) ; -void PQdelete(Halfedge *) ; -int PQbucket(Halfedge *) ; -int PQempty(void) ; -VPoint 
PQ_min(void) ; -Halfedge * PQextractmin(void) ; -void PQinitialize(void) ; -extern int PQmin, PQcount, PQhashsize ; -extern Halfedge * PQhash ; - -/* main.c */ -extern int sorted, triangulate, plot, debug, nsites, siteidx ; -extern double xmin, xmax, ymin, ymax ; -extern Site * sites ; -extern Freelist sfl ; -int voronoi_main(int, char **); - -/* getopt.c */ -extern int getopt(int, char *const *, const char *); - -/* memory.c */ -void freeinit(Freelist *, int) ; -char *getfree(Freelist *) ; -void makefree(Freenode *, Freelist *) ; -char *myalloc(unsigned) ; -void free_all(void); - -/* output.c */ -void openpl(void) ; -void line(double, double, double, double) ; -void circle(double, double, double) ; -void range(double, double, double, double) ; -void out_bisector(Edge *) ; -void out_ep(Edge *) ; -void out_vertex(Site *) ; -void out_site(Site *) ; -void out_triple(PointSet *, Site *, Site *, Site *) ; -void plotinit(void) ; -void clip_line(Edge *) ; - -/* voronoi.c */ -void voronoi(PointSet * p, Site *(*)()) ; - -#endif - - diff --git a/grid_gen/periodic_general/fortune/voronoi.c b/grid_gen/periodic_general/fortune/voronoi.c deleted file mode 100644 index ec5319f7d..000000000 --- a/grid_gen/periodic_general/fortune/voronoi.c +++ /dev/null @@ -1,121 +0,0 @@ - -/*** VORONOI.C ***/ - -#include "vdefs.h" - -extern Site * bottomsite ; -extern Halfedge * ELleftend, * ELrightend ; - -/*** implicit parameters: nsites, sqrt_nsites, xmin, xmax, ymin, ymax, - : deltax, deltay (can all be estimates). - : Performance suffers if they are wrong; better to make nsites, - : deltax, and deltay too big than too small. (?) 
- ***/ - -void -voronoi(PointSet * ptset, Site *(*nextsite)(void)) -{ - Site * newsite, * bot, * top, * temp, * p, * v ; - VPoint newintstar ; - int pm ; - Halfedge * lbnd, * rbnd, * llbnd, * rrbnd, * bisector ; - Edge * e ; - - PQinitialize() ; - bottomsite = (*nextsite)() ; - out_site(bottomsite) ; - ELinitialize() ; - newsite = (*nextsite)() ; - while (1) - { - if(!PQempty()) - { - newintstar = PQ_min() ; - } - if (newsite != (Site *)NULL && (PQempty() - || newsite -> coord.y < newintstar.y - || (newsite->coord.y == newintstar.y - && newsite->coord.x < newintstar.x))) {/* new site is -smallest */ - { - out_site(newsite) ; - } - lbnd = ELleftbnd(&(newsite->coord)) ; - rbnd = ELright(lbnd) ; - bot = rightreg(lbnd) ; - e = bisect(bot, newsite) ; - bisector = HEcreate(e, le) ; - ELinsert(lbnd, bisector) ; - p = intersect(lbnd, bisector) ; - if (p != (Site *)NULL) - { - PQdelete(lbnd) ; - PQinsert(lbnd, p, dist(p,newsite)) ; - } - lbnd = bisector ; - bisector = HEcreate(e, re) ; - ELinsert(lbnd, bisector) ; - p = intersect(bisector, rbnd) ; - if (p != (Site *)NULL) - { - PQinsert(bisector, p, dist(p,newsite)) ; - } - newsite = (*nextsite)() ; - } - else if (!PQempty()) /* intersection is smallest */ - { - lbnd = PQextractmin() ; - llbnd = ELleft(lbnd) ; - rbnd = ELright(lbnd) ; - rrbnd = ELright(rbnd) ; - bot = leftreg(lbnd) ; - top = rightreg(rbnd) ; - out_triple(ptset, bot, top, rightreg(lbnd)) ; - v = lbnd->vertex ; - makevertex(v) ; - endpoint(lbnd->ELedge, lbnd->ELpm, v); - endpoint(rbnd->ELedge, rbnd->ELpm, v) ; - ELdelete(lbnd) ; - PQdelete(rbnd) ; - ELdelete(rbnd) ; - pm = le ; - if (bot->coord.y > top->coord.y) - { - temp = bot ; - bot = top ; - top = temp ; - pm = re ; - } - e = bisect(bot, top) ; - bisector = HEcreate(e, pm) ; - ELinsert(llbnd, bisector) ; - endpoint(e, re-pm, v) ; - deref(v) ; - p = intersect(llbnd, bisector) ; - if (p != (Site *) NULL) - { - PQdelete(llbnd) ; - PQinsert(llbnd, p, dist(p,bot)) ; - } - p = intersect(bisector, rrbnd) ; - if 
(p != (Site *) NULL) - { - PQinsert(bisector, p, dist(p,bot)) ; - } - } - else - { - break ; - } - } - - for( lbnd = ELright(ELleftend) ; - lbnd != ELrightend ; - lbnd = ELright(lbnd)) - { - e = lbnd->ELedge ; - out_ep(e) ; - } - -} - diff --git a/grid_gen/periodic_general/fortune/voronoi_main.c b/grid_gen/periodic_general/fortune/voronoi_main.c deleted file mode 100644 index 04662acf8..000000000 --- a/grid_gen/periodic_general/fortune/voronoi_main.c +++ /dev/null @@ -1,135 +0,0 @@ -/*** MAIN.C ***/ - -#include -#include /* realloc(), qsort() */ -#include "vdefs.h" -using namespace std; - -Site * readone(void), * nextone(void) ; -void readsites(PointSet * p) ; - -int sorted, triangulate, plot, debug, nsites, siteidx ; -double xmin, xmax, ymin, ymax ; -Site * sites ; -Freelist sfl ; - -void -voronoi_main(PointSet * p) -{ - int c ; - Site *(*next)() ; - - sorted = plot = debug = 0 ; - triangulate = 1 ; - - freeinit(&sfl, sizeof(Site)) ; - readsites(p) ; - next = nextone ; - siteidx = 0 ; - geominit() ; - voronoi(p, next) ; - free_all(); -} - -/*** sort sites on y, then x, coord ***/ - -int -scomp(const void * vs1, const void * vs2) -{ - VPoint * s1 = (VPoint *)vs1 ; - VPoint * s2 = (VPoint *)vs2 ; - - if (s1->y < s2->y) - { - return (-1) ; - } - if (s1->y > s2->y) - { - return (1) ; - } - if (s1->x < s2->x) - { - return (-1) ; - } - if (s1->x > s2->x) - { - return (1) ; - } - return (0) ; -} - -/*** return a single in-storage site ***/ - -Site * -nextone(void) -{ - Site * s ; - - if (siteidx < nsites) - { - s = &sites[siteidx++]; - return (s) ; - } - else - { - return ((Site *)NULL) ; - } -} - -/*** read all sites, sort, and compute xmin, xmax, ymin, ymax ***/ - -void -readsites(PointSet * p) -{ - int i ; - int j ; - - int MaxSize = 2000000; - - nsites = 0 ; - sites = (Site *) myalloc(MaxSize * sizeof(Site)); - for(j=0; jnPoints; j++) { - sites[nsites].coord.x = p->points[j]->getX(); - sites[nsites].coord.y = p->points[j]->getY(); - sites[nsites].sitenbr = 
p->points[j]->getNum() ; - sites[nsites++].refcnt = 0 ; - if (nsites % MaxSize == 0) { - sites = (Site *)realloc(sites,(nsites+MaxSize)*sizeof(Site)); - } - } - - qsort((void *)sites, nsites, sizeof(Site), scomp) ; - xmin = sites[0].coord.x ; - xmax = sites[0].coord.x ; - for (i = 1 ; i < nsites ; ++i) - { - if(sites[i].coord.x < xmin) - { - xmin = sites[i].coord.x ; - } - if (sites[i].coord.x > xmax) - { - xmax = sites[i].coord.x ; - } - } - ymin = sites[0].coord.y ; - ymax = sites[nsites-1].coord.y ; -} - -/*** read one site ***/ - -Site * -readone(void) -{ - Site * s ; - - s = (Site *)getfree(&sfl) ; - s->refcnt = 0 ; - s->sitenbr = siteidx++ ; - if (scanf("%lf %lf", &(s->coord.x), &(s->coord.y)) == EOF) - { - return ((Site *)NULL ) ; - } - return (s) ; -} - diff --git a/grid_gen/periodic_general/main.cxx b/grid_gen/periodic_general/main.cxx deleted file mode 100644 index 6e986fcca..000000000 --- a/grid_gen/periodic_general/main.cxx +++ /dev/null @@ -1,586 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "PointSet.h" -#include "Triangle.h" -#include "DensityFunction.h" -#include "netcdf.h" -#include -using namespace std; - -#define ALLOC_INT2D(ARR,I,J) (ARR) = new int*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new int[(J)]; -#define DEALLOC_INT2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -#define ALLOC_REAL2D(ARR,I,J) (ARR) = new double*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new double[(J)]; -#define DEALLOC_REAL2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -int obtuse_triangle(Triangle &t); - -void write_netcdf(int nCells, int nVertices, int vertexDegree, - double *xCell, double *yCell, double *zCell, - double *xVertex, double *yVertex, double *zVertex, - double *meshDensity, int *cellsOnVertex, - double x_period, double y_period); - -void readParamsFile(); - -Point segment_intersect(Point& p0, Point &p1, Point &q0, Point&q1); - - -// 
run-time parameters - double EPS = 1.0e-7; - double X_PERIOD = 1.0; - double Y_PERIOD = 1.0; - double X_BUFFER_FRAC = 0.05; - double Y_BUFFER_FRAC = 0.05; - double X_BUFFER_W = 1.0; // Buffer width in length units - these get calculated automatically from X/Y_BUFFER_FRAC - double Y_BUFFER_W = 1.0; - int NUMPOINTS = 200; - int MAXITR = 100; - int USE_MC = 1; // 1=true, 0=read from file - int USE_DATA_DENSITY = 0; // 1=true, 0=analytic density function in DensityFunction.cxx - - - -int main(int argc, char ** argv) -{ - -// read user-specified settings - readParamsFile(); - - int i, ii, jj, n, iter, idx, npts, np; - DensityFunction f(X_PERIOD, Y_PERIOD, USE_DATA_DENSITY); - PointSet pset; - PointSet out_pset; - vector * vcs; - Point * cells; - Point * temp_p; - Point * temp_pp; - Point p3; - Triangle t; - Point p, p2; - vector * clist; - vector * triangulation; - vector::iterator it; - set delaunay_tri; - set::iterator dti; - list norm_dt; - list::iterator norm_dti; - vector< vector > vertices_on_cell; - vector< vector > cells_on_cell; - vector< set > coc; - set::iterator cell_iter; - vector< vector > cv_on_cell; - Triangle * tri; - vector * vlist; - vector * elist; - double xcell, ycell; - double x, y; - double total_mass, mass; - FILE * restart; - int nCells, nVertices, vertexDegree; - double *xCell, *yCell, *zCell, *xVertex, *yVertex, *zVertex, *meshDensity; - int *cellsOnVertex; - - - - - - - if (USE_MC == 1) { - cout << "Generating Monte Carlo points..." <getX() < (double)( X_BUFFER_W ) || pset[i]->getX() > (double)( X_PERIOD - X_BUFFER_W )) - pset[i]->setBoundaryPoint(1); - if (pset[i]->getY() < (double)( Y_BUFFER_W ) || pset[i]->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) - pset[i]->setBoundaryPoint(1); - } - - - /* - * Lloyd iteration - */ - for (iter=0; iterisBoundaryPoint()) { - total_mass = 0.0; - p.setXY(0.0, 0.0); - for (int j=0; jsetXY(p.getX(), p.getY()); - - /* If point has drifted into boundary region, push it back... 
*/ - pset[i]->setX(pset[i]->getX() < (double)( X_BUFFER_W ) ? (double)( X_BUFFER_W ) : pset[i]->getX()); - pset[i]->setX(pset[i]->getX() > (double)( X_PERIOD - X_BUFFER_W ) ? (double)( X_PERIOD - X_BUFFER_W ) : pset[i]->getX()); - pset[i]->setY(pset[i]->getY() < (double)( Y_BUFFER_W ) ? (double)( Y_BUFFER_W ) : pset[i]->getY()); - pset[i]->setY(pset[i]->getY() > (double)( Y_PERIOD - Y_BUFFER_W ) ? (double)( Y_PERIOD - Y_BUFFER_W ) : pset[i]->getY()); - } - } - delete [] vcs; - if (iter % 20 == 0) { - // Write restart file every 20 iterations (could become a runtime configurable setting) - cout << "Writing restart.txt..." << endl; - restart = fopen("restart.txt","w"); - for(i=0; igetX(), pset[i]->getY()); - } - fclose(restart); - } - - } - - // Write restart again at the end - cout << "Writing restart.txt..." << endl; - - restart = fopen("restart.txt","w"); - for(i=0; igetX(), pset[i]->getY()); - } - fclose(restart); - - /* - * To get a triangulation of the points, we'll need to make copies of the boundary points - */ - cout << "Creating triangulation..." 
<< endl; - npts = pset.size(); - for (i=0; igetX(), pset[i]->getY(), 0); - temp_p->setNum(pset[i]->getNum()); - if (pset[i]->isBoundaryPoint()) - temp_p->setBoundaryPoint(1); - out_pset.addPoint(*temp_p); - - /* If this is a boundary point, add it again in a periodic way */ - if (temp_p->isBoundaryPoint()) { - - if (temp_p->getX() < (double)( X_BUFFER_W )) { - - /* RIGHT SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() + (double)( X_PERIOD )); - out_pset.addPoint(*temp_pp); - - if (temp_p->getY() < (double)( Y_BUFFER_W )) { - - /* UPPER-RIGHT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() + (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() + (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - else if (temp_p->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) { - - /* LOWER-RIGHT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() + (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() - (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - } - else if (temp_p->getX() > (double)( X_PERIOD - X_BUFFER_W )) { - - /* LEFT SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() - (double)( X_PERIOD )); - out_pset.addPoint(*temp_pp); - - if (temp_p->getY() < (double)( Y_BUFFER_W )) { - - /* UPPER-LEFT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() - (double)( X_PERIOD )); - 
temp_pp->setY(temp_pp->getY() + (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - else if (temp_p->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) { - - /* LOWER-LEFT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() - (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() - (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - } - - if (temp_p->getY() < (double)( Y_BUFFER_W )) { - - /* TOP SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setY(temp_pp->getY() + (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - else if (temp_p->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) { - - /* BOTTOM SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setY(temp_pp->getY() - (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - - } - - } - - - - /* - * Having obtained a triangulation of "real" generating points as well as "ghost" points, - * we need to scan through the triangles and keep a unique set that triangulates a truly - * doubly-periodic grid - */ - cout << "Finding unique set of triangles..." 
<< endl; - triangulation = out_pset.getTriangulation(); - for (it = triangulation->begin(); it != triangulation->end(); it++) { - /* - * Ghost/halo points have a negative index; if all of the vertices of a triangle - * are negative, the triangle is redundant - */ - ii = 0; - for (int j=0; j<3; j++) - if ( it->getVertex(j).getNum() >= 0 ) - ii++; - - /* - * If at least one corner of the triangle is non-negative, we consider keeping it, - * but only if it isn't redundant with another triangle already added to the set - */ - if ( ii > 0 ) { - tri = new Triangle(); - - for (int j=0; j<3; j++) { - temp_p = new Point(it->getVertex(j).getX(), it->getVertex(j).getY(), 0); - temp_p->setNum(it->getVertex(j).getNum()); - - /* Set point number back to positive value */ - if (temp_p->getNum() < 0) - temp_p->setNum(-1 * (temp_p->getNum() + 1)); - tri->setVertex(j, *temp_p); - } - - dti = delaunay_tri.find(*tri); - if (dti == delaunay_tri.end()) - delaunay_tri.insert(*tri); - else - delete tri; - } - } - - cout << "Ensuring corner locations are in range of domain..." << endl; - - /* - * Scan through triangles and ensure that corner locations are in the range (0,X_PERIOD],(0,Y_PERIOD] - */ - for (dti = delaunay_tri.begin(); dti != delaunay_tri.end(); dti++) { - t = *dti; - t.normalizeVertices((double)( EPS ), (double)( X_PERIOD + EPS ), (double)( EPS ), (double)( Y_PERIOD + EPS )); - norm_dt.push_back(t); - } - - - delete triangulation; - - - /* - * Generate {x,y,z}{Cell,Vertex}, meshDensity, and cellsOnVertex fields into simple arrays - */ - cout << "Generating {x,y,z}{Cell,Vertex}, meshDensity, and cellsOnVertex fields into simple arrays..." 
<< endl; - nCells = pset.size(); - nVertices = norm_dt.size(); - vertexDegree = 3; - cout << "nCells = " << nCells << endl; - cout << "nVertices = " << nVertices << endl; - - xCell = (double *)malloc(sizeof(double) * (size_t)nCells); - yCell = (double *)malloc(sizeof(double) * (size_t)nCells); - zCell = (double *)malloc(sizeof(double) * (size_t)nCells); - - xVertex = (double *)malloc(sizeof(double) * (size_t)nVertices); - yVertex = (double *)malloc(sizeof(double) * (size_t)nVertices); - zVertex = (double *)malloc(sizeof(double) * (size_t)nVertices); - - meshDensity = (double *)malloc(sizeof(double) * (size_t)nCells); - - cellsOnVertex = (int *)malloc(sizeof(int) * (size_t)nVertices * (size_t)vertexDegree); - - npts = pset.size(); - for (i=0; igetX(); - yCell[i] = pset[i]->getY(); - zCell[i] = 0.0; - meshDensity[i] = f.evaluate(*pset[i]); - } - - i = 0; - ii = 0; - for (norm_dti = norm_dt.begin(); norm_dti != norm_dt.end(); norm_dti++) { - p = norm_dti->circumcenter(); - xVertex[i] = p.getX(); - yVertex[i] = p.getY(); - zVertex[i] = 0.0; - for (int j=0; j<3; j++) - cellsOnVertex[ii++] = norm_dti->getVertex(j).getNum() + 1; /* indices are 1-based in MPAS */ -// cellsOnVertex[ii++] = norm_dti->getVertex(j).getNum(); /* Do not use the 0-based indices when making meshes for MPAS */ - i++; - } - - - /* - * Write fields to NetCDF file - */ - cout << "Writing to netCDF file..." << endl; - write_netcdf(nCells, nVertices, vertexDegree, xCell, yCell, zCell, xVertex, yVertex, zVertex, meshDensity, cellsOnVertex, (double)( X_PERIOD ), (double)( Y_PERIOD )); - - - free(xCell); - free(yCell); - free(zCell); - free(xVertex); - free(yVertex); - free(zVertex); - free(meshDensity); - free(cellsOnVertex); - - - cout << "Successful completion." 
<< endl; - return 0; -} - - -int obtuse_triangle(Triangle &t) -{ - int i; - Point p[3]; - double PI = 2.0 * acos(0.0); - - p[0] = t.getVertex(0); - p[1] = t.getVertex(1); - p[2] = t.getVertex(2); - - for(i=0; i<3; i++) { - if (fabs(angle(p[i], p[(i+1)%3], p[(i+2)%3])) > PI/2.0) { -cout << p[i] << " " << p[(i+1)%3] << " " << p[(i+2)%3] << endl; - return i+1; - } - } - - return 0; -} - - -Point segment_intersect(Point& p0, Point &p1, Point &q0, Point&q1) -{ - Point retval; - - Point u = (p1 - p0); - Point v = (q1 - q0); - Point w = (p0 - q0); - - double s; - - s = (v.getY()*w.getX() - v.getX()*w.getY())/(v.getX()*u.getY() - v.getY()*u.getX()); - - retval = p0 + u*s; - - return retval; -} - - -void write_netcdf(int nCells, int nVertices, int vertexDegree, - double * xCell, double * yCell, double * zCell, - double * xVertex, double * yVertex, double * zVertex, - double * meshDensity, int * cellsOnVertex, - double x_period, double y_period - ) -{ - int i, j, k; - int ncerr; - int ncid; - int dimIDnCells, dimIDnVertices, dimIDvertexDegree; - int varIDxCell, varIDyCell, varIDzCell; - int varIDxVertex, varIDyVertex, varIDzVertex; - int varIDcellsOnVertex, varIDmeshDensity; - - int dimids1[1]; - int dimids2[2]; - int dimids3[3]; - size_t start1[1], count1[1]; - size_t start2[2], count2[2]; - size_t start3[3], count3[3]; - - double sphere_radius = 0.0; - - - ncerr = nc_create("grid.nc", NC_SHARE, &ncid); - - ncerr = nc_def_dim(ncid, "nCells", (size_t)nCells, &dimIDnCells); - ncerr = nc_def_dim(ncid, "nVertices", (size_t)nVertices, &dimIDnVertices); - ncerr = nc_def_dim(ncid, "vertexDegree", (size_t)vertexDegree, &dimIDvertexDegree); - - dimids1[0] = dimIDnCells; - ncerr = nc_def_var(ncid, "xCell", NC_DOUBLE, 1, dimids1, &varIDxCell); - ncerr = nc_def_var(ncid, "yCell", NC_DOUBLE, 1, dimids1, &varIDyCell); - ncerr = nc_def_var(ncid, "zCell", NC_DOUBLE, 1, dimids1, &varIDzCell); - ncerr = nc_def_var(ncid, "meshDensity", NC_DOUBLE, 1, dimids1, &varIDmeshDensity); - dimids1[0] = 
dimIDnVertices; - ncerr = nc_def_var(ncid, "xVertex", NC_DOUBLE, 1, dimids1, &varIDxVertex); - ncerr = nc_def_var(ncid, "yVertex", NC_DOUBLE, 1, dimids1, &varIDyVertex); - ncerr = nc_def_var(ncid, "zVertex", NC_DOUBLE, 1, dimids1, &varIDzVertex); - dimids2[0] = dimIDnVertices; - dimids2[1] = dimIDvertexDegree; - ncerr = nc_def_var(ncid, "cellsOnVertex", NC_INT, 2, dimids2, &varIDcellsOnVertex); - - ncerr = nc_put_att_text(ncid, NC_GLOBAL, "on_a_sphere", 16, "NO "); - ncerr = nc_put_att_text(ncid, NC_GLOBAL, "is_periodic", 16, "YES "); - ncerr = nc_put_att_double(ncid, NC_GLOBAL, "sphere_radius", NC_DOUBLE, 1, &sphere_radius); - ncerr = nc_put_att_double(ncid, NC_GLOBAL, "x_offset", NC_DOUBLE, 1, &x_period); - ncerr = nc_put_att_double(ncid, NC_GLOBAL, "y_offset", NC_DOUBLE, 1, &y_period); - - ncerr = nc_enddef(ncid); - - start1[0] = 0; - start2[0] = 0; - start2[1] = 0; - count1[0] = nCells; - ncerr = nc_put_vara_double(ncid, varIDxCell, start1, count1, xCell); - ncerr = nc_put_vara_double(ncid, varIDyCell, start1, count1, yCell); - ncerr = nc_put_vara_double(ncid, varIDzCell, start1, count1, zCell); - ncerr = nc_put_vara_double(ncid, varIDmeshDensity, start1, count1, meshDensity); - count1[0] = nVertices; - ncerr = nc_put_vara_double(ncid, varIDxVertex, start1, count1, xVertex); - ncerr = nc_put_vara_double(ncid, varIDyVertex, start1, count1, yVertex); - ncerr = nc_put_vara_double(ncid, varIDzVertex, start1, count1, zVertex); - count2[0] = nVertices; - count2[1] = vertexDegree; - ncerr = nc_put_vara_int(ncid, varIDcellsOnVertex, start2, count2, cellsOnVertex); - - ncerr = nc_close(ncid); -} - - - -/* ***** Setup Routines ***** */ -void readParamsFile(){ - //Read in parameters from Params. - //If Params doesn't exist, write out Params with a default set of parameters - string junk; - ifstream params("Params.txt"); - int temp_restart_mode; - int temp_fileio_mode; - - if(!params){ - cout << "Error opening Params.txt file." 
<< endl; - cout << "Writing a default Params.txt file." << endl; - cout << "Exiting, please set up Params.txt, and rerun." << endl; - ofstream pout("Params.txt"); - pout << "Convergence tolerance to use:" << endl; - pout << EPS << endl; - pout << "Maximum number of iterations to perform:" << endl; - pout << MAXITR << endl; - pout << "How to get initial pointset. 0=from file; 1=Monte Carlo points from density function" << endl; - pout << USE_MC << endl; - pout << "If using Monte Carlo points, how many do you want?" << endl; - pout << NUMPOINTS << endl; - pout << "Domain width (x)" << endl; - pout << X_PERIOD << endl; - pout << "Domain height (y)" << endl; - pout << Y_PERIOD << endl; - pout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, x-direction" << endl; - pout << X_BUFFER_FRAC << endl; - pout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, y-direction" << endl; - pout << Y_BUFFER_FRAC << endl; - pout << "Use data density in file named density.nc with variables x, y, density. 
1=true, 0=analytic density function in DensityFunction.cxx" << endl; - pout << USE_DATA_DENSITY << endl; - - pout.close(); - - exit(1); - } - - - getline(params,junk); - params >> EPS; - params.ignore(10000,'\n'); - getline(params,junk); - params >> MAXITR; - params.ignore(10000,'\n'); - getline(params,junk); - params >> USE_MC; - params.ignore(10000,'\n'); - getline(params,junk); - params >> NUMPOINTS; - params.ignore(10000,'\n'); - getline(params,junk); - params >> X_PERIOD; - params.ignore(10000,'\n'); - getline(params,junk); - params >> Y_PERIOD; - params.ignore(10000,'\n'); - getline(params,junk); - params >> X_BUFFER_FRAC; - params.ignore(10000,'\n'); - getline(params,junk); - params >> Y_BUFFER_FRAC; - params.ignore(10000,'\n'); - getline(params,junk); - params >> USE_DATA_DENSITY; - params.ignore(10000,'\n'); - - params.close(); - - cout << "=== Specified settings are: ===" << endl; - cout << "Convergence tolerance to use:" << endl; - cout << EPS << endl; - cout << "Maximum number of iterations to perform:" << endl; - cout << MAXITR << endl; - cout << "How to get initial pointset. 0=from file; 1=Monte Carlo points from density function" << endl; - cout << USE_MC << endl; - cout << "If using Monte Carlo points, how many do you want?" << endl; - cout << NUMPOINTS << endl; - cout << "Domain width (x)" << endl; - cout << X_PERIOD << endl; - cout << "Domain height (y)" << endl; - cout << Y_PERIOD << endl; - cout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, x-direction" << endl; - cout << X_BUFFER_FRAC << endl; - cout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, y-direction" << endl; - cout << Y_BUFFER_FRAC << endl; - cout << "Use data density in file named density.nc with variables x, y, density. 
1=true, 0=analytic density function in DensityFunction.cxx" << endl; - cout << USE_DATA_DENSITY << endl; - - X_BUFFER_W = X_PERIOD * X_BUFFER_FRAC; - Y_BUFFER_W = Y_PERIOD * Y_BUFFER_FRAC; -} diff --git a/grid_gen/periodic_general/mkgrid.cxx b/grid_gen/periodic_general/mkgrid.cxx deleted file mode 100644 index eb469b05c..000000000 --- a/grid_gen/periodic_general/mkgrid.cxx +++ /dev/null @@ -1,635 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include "PointSet.h" -#include "Triangle.h" -#include "DensityFunction.h" -#include "netcdf.h" -using namespace std; - -#define EPS 1.0e-7 - -#define ALLOC_INT2D(ARR,I,J) (ARR) = new int*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new int[(J)]; -#define DEALLOC_INT2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -#define ALLOC_REAL2D(ARR,I,J) (ARR) = new double*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new double[(J)]; -#define DEALLOC_REAL2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -void read_netcdf(int *nCells, int *nVertices, int *vertexDegree, - double **xCell, double **yCell, double **zCell, - double **xVertex, double **yVertex, double **zVertex, - double **meshDensity, int **cellsOnVertex, - double *x_period, double *y_period); - -void write_netcdf(int nCells, int nEdges, int nVertices, int maxEdges, int vertexDegree, - int * indexToCellID, int * indexToEdgeID, int * indexToVertexID, - double * xCell, double * yCell, double * zCell, double * latCell, double * lonCell, - double * xEdge, double * yEdge, double * zEdge, double * latEdge, double * lonEdge, - double * xVertex, double * yVertex, double * zVertex, double * latVertex, double * lonVertex, - int * nEdgesOnCell, int * nEdgesOnEdge, - int ** cellsOnCell, int ** edgesOnCell, int ** verticesOnCell, - int ** cellsOnEdge, int ** verticesOnEdge, int ** edgesOnEdge, - int ** edgesOnVertex, int ** cellsOnVertex, double ** kiteAreasOnVertex, - double * fEdge, double * fVertex, double * dvEdge, 
double * dcEdge, double * areaCell, double * areaTriangle, double * angleEdge, - double ** weightsOnEdge); - -Point segment_intersect(Point& p0, Point &p1, Point &q0, Point&q1); - -int main(int argc, char ** argv) -{ - int i, j, k, ii, jj; -// DensityFunction f; -// PointSet out_pset; -// Point * cells; - Point * temp_p; -// Point * temp_pp; -// Point p3; - Triangle t; - Point p; -// vector * clist; -// vector * triangulation; -// vector::iterator it; -// set delaunay_tri; -// set::iterator dti; -// list norm_dt; -// list::iterator norm_dti; - vector< set > cellsOnCell_temp; - vector< vector > cellsOnVertex_v; - vector< vector > edgesOnVertex_v; - vector< vector > verticesOnCell_v; - vector< vector > cellsOnCell_v; - vector< vector > cellsOnEdge_v; - vector< vector > verticesOnEdge_v; - vector< vector > edgesOnCell_v; - vector areaCell_v, areaTriangle_v; - vector dcEdge_v, dvEdge_v; - vector angleEdge_v; - vector nEdgesOnCell_v; - vector cells_v; - vector vertices_v; - vector edges_v; - vector edge_segments; - set::iterator cell_iter; /* TESTING CODE */ -// vector< vector > cv_on_cell; /* TESTING CODE */ -// Triangle * tri; -// double xcell, ycell; - double x, y; -// double total_mass, mass; -// FILE * restart; - int nCells, nVertices, nEdges, vertexDegree; - double *xCell, *yCell, *zCell, *xVertex, *yVertex, *zVertex, *meshDensity; - int *cellsOnVertex; - double x_period, y_period; - - - /* - * Read basic grid info from NetCDF file - */ - read_netcdf(&nCells, &nVertices, &vertexDegree, &xCell, &yCell, &zCell, &xVertex, &yVertex, &zVertex, &meshDensity, &cellsOnVertex, &x_period, &y_period); - - cout << "Read from input file:" << endl; - cout << " nCells = " << nCells << endl; - cout << " nVertices = " << nVertices << endl; - cout << " vertexDegree = " << vertexDegree << endl; - cout << " x_period = " << x_period << endl; - cout << " y_period = " << y_period << endl; - cout << endl; - - /* - * vector of cells - */ - cells_v.resize(nCells); - for (i=0; isetNum(i); - 
cells_v[i] = *temp_p; - } - - - /* - * vector of vertices - */ - vertices_v.resize(nVertices); - for (i=0; isetNum(i); - vertices_v[i] = *temp_p; - } - - - /* - * cellsOnVertex - */ - cellsOnVertex_v.resize(nVertices); - for (i=0; i Date: Fri, 25 May 2018 11:07:21 -0600 Subject: [PATCH 011/180] Remove grid_gen/periodic_hex_minimal --- grid_gen/periodic_hex_minimal/Makefile | 79 ------- grid_gen/periodic_hex_minimal/README | 16 -- .../module_cell_indexing.F | 170 -------------- .../module_write_netcdf.F | 211 ------------------ grid_gen/periodic_hex_minimal/namelist.input | 5 - grid_gen/periodic_hex_minimal/periodic_grid.F | 103 --------- 6 files changed, 584 deletions(-) delete mode 100644 grid_gen/periodic_hex_minimal/Makefile delete mode 100644 grid_gen/periodic_hex_minimal/README delete mode 100644 grid_gen/periodic_hex_minimal/module_cell_indexing.F delete mode 100644 grid_gen/periodic_hex_minimal/module_write_netcdf.F delete mode 100644 grid_gen/periodic_hex_minimal/namelist.input delete mode 100644 grid_gen/periodic_hex_minimal/periodic_grid.F diff --git a/grid_gen/periodic_hex_minimal/Makefile b/grid_gen/periodic_hex_minimal/Makefile deleted file mode 100644 index 3799439a2..000000000 --- a/grid_gen/periodic_hex_minimal/Makefile +++ /dev/null @@ -1,79 +0,0 @@ -# IBM with Xlf compilers -#FC = xlf90 -#CC = xlc -#FFLAGS = -qrealsize=8 -g -C -#CFLAGS = -g -#LDFLAGS = -g -C - -# pgf90 -#FC = pgf90 -#CC = pgcc -#FFLAGS = -r8 -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 - -# gfortran -FC = gfortran -CC = gcc -#FFLAGS = -O3 -m64 -ffree-line-length-none -fdefault-real-8 -fconvert=big-endian -FFLAGS = -g -m64 -ffree-line-length-none -fdefault-real-8 -fconvert=big-endian -CFLAGS = -O3 -m64 -LDFLAGS = -O3 -m64 - -# ifort -#FC = ifort -#CC = icc -#FFLAGS = -real-size 64 -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 - -# absoft -#FC = f90 -#CC = gcc -#FFLAGS = -dp -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 - - -CPP = cpp -P -traditional -CPPFLAGS = -CPPINCLUDES = -INCLUDES = -I$(NETCDF)/include - -# 
Specify NetCDF libraries, checking if netcdff is required (it will be present in v4 of netCDF) -LIBS = -L$(NETCDF)/lib -NCLIB = -lnetcdf -NCLIBF = -lnetcdff -ifneq ($(wildcard $(NETCDF)/lib/libnetcdff.*), ) # CHECK FOR NETCDF4 - LIBS += $(NCLIBF) -endif # CHECK FOR NETCDF4 -LIBS += $(NCLIB) - - - -RM = rm -f - -########################## - -.SUFFIXES: .F .o - - -OBJS = periodic_grid.o \ - module_cell_indexing.o \ - module_write_netcdf.o - -all: periodic_grid - -periodic_grid.o: module_cell_indexing.o module_write_netcdf.o - -periodic_grid: $(OBJS) - $(FC) $(LDFLAGS) -o $@ $(OBJS) $(LIBS) - -clean: - $(RM) *.o *.mod periodic_grid - -.F.o: - $(RM) $@ $*.mod - $(CPP) $(CPPFLAGS) $(CPPINCLUDES) $< > $*.f90 - $(FC) $(FFLAGS) -c $*.f90 $(INCLUDES) - $(RM) $*.f90 diff --git a/grid_gen/periodic_hex_minimal/README b/grid_gen/periodic_hex_minimal/README deleted file mode 100644 index 99a7f9817..000000000 --- a/grid_gen/periodic_hex_minimal/README +++ /dev/null @@ -1,16 +0,0 @@ - -This dir contains a version of the orginal periodic hex code that creats a "minimal" grid.nc file -to be then processed using the mesh convergion tool ("mpas_mesh_converter.cpp" in the -"grid_gen/mesh_conversion_tools/" subdir). E.g., invoking ... - -> mpas_mesh_converter ./grid.nc - -...will create a full mpas mesh called "mesh.nc". - -These alterations were made to greatly speed up the mesh generation process for large meshes. In -periodic_grid.F, only the necessary fields are constructed and written to netcdf, and these are -allocated, written, and deallocated in sequence to minimze the memory footprint. In -module_write_netcdf.F, the minimal number of fields are included and all fields are optional so -that the netcdf write command can be called as many times as needed in periodic_grid.F. - -S. 
Price, 6-11-15 diff --git a/grid_gen/periodic_hex_minimal/module_cell_indexing.F b/grid_gen/periodic_hex_minimal/module_cell_indexing.F deleted file mode 100644 index b360cf6ff..000000000 --- a/grid_gen/periodic_hex_minimal/module_cell_indexing.F +++ /dev/null @@ -1,170 +0,0 @@ -module cell_indexing - -! this subroutine provide index mapping for hexagon meshes dimensioned (nx, ny) - - integer, parameter :: maxEdges = 6 - - integer :: nx, ny, nVertLevels, nTracers, vertexDegree - real (kind=8) :: dc - integer, dimension(20) :: nproc - - - contains - - - subroutine cell_indexing_read_nl() - - implicit none - - namelist /periodic_grid/ nx, ny, dc, nVertLevels, nTracers, nproc, vertexDegree - - nx = 200 - ny = 200 - dc = 10000. - nVertLevels = 1 - nTracers = 2 - nproc(:) = -1 - vertexDegree = 3 - - open(20,file='namelist.input',status='old') - read(20,periodic_grid) - close(20) - - if (mod(ny, 2) /= 0) then - print *, "Error: ny must be divisible by 2 for the grid's periodicity to work properly." - print *, "Please adjust ny in your namelist file and rerun the program." 
- call exit() - endif - - end subroutine cell_indexing_read_nl - - - subroutine cellColRow(idx, iCol, iRow) - - implicit none - - integer, intent(in) :: idx - integer, intent(out) :: iCol, iRow - - iRow = ((idx-1) / nx) + 1 - iCol = mod((idx-1), nx) + 1 - - end subroutine cellColRow - - - integer function cellIdx(iCol, iRow) - - implicit none - - integer, intent(in) :: iCol, iRow - - cellIdx = (iRow-1)*nx + iCol - - end function cellIdx - - - integer function cellOnCell(iCol, iRow, neighborNumber) - - implicit none - - integer, intent(in) :: iCol, iRow, neighborNumber - - integer :: mx, px, my, py - - mx = iCol - 1 - if (mx == 0) mx = nx - my = iRow - 1 - if (my == 0) my = ny - px = iCol + 1 - if (px == nx + 1) px = 1 - py = iRow + 1 - if (py == ny + 1) py = 1 - - if (mod(iRow,2) == 1) then - if (neighborNumber == 1) then - cellOnCell = cellIdx(mx, iRow) - else if (neighborNumber == 2) then - cellOnCell = cellIdx(mx, my) - else if (neighborNumber == 3) then - cellOnCell = cellIdx(iCol, my) - else if (neighborNumber == 4) then - cellOnCell = cellIdx(px, iRow) - else if (neighborNumber == 5) then - cellOnCell = cellIdx(iCol, py) - else if (neighborNumber == 6) then - cellOnCell = cellIdx(mx, py) - end if - else - if (neighborNumber == 1) then - cellOnCell = cellIdx(mx, iRow) - else if (neighborNumber == 2) then - cellOnCell = cellIdx(iCol, my) - else if (neighborNumber == 3) then - cellOnCell = cellIdx(px, my) - else if (neighborNumber == 4) then - cellOnCell = cellIdx(px, iRow) - else if (neighborNumber == 5) then - cellOnCell = cellIdx(px, py) - else if (neighborNumber == 6) then - cellOnCell = cellIdx(iCol, py) - end if - end if - - end function cellOnCell - - - integer function edgeOnCell(iCell, neighborNumber) - - implicit none - - integer, intent(in) :: iCell, neighborNumber - - integer :: myRow, myCol - - call cellColRow(iCell, myCol, myRow) - - if (neighborNumber == 1) then - edgeOnCell = 3*(iCell - 1) + 1 - else if (neighborNumber == 2) then - edgeOnCell = 
3*(iCell - 1) + 2 - else if (neighborNumber == 3) then - edgeOnCell = 3*(iCell - 1) + 3 - else if (neighborNumber == 4) then - edgeOnCell = 3*(cellOnCell(myCol, myRow, 4) - 1) + 1 - else if (neighborNumber == 5) then - edgeOnCell = 3*(cellOnCell(myCol, myRow, 5) - 1) + 2 - else if (neighborNumber == 6) then - edgeOnCell = 3*(cellOnCell(myCol, myRow, 6) - 1) + 3 - end if - - end function edgeOnCell - - - integer function vertexOnCell(iCell, neighborNumber) - - implicit none - - integer, intent(in) :: iCell, neighborNumber - - integer :: myRow, myCol - - call cellColRow(iCell, myCol, myRow) - - if (neighborNumber == 1) then - vertexOnCell = 2*(iCell - 1) + 1 - else if (neighborNumber == 2) then - vertexOnCell = 2*(iCell - 1) + 2 - else if (neighborNumber == 3) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 3) - 1) + 1 - else if (neighborNumber == 4) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 4) - 1) + 2 - else if (neighborNumber == 5) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 4) - 1) + 1 - else if (neighborNumber == 6) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 5) - 1) + 2 - end if - - end function vertexOnCell - - -end module cell_indexing diff --git a/grid_gen/periodic_hex_minimal/module_write_netcdf.F b/grid_gen/periodic_hex_minimal/module_write_netcdf.F deleted file mode 100644 index b8cbee3a9..000000000 --- a/grid_gen/periodic_hex_minimal/module_write_netcdf.F +++ /dev/null @@ -1,211 +0,0 @@ -module write_netcdf - - integer :: wr_ncid - integer :: wrDimIDnCells - integer :: wrDimIDnEdges - integer :: wrDimIDnVertices - integer :: wrDimIDmaxEdges - integer :: wrDimIDmaxEdges2 - integer :: wrDimIDTWO - integer :: wrDimIDvertexDegree - integer :: wrVarIDxCell - integer :: wrVarIDyCell - integer :: wrVarIDzCell - integer :: wrVarIDxVertex - integer :: wrVarIDyVertex - integer :: wrVarIDzVertex - integer :: wrVarIDcellsOnVertex - - integer :: wrLocalnCells - integer :: wrLocalnVertices - - contains - - subroutine write_netcdf_init( & - 
nCells, & - nEdges, & - nVertices, & - maxEdges, & - vertexDegree, & - dc, & - nx, & - ny ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: nCells - integer, intent(in) :: nEdges - integer, intent(in) :: nVertices - integer, intent(in) :: maxEdges - integer, intent(in) :: vertexDegree - real (kind=8), intent(in) :: dc - integer, intent(in) :: nx - integer, intent(in) :: ny - - integer :: nferr - integer, dimension(10) :: dimlist - character (len=16) :: on_a_sphere - character (len=16) :: is_periodic - real (kind=8) :: sphere_radius - real (kind=8) :: x_period, y_period - - - wrLocalnCells = nCells - wrLocalnVertices = nVertices - - on_a_sphere = 'NO' - is_periodic = 'YES' - sphere_radius = 0.0 - x_period = (nx) * dc - y_period = (ny) * (dc * sqrt(3.0)) / 2.0 - - nferr = nf_create('grid.nc', IOR(NF_CLOBBER,NF_64BIT_OFFSET), wr_ncid) - - ! - ! Define dimensions - ! - nferr = nf_def_dim(wr_ncid, 'nCells', nCells, wrDimIDnCells) - nferr = nf_def_dim(wr_ncid, 'nEdges', nEdges, wrDimIDnEdges) - nferr = nf_def_dim(wr_ncid, 'nVertices', nVertices, wrDimIDnVertices) - nferr = nf_def_dim(wr_ncid, 'maxEdges', maxEdges, wrDimIDmaxEdges) - nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) - nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) - nferr = nf_def_dim(wr_ncid, 'vertexDegree', vertexDegree, wrDimIDvertexDegree) - - - ! - ! Define attributes - ! - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'on_a_sphere', 16, on_a_sphere) - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'is_periodic', 16, is_periodic) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'sphere_radius', NF_DOUBLE, 1, sphere_radius) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'x_period', NF_DOUBLE, 1, x_period) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'y_period', NF_DOUBLE, 1, y_period) - - - ! - ! Define variables - ! 
- dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'xCell', NF_DOUBLE, 1, dimlist, wrVarIDxCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'yCell', NF_DOUBLE, 1, dimlist, wrVarIDyCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'zCell', NF_DOUBLE, 1, dimlist, wrVarIDzCell) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'xVertex', NF_DOUBLE, 1, dimlist, wrVarIDxVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'yVertex', NF_DOUBLE, 1, dimlist, wrVarIDyVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'zVertex', NF_DOUBLE, 1, dimlist, wrVarIDzVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'cellsOnVertex', NF_INT, 2, dimlist, wrVarIDcellsOnVertex) - - nferr = nf_enddef(wr_ncid) - - end subroutine write_netcdf_init - - - subroutine write_netcdf_fields( & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - cellsOnVertex ) - - implicit none - - include 'netcdf.inc' - - real (kind=8), dimension(:), optional, intent(in) :: xCell - real (kind=8), dimension(:), optional, intent(in) :: yCell - real (kind=8), dimension(:), optional, intent(in) :: zCell - real (kind=8), dimension(:), optional, intent(in) :: xVertex - real (kind=8), dimension(:), optional, intent(in) :: yVertex - real (kind=8), dimension(:), optional, intent(in) :: zVertex - integer, dimension(:,:), optional, intent(in) :: cellsOnVertex - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - - if(present(xCell))then - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDxCell, start1, count1, xCell) 
- endif - - if(present(yCell))then - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDyCell, start1, count1, yCell) - endif - - if(present(zCell))then - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDzCell, start1, count1, zCell) - endif - - if(present(xVertex))then - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDxVertex, start1, count1, xVertex) - endif - - if(present(yVertex))then - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDyVertex, start1, count1, yVertex) - endif - - if(present(zVertex))then - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDzVertex, start1, count1, zVertex) - endif - - if(present(cellsOnVertex))then - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnVertex, start2, count2, cellsOnVertex) - endif - - end subroutine write_netcdf_fields - - - subroutine write_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(wr_ncid) - - end subroutine write_netcdf_finalize - -end module write_netcdf diff --git a/grid_gen/periodic_hex_minimal/namelist.input b/grid_gen/periodic_hex_minimal/namelist.input deleted file mode 100644 index 6faa7f857..000000000 --- a/grid_gen/periodic_hex_minimal/namelist.input +++ /dev/null @@ -1,5 +0,0 @@ -&periodic_grid - nx = 500, - ny = 500, - dc = 10000., -/ diff --git a/grid_gen/periodic_hex_minimal/periodic_grid.F b/grid_gen/periodic_hex_minimal/periodic_grid.F deleted file mode 100644 index 53caa2e41..000000000 --- a/grid_gen/periodic_hex_minimal/periodic_grid.F +++ /dev/null @@ -1,103 +0,0 @@ -program hexagonal_periodic_grid - - use cell_indexing - use write_netcdf - - implicit none - - real (kind=8), parameter :: pi = 3.141592653589793 - real (kind=8), parameter :: ONE = 1.0_8 - real (kind=8), 
parameter :: TWO = 2.0_8 - real (kind=8), parameter :: THREE = 3.0_8 - real (kind=8), parameter :: FOUR = 4.0_8 - real (kind=8), parameter :: SIX = 6.0_8 - - integer, allocatable, dimension(:,:) :: verticesOnCell, cellsOnVertex - real (kind=8), allocatable, dimension(:) :: xCell, yCell, zCell - real (kind=8), allocatable, dimension(:) :: xVertex, yVertex, zVertex - - integer :: i, j, np, iCell - integer :: nCells, nEdges, nVertices - integer :: iRow, iCol, ii, jj - integer :: nprocx, nprocy - real (kind=8) :: r - character (len=32) :: decomp_fname - - call cell_indexing_read_nl() - - nCells = nx*ny - nEdges = 3*nCells - nVertices = 2*nCells - - call write_netcdf_init( nCells, nEdges, nVertices, maxEdges, vertexDegree, dc, nx, ny ) - - allocate(verticesOnCell(maxEdges, nCells)) - - do iRow = 1, ny - do iCol = 1, nx - iCell = cellIdx(iCol,iRow) - do j=1,maxEdges - verticesOnCell(j,iCell) = vertexOnCell(iCell,j) - end do - end do - end do - - allocate(cellsOnVertex(3,nVertices)) - - do iRow = 1, ny - do iCol = 1, nx - iCell = cellIdx(iCol,iRow) - cellsOnVertex(3,verticesOnCell(2,iCell)) = iCell - cellsOnVertex(1,verticesOnCell(4,iCell)) = iCell - cellsOnVertex(2,verticesOnCell(6,iCell)) = iCell - cellsOnVertex(1,verticesOnCell(1,iCell)) = iCell - cellsOnVertex(2,verticesOnCell(3,iCell)) = iCell - cellsOnVertex(3,verticesOnCell(5,iCell)) = iCell - end do - end do - - call write_netcdf_fields( cellsOnVertex=cellsOnVertex ) - - allocate(xCell(nCells)) - allocate(yCell(nCells)) - allocate(zCell(nCells)) - allocate(xVertex(nVertices)) - allocate(yVertex(nVertices)) - allocate(zVertex(nVertices)) - - do iRow = 1, ny - do iCol = 1, nx - iCell = cellIdx(iCol, iRow) - if (mod(iRow,2) == 1) then - xCell(iCell) = dc*real(iCol) - 0.5*dc - yCell(iCell) = dc*real(iRow)*sqrt(THREE) / TWO - zCell(iCell) = 0.0 - else - xCell(iCell) = dc*real(iCol) - yCell(iCell) = dc*real(iRow)*sqrt(THREE) / TWO - zCell(iCell) = 0.0 - end if - xVertex(verticesOnCell(1,iCell)) = xCell(iCell) - 0.5*dc - 
yVertex(verticesOnCell(1,iCell)) = yCell(iCell) + dc * sqrt(THREE) / SIX - zVertex(verticesOnCell(1,iCell)) = 0.0 - xVertex(verticesOnCell(2,iCell)) = xCell(iCell) - 0.5*dc - yVertex(verticesOnCell(2,iCell)) = yCell(iCell) - dc * sqrt(THREE) / SIX - zVertex(verticesOnCell(2,iCell)) = 0.0 - end do - end do - - deallocate(verticesOnCell) - - call write_netcdf_fields( xCell=xCell, yCell=yCell, zCell=zCell ) - deallocate(xCell) - deallocate(yCell) - deallocate(zCell) - - call write_netcdf_fields( xVertex=xVertex, yVertex=yVertex, zVertex=zVertex ) - deallocate(xVertex) - deallocate(yVertex) - deallocate(zVertex) - - call write_netcdf_finalize() - -end program hexagonal_periodic_grid From d64a5da684343ba4ec5994c59ac1deb40b796b64 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:08:30 -0600 Subject: [PATCH 012/180] Remove python_scripts/update_version_numbers --- python_scripts/update_version_numbers/README | 25 ------ .../update_version_numbers/update_versions.py | 77 ------------------- 2 files changed, 102 deletions(-) delete mode 100644 python_scripts/update_version_numbers/README delete mode 100755 python_scripts/update_version_numbers/update_versions.py diff --git a/python_scripts/update_version_numbers/README b/python_scripts/update_version_numbers/README deleted file mode 100644 index 784eb045b..000000000 --- a/python_scripts/update_version_numbers/README +++ /dev/null @@ -1,25 +0,0 @@ -Author: Doug Jacobsen -Date: 05/02/2013 - -About: - This script (update_versions.py) is used to increment version numbers in - the MPAS Core Registry.xml files. - - It can be run from the root MPAS directory (with the root Makefile). - - It searches the current directory for all files with .xml extensions. Then - extracts the version number from each of them and increments the - appropriate version number. Then re-writes the file. - - To script can be run as follows: - ./update_versions.py [--major] [--minor] - - The --major and --minor flags are optional. 
If both are omitted, the script - does nothing. - - The --major flag increments the major version number by 1, and resets the - minor version number to zero. - - The --minor flag increments the minor version number by 1, and ignores the - major version number. - diff --git a/python_scripts/update_version_numbers/update_versions.py b/python_scripts/update_version_numbers/update_versions.py deleted file mode 100755 index c8f7b037d..000000000 --- a/python_scripts/update_version_numbers/update_versions.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/python -from optparse import OptionParser -import xml.etree.ElementTree as ET -import os - - -parser = OptionParser() -parser.add_option("--major", action="store_true", dest="major", help="Increment Major Version (Auto-resets minor version.") -parser.add_option("--minor", action="store_true", dest="minor", help="Increment Minor Version.") - -options, args = parser.parse_args() - -if not options.major and not options.minor: - parser.error('Either major or minor version is required.') - -for r, d, f in os.walk("."): - for files in f: - if files.endswith(".xml"): - path = os.path.join(r, files) - registry_tree = ET.parse(path) - registry = registry_tree.getroot() - version = registry.attrib['version'] - version = version.split('.') - major_ver = int(version[0]) - minor_ver = int(version[1]) - - if options.major: - new_major_ver = major_ver + 1 - new_minor_ver = 0 - elif options.minor: - new_major_ver = major_ver - new_minor_ver = minor_ver + 1 - - print "%s version: %d.%d"%(path, new_major_ver, new_minor_ver) - - registry_file = open(path, 'r+') - - lines = registry_file.readlines() - registry_file.seek(0) - registry_file.truncate() - for line in lines: - if 'version="%d.%d"'%(major_ver,minor_ver) in line: - if 'xml' in line: - new_line = line - else: - new_line = line.replace('%d.%d'%(major_ver, minor_ver), '%d.%d'%(new_major_ver, new_minor_ver)) - else: - new_line = line - registry_file.write(new_line) - elif files == 
"README.md": - path = os.path.join(r, files) - readme_file = open(path, 'r+') - - lines = readme_file.readlines() - readme_file.seek(0) - readme_file.truncate() - - for line in lines: - if line.find('MPAS-v') >= 0: - version_num = line.replace('MPAS-v', '') - version_array = version_num.split('.') - major_ver = int(version_array[0]) - minor_ver = int(version_array[1]) - - if options.major: - new_major_ver = major_ver + 1 - new_minor_ver = 0 - elif options.minor: - new_major_ver = major_ver - new_minor_ver = minor_ver + 1 - - print "%s version: %d.%d"%(path, new_major_ver, new_minor_ver) - - readme_file.write(line.replace('v%d.%d'%(major_ver, minor_ver), 'v%d.%d'%(new_major_ver, new_minor_ver))) - else: - readme_file.write(line) - From 4d57cbe7b4d57806a89a6e37412b7b0eaa682a5d Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:25:09 -0600 Subject: [PATCH 013/180] Remove python_scripts/namelist_generation --- python_scripts/namelist_generation/.gitignore | 3 - python_scripts/namelist_generation/README | 66 -- python_scripts/namelist_generation/cmunrm.otf | Bin 330492 -> 0 bytes .../parse_ocean_xml_registry.py | 1039 ----------------- .../namelist_generation/parse_xml_registry.py | 609 ---------- 5 files changed, 1717 deletions(-) delete mode 100644 python_scripts/namelist_generation/.gitignore delete mode 100644 python_scripts/namelist_generation/README delete mode 100644 python_scripts/namelist_generation/cmunrm.otf delete mode 100755 python_scripts/namelist_generation/parse_ocean_xml_registry.py delete mode 100755 python_scripts/namelist_generation/parse_xml_registry.py diff --git a/python_scripts/namelist_generation/.gitignore b/python_scripts/namelist_generation/.gitignore deleted file mode 100644 index 14cb08aa5..000000000 --- a/python_scripts/namelist_generation/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# Ignore LaTeX files and generated namelist -namelist.input.generated -*.tex diff --git a/python_scripts/namelist_generation/README 
b/python_scripts/namelist_generation/README deleted file mode 100644 index b25bc069a..000000000 --- a/python_scripts/namelist_generation/README +++ /dev/null @@ -1,66 +0,0 @@ -Authors: Doug Jacobsen, Xylar Asay-Davis -Date: 04/19/13 -Last Modified: 10/26/17 - -This script parses a MPAS Registry.xml file to generates documentation for a -users or developers guide. - -Typical usage is as follows: - - # set the core, one of ocean, landice, cice, etc. - export CORE= - # Set your repo directories: - export MPAS_REPO=~/repos/MPAS - export MPAS_TOOLS_REPO=~/repos/MPAS-Tools - export MPAS_DOCUMENTS_REPO=~/repos/MPAS-Documents - cd $MPAS_REPO - # Compile MPAS so you have a src/core_ocean/Registry_processed.xml file. - # Change the compiler as needed. - make CORE=$CORE gfortran - cd $MPAS_DOCUMENTS_REPO/users_guide/$CORE - # clean up blank lines at the top of the XML file - sed '/./,$!d' $MPAS_REPO/src/core_${CORE}/Registry_processed.xml > \ - Registry_cleaned.xml - $MPAS_TOOLS_REPO/python_scripts/namelist_generation/parse_xml_registry.py \ - -f Registry_cleaned.xml -d section_descriptions \ - -p ${CORE}/section_descriptions - cd .. - make clean CORE=$CORE - make CORE=$CORE - -The -f flag points to the processed registry file (typically with a full path). - -The -d flag points to the local or full path to .tex files that containing -section descriptions for providing additional information in the output latex -documentation. - -Section descriptions are required to be named whatever the section is. For -example, in a namelist, there might be a namelist record named -"&time_management". The script searches the directory listed with the -d -flag for a latex file named time_management.tex, and adds an input line to -the output latex documentation to include this file. - -The -p flag specifies the relative path inside the latex documentation where -the file should be input from. 
As an example, one might -run it as follows to generate the ocean core's documentation:: - - ./parse_xml_registry.xml -f mpas_root/src/core_ocean/Registry.xml \ - -d mpas_doc_root/users_guide/ocean/section_descriptions \ - -p ocean/section_descriptions - -On output, several files are created which are listed below. - namelist.input.generated - A default namelist.input file for the core that - owns the Registry.xml file. - dimensions.tex - A tabulated description of the dimensions for the core. - namelist_table_documentation.tex - A tabulated description of the namelist - options for the core. - namelist_section_documentation.tex - A more detailed section format - description of the namelist options - for the core. - variable_table_documentation.tex - A tabulated description of the variables - in the core. - variable_section_documentation.tex - A more detailed section formate - description of the variable in the - core. - define_version.tex - A simple file which can be included to define \version - inside the users guide. 
diff --git a/python_scripts/namelist_generation/cmunrm.otf b/python_scripts/namelist_generation/cmunrm.otf deleted file mode 100644 index b449df04c3c62152d99906a90a772d463d81e712..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 330492 zcmeFacbF7K^fp?Zdp74BW^-V}OixeR$cAZh&Z5Gw47&@P*g!I&D3~xK21Jx3iUBbP zFo9q~MNu(voayiv<;assrE$U{7VS;=Tt%>pBXcuRN-QWtc7#O!u2yJdZrrJ9$bDG z*t>@i^;|=%yFh51o6e!QF@Vk)!5+U0y z)olGAGr}uQkznp8J!SWdjiY?>-#gD=C49ka5SIz7r@}wDj@#TXl08B{{O%UMKuh?e zew05UWGs=7fXcB`nUh2yguR)44dS<=WDXHC1NK-Va{3j7942i4c;@F{S(fma0>{CG z^!Ibdeh1mZZ6^N!^uzjZ%v0&Juy~cy*3g8DD;R z63!NT(*+X7PWS2}=r?-x5R%D$>(xWaK7O574UwRNNtDnjbsk|)=pYMn+WGjr760tZqh~1oj`x} zKzbu-0UI5p34Sqx1>L5RG{Em2q}hu(V67I;?xAN)AoD2p95@s08tJty@T(haU6|WZ zWCGxly7smOp3ZL1Y~!rO#+tU-ou0+U$tA|I?zZ*?#yOqstw#N6nbY3Z-Bn_o(BA4X zHnuG8XzDV$J3YpR_Ks$_lAc!E(&I5sn70_uHFkPhJnk;9*-;a8Br=Ljf`UwdjC4Xl z=0MV@NyZ7D&Sp@@)8gyZMaH+cx|+4Sl*h)sfXn`yP^X+}~8 z;eTQn11#e?X5t{l#0EhjCiu?@Q0=W9J>8y8-n$^z`VBGyb_uaVFo8}cdpf(C+uMw$5_^dW;%I!J zvWf!=qMN4mR_v{-Ca9xssH-wk3jZ}gy%j^f1=ddqG=O$U>x5rDQ1|ViHG)WM4nmNCqsAyud;c{C05_nxO!C2nxvA zg}6p)UVQQ>Ny<)c^wJp-DDAj zBFKf_fb~8BuXn?uTt1?tosVtGT5F zT3jg@fi?-{9cbA9FKgld$sq6ra3b{S1nw8n8Bcf(nI$Yi@9Aker}G=6^THj#c{3qV zA~WHatJ$~NpV@!7a4wQd;uOxxRdU0)QQT$REUt%J!d=bX%-za8!hOVj!hOzt%N^$W zxKsQ>emVaUf0pkTqJ`A3>Qs^{r3R;lrY5GQq>e}(l{z7HYU=dV%TwK{i&F1SU7fls zb#Gd9T3%Xl+K9AHE{aPCuT0 zGX1tZ&7S25)NnMH!l|5{)1Zbg(Gogm>fEs=$bx#^eOGqnBGo?*TYf8HkYIt2bNl!>O zrVmXY>#gA#=}XgJNZ*})ApP6)6X~ao#25iJoIq>XWOPCeR~zeT4PWE0;U{Pf$9Zcw z7i##9jMW#^a6xwFMK%1SzlM`G%-uA2)6h+mHzxdQ};Z0D(Po123a_%W|Dhg`&qtpALhR=Fy*mTAXHGJsI-~SS<;r_$@Z}jiz zf4cwi{w@8F^>6Oq)W5O+q5cQ@Z|%Rie{p|Le`kMde@p+o{<-~4{ht1Y{#pGK`p5Qb z{q_BI{k8pr`%C)sPfN#!9IH82eXQ!3dQ3SMaxC~*&@uU#bWA)Z^d9g1viI}eExnt2 z=l8bu&g*UNb@yJ@JFRzW@5J8Gy(4>v_YUpVdR@Ksy>-2{y@PwJd#idYddqvAy^dac zueDe0we*^LOM3HrGkT4^>Afkvk-ZT|dynou`p(g}kG^^Ijiav~op*HF(FsS#A02me z^wFV5jvYC2WZ{vPBTYx<{P^mRGk?6~hlay99JU|YacIM#%MXn_l=c1L@4x%L`(VbG 
zR~%R}t7zubnd4__GnUO*JfnL?=Zvdn0d;?1}7(?2H^8Sr?fSaVp}ah!;a&44O>) z4%&$S&*6Vj4TR`+Q4&Fq>f?Lki%+ zL4O?dNDS;NAfq89LHes8Qy{>+O}E9V5Uijd1Zjs*3Hmo6H3-8%{}Ck2-}Jy90}1)$ zE(0Crb{yn`n+5tWAiE*J+>-qrTRh46_tz^Ih_+#AY3ehJ}Q2;~sIhj17|4Fuh;9_k^S@&*^BL&7hFIFyNp zyz$FH-vRPQ2s+mLAYsnM13n)G;T#0$x%mplt&Nz2#^U75| zoD2cd_{%|F0%1DnZjf+KQX!3R015Xc71DSQ$VC|VMv%b!sdqzq6UfyV_!f}wK-dN8 ztswV6*b90)A!$&qG`;NeK|-F>@<6`3AJAa_I9 z4{IAd$O90*1|9By`nM3EEsJ`-PC$VCii1F&hHxJA5<-jwLMZ5VkP#4KK!h;Xlrn=pvYAh%!;q0Ws@K-dcB z&ja}^1`*mx28Tgx0|{l!h=+5a&19ru5TVUv7%_;QAfeoP5W7G^*)yOmiQOQf?(`ro z015Xs<8IIwf?N&ZKG3fqBvZs7!u`vP!XRD^5-{~3LisY`+0=sw<;>iTLA(~$u>eyK z;&mXQp0l9sipyZR3(~T)K)->I92SGP0^~Rh;*B8R#~|JWvKNDRGa0}1t}2NBAbmx@8W6Nc(gM|u$NBBTV`h8{#HzX{4=N(TKN zLNsK^ZD`XJ(AN;M8Lrz5eTujiB-F7U#C0Gc zzj_ei-fxCBrU&s+kX8)hdP0uFGoS|%+C?9M5DxlAkdYAJzKYL-%z&WR+uI=FKK5M- zI@EU`JYRi1p#KMQA%vwcy?|$+59+-S>R5v7`=EaNAdeE%eIMkn5Aq^8Kmv9jU`iDr zHxY6o0`w}7aIa5{1|8bt3BWm_pAYxp1e~h}xd^0wKKw32*-k(^)q`vX33Y$sY0woy zPR@h?ZCqACLVcd>1ig|FC@MJxSh5BZuui3e4)u6?F9sQ&m(w3X_!!dXfP}u`^a0SD zL4JopZUqT_*y(>Dy$vLkUk`Ez$bJkm+~YIZ7-YacV}bzBwhYh088?I_pkDob2t09!JY=U>S4zdYzK5*YnrkYTRJ)I%5o>F*Kd3z+diAoA-(WP{)W z!r=HpSO35MV+cz)m{|%$gn6S(3^1DvCLtsg*0;k+1c@Y3BpOyvV@Vu|CkZ5xB#~s2 z0>qdGox71_kW7+AvPllfC3z&D6p%tP2xhCrFyAjFCRp!Kh=r)c3X52FSj=+5dO4i$%qW5luI z2yrBVdrNjQ4soW~B3>d+mtT@!7AKO0;wW((@WOTEW^ygLh1^J1lH17b&lbdwZA50he8z@``$a(~D3DgWMiE1plIxga5%z?1~|@oIa1flQwcZY zmME!Dw;)3+iG$XV4fovyceo1f<#4!LGog;U$r7lEbs zqwHhsQ|xo>PWCuxm>P@GjlesjvLEO;V$QzVJ3eK ztd!ir-OH`x9_OCsUf^Em-r?ToKIOjR4spl0Ke$ueIiBN#_((p1Pv>*_VqWFT`C5J$ zKbD`uU(S2@7QPE+{nx_0ewzw;;fGyHi$ z5JF(}B}vE-@`X~tCR7Ub!U$o!Fin^#Gzx7(k8q{1Ot@LNQ@BrfSlA>yDLf~wFpl6a}uAkGy##0BD2;`QP! 
z;$7nX;yQ7&_>}m(_?q~(_@4NQ_@(&0*em`no)phZvJ@`GNf}a+WRc3G8fmCBMw%>L zCe4=SNuAPSX{oeAx=mUot&!GCk4xL6m!zH2Zs|kmGwG1@oAj6TkIcxj94^PnsdBbF zNLFO0JXjtgkCrFNm&y(DTzS5{NWMm1F0YjDksp*FmAA;x$S=w}<=yg!@@Mkb@(=RQ z@}Ke<`Mg0egcza?NrntVzM<4$GgKPt4I>QW4bu!W4UL92LyzG~!!pCohC2=S86Gxl zGCXN`&hVF@+d@`_tO;2k@_5L$ke5PshU^acFyyn4uS0$a`8njzkTW6YLxs?g z(CE;l(2UUh(9%#_Xk}=9=!nqqq0>TVhBk(Fgf0$U8oDC%w$N3fYeLtDJ|6mF=#J1` zp&x|q4?P%qIP|B`8g2ST162pvPd0{1C*074Oy0GD4p(VPA*+5cYG}pJ8Xh&W8)(A>q;CN#Pmc z`QfGEw(!dE`tT9qMOeKK#w__rt#lKOBBE{Fm^) z@V~>)h4)AB5pqOmM07-KM0`YIgfSvBA}69SqBug0ut$_dR7TWCjE$HP(HzkeaaF|f zhzBAbj#wY@Sj3YN&%t`fs}XNRyc4lEVqe68h%X}!MjVd#F`_r(w}`%olM(+$oR4H8 z`A9=#Y-Dz%DY7C`iyRp_HgZbj<&mDq7Fao15_xUpjj)PxZ{$Of8zP^Kd?9jYx zQ7=cm74?49XHnlq^+p|!`a7D54vLPBPKnNmE{V2B4~`xdJwAGR^z7)?=mpW&MBf;F zXY>Qn>!Y_uKOem#`rYV{qrZy&A^O+olhOahh%w=# zI${>bTo-do%snv=#cYasI_9OAH)Gz5*&p*w%#Sg@#hi*c7t6(l#Ky*^#Ad}7#+qXt zvDL9!?5Nm@vD0H`#Wu%w#4e0o8hcahU9hh7XzZ5QXJTKB-4VMh_Ji2{u?J%h$Nm(1 zJoa?#e{p5!udoYF(ffMF)1-4F+Z_1(Uw@5Sf4l|aeU&m z#F>eWiEW8JiB~2rOT0Pp&cyo?A5Pqq_+;X9iLWNUmAE(Y#%iT@-q zNpeznQe0AMQg+gyBqhn2G&pHU(&(f~NtY%yB+X5lpR_3Hnxy4PE0gX?dNAqHq%BF$ zB)yolBWYLC2TA*r4kjH=`YGvn(&?oClKJG|C~M@n^ymNF`3V#@TCSt-pa9VrV_u1>ij<<^wDQyxfpB;~P`r&C@? 
zc|GNwl=o9UP5CP2P|C5CKT=MmoJ-|WgHj`5wK_dDH?=rbO)XEYO&yjxHgyWT74W3C zq;|r(_0_4@r>;!BH}#>^4XID0KAZY->Km!=rhb(AdFnT*Kc@bY+L!uwYJZxT7Md25 zmYkNERsd_;_Oz-rSK7$532B$4xnZ5VJ#9hSRcY6!-I8_}yj572wmI#owCB@aOM5%* zy|ho#zD)Z*tvBuWw3BK7rnBjW^oaEM^tAMx^rCc2dRcl6tc;IIpA2svW~a|f?@V6| ztK%!uZ%bd5z9xNr`s3-_(qBs739IEFrhf+SBz{Q$IsMP{Gw@zQFoqbTjY-A~W4^J} zXoFSrdgBOKHJ@gjX>2sM8oP{3jMo}(G~RB!*Z2^;r+^i7E($VC8TO2-3|Gd;j0qW+WVkb$GTJj1WL%YTea0;rcV*n4 zu`XkC##0&3XS|m2cE)=dpJaTQ@qI>b#_t&?GyctFGYy#$@Wvx8Gbghs(~?=1S(7<5 zb4=#s%*!%oXU@y)%v_wgG;>AfZJDbw*JQ5Gd^~eo=1ZA7Gk0fxnE6@e*O@`&3Y?qZ`Q|IUu1okbu{a@tiQ7U$!4_OQ|wljNh_K@t+ z*^{y_&2Gq^n>{~!QT8?2%d=Nz-;@1d_M_QbvY*L*F?&b$uIvx8_h%o>KAini_VMh~ z+5hG6Il(zmIf*&OoV=Wp9BWQRPF>D$c%L&hXGYGPoYtJ~oGWs!%eg7%j-1swYjZZ{ zY|Yu8^GeQ}IeT*UwxsT*Nmiu(>3%Rf7zLWcY?x(q5!YUEN^Vyw7i*ljd^W(J$cvV-IRAn-s-%yc^mV#=55b= zCGX9=J$d``4&)uoJDm4Z-fww-=AFqqpD*Nxo^$MyixFe!GVIq1-}&hQEc;qVV>@)rD&dHx_O!++O%f;hTkf3ilNrEc~JH z_rg_gMJwF*PwGnQc+Y< zQc-46VUeY%vZ$_Tc+vQxX+^V&nu`_`EibyaXj9P(MZ1bVEBdkML@_CjEY2*p6xSC| zES_E5U3`7JN{#h({{U;In$#W%dmwZ_Ab;*&E6D9qnhSG%6tkRs)l2T`>RywkDa%n?pOX9*2WOLvv-EB&hUXlY;ReNH(ry47^QX_INY z=?&9|rh}$qrjusI9A-{7=bMYoirH?iG}oC&nkSoQn48S=%}dPJn^&6eH9u^A%)HI~ zqWN9($L6ohKbU_vpH>7VR!LQI6^r6fT*^4*GNoDRQLa^1D)%cJm2JxF%3kGj<*@R* za@HbPA}y(w0*lpBV;NzhKFimZW0t=x{c5lpuV$&G zYPmW@ouJN8o7EomTJ;unwfczqg!;U?Q{AiXSHD$%R!^z@R)aO#nrh9nnyuwlmvx+V zy47QCw=T9Wv#zwRwmxEg!uq^*r**G&zx7+|F>9aoUz=bHwRv)9fC5 zn|+~usr@GVUG{bME%s;auiE$8Kec~tKVtvge%ir1LLG6AbVsqn=BRcIb&PXd;%IWr zcPw!%bKL4!!||@;W5-vH!;X`VerJ#~)@gJOa@w3V&XLY3&RNb@=OX7a z=WWjWog17_J700`a(?Xm+S%*ubDk@c%A(5R$}-A|%1X=Bva+(lW%Xr4%0`xrFPmJ} zShlF_rm_dho+x{*?8CC}%8r+l^2qYca!YxA`K0nWSqk_tz~kcw#)4Hc~w3oEXzxTWHr ziiawmta!0vN5$@n4=X;e_^#r|ir*?uRh+H(uac=0Dh-vPm64UPm5G(9l^K;el?9c> zmF7xorL(fKvZm5iIjnM2<+#d8mD4IOtDIBWUU^mJ^2)m^AE;m9JI4S9zfF z`^ujxPgk*3a#dtiQdL%!xvH#6s~S@^waQ)9UUfy)^;Nf4-B z+p6BGKdMewov#k9j;T(q&Z;h`F0Hm!S67d%o?bntdVckl)hnv+s(z^YvFh#BJF53q ze^&i{^)J<@29v?TgX0Eg3@#dM9(@1c#|CeM?KLJ-jR{3P6}8ryR(C^ZJ8T`KqGnd7 
zXMx8}k2UR$?QNcUZWMLy2H2|9K#z3|&7BQBt#evDiy8*hy4mgB?grTM(cR$J>Kfc& zyn!C;>2bDK?`(H>&jw_7w+o>?C|nfVLys=M9gkmgVRk$yT+|w53s8m(WTkOH9YQTN z(&G@nl}5idq@lgF)s6XX4A6%Tm}nYMht6_$>J?{J?8Ca7TV{Kj>2Vn5y&1(Y3f4@I z!!V!CD2DlAhYfF@JAAW!TqPg@IYZE0>8NpC|7J&p`GrzJoiNh_s= z9!COh3xv`1iZ*&29dK@2fIgaD(S}F2zK684y`#zFYHRegHIBg&!cIB8(Tt%*Y^TRD zO+9UmP&utF?w;=Uz{q$C*GZ4#1AsaM^zjs~lO89~GrQ<T+UusriE!t-Axxq-tcM;aVKI78OrquJp~p#`&25c6`e9OFQF;O+lklGQ zpqMg{y@dnnv;lSTfO-in+Y)-b#NQ;A__dbyw#F{n%pCM-H9O20Q&3n?s3@!`Y$)s~ z94MS9YEaaos6#<}0JFn|F^vi*;yDq|i9k*Saw3ouft(2BL?9;uIT6S?3ymR=6M>uv zj!#P4Hl{fv1$;j2C-@os|K-Z5UU2UY7nai3sr+yHHcN?nndmUL29u; zwTM-VSha{%i&(XYRf|})h*gVNwTM-VSha{%i&(W-u3D_XI;_At#H&NRI>f6(ygJ0I zL%ce~t3$jx#H&NRI>f6(ygJ0IL%ce~t4F+g#H&ZVdc>(-2Q{*65Egea_r|t;U539xBFkK|j(&)%F2Z&oyke zMM2RO6Gc}{6kRbSEeL2qKnns|5Ku)x6#-QQR1r``KotR1ETD=7R1r@_JQeXVMZMLZSp zRK&9)o)z(|h-XDSE8Tig;GUvmu@h@ob1^Lp&Se z*$~f$cs9hdA)XEKY=~z=JR9QK5YL8qHpH_do*nV*h-XJUJL1_9&yIL@#Iqxw9r5gl zXGc6c;@J_;j(B#&LmpI+2Vr(b8-oM!9Ej&YJO|=A5YK^l4#aaHo&)h5i042&k}za zK}uAR5*4IG1u0QMN>q>%6{JK3DN#X6RFD!Cq(r3#>$3*&YVrOdEh4H-O+ivqG;Cs8O=G8ffd{(cS(H4Ho1E3vz=6 zxxs?mV8K4#f_=ON`*;iX@fPgPE!f#wu(P#b_iDlJ)q>rt1^YP*_H!2O%Pe+RcY9lV z*X(9br>CpAi$-c%I-1;6gTHR-hCfhhcDp*@&kf+O7xZ&o-T31$m_E~lF|Ez|?277> z0!|#$>S@F?n&H&{2~q3mb`NpGY?juvQ}u#e0?B|eu?eJ(F~Z%^;U4L3oi*D%s%Lc1 z)MhW<*yiy~?Gu_CTip}gJzm_gP0e*p&11WosmU6re!UOadx|gW_LtHVkY~W8<w%swk-pB2StMe*fN@#RnPWlQndQGD4_ zdQ6-US_j*wLxA*(n-R&j)^;s{yA z5weOSWYt%Cv#%kUeKltGwJ0+VAyphHt2k0tkxx_{8>=`rR&i{s;@DWlv9XF{V-?57 zDvpg+92=`RHdb+Ltm4>M#X+%(gJKm2#VQVpRU8znI4D+eP^>yMUpL~rsW{G6abT=^ ztJzzdI51YdrSdih-vxBAq2j<;#euPk17j5j#wresRU8l7VX=zCVikwQDy|o)xL%~< zdXb9jMJf)NRU9&_IAm6F$gJX!S;Zl>l~o)ot2kCxajdN3SXsrfvWjD6 zwch)XIdIUVBE_jlaVk=riWH~fxLQS~Q<3RZWI7ec)hdpwRUB8VIIdQ4T&?1`TE%g- zisNb(7mHL}EK+f?NX5k>6&H(CTr5&?u}DQqRdKOM#l<2O7mHLc&(5Lacy$gPH>$W; zq~ZWx)#xlu)#$8H)#$8A)#$9ric2z999vs)#B9ao6)O&$tvGOY(Pam7JzYTpp#y=? 
z2k{{E=^zLl!CX&A;UM$}1B8xluBR&#AoQ6n2z~YeLeDh_G)70Q5X0-}$^^vFG#$f2 zOwYBso~}iJ&<89a^!W`4eeMhbZPL{Ti0NZ=5SUN8>;N&$CtYta*VC>P1RA5$d2>CT zErY;B+9^N`5$N-8uBUAo1Qvz1UWn1myRepACVIJxPL#|pI#B{atH4DkN)V&jbzyzF z=sFuDdacl0yRdd$bczJO(agK(6bWLqLR@s6&FrExB@nbiT=b?wj8=gQZ-cX4jqPG|l^`-#=ZPK}s*+u6@AkZdVL4z2UfliAchGn4BB8XwG=|l)(^!B^xM9A!- zQy&mmFLdeyFFLj*bzf*6(oTdRvMl9^p} zkqiWu0h^)=o1zPwq6?d%3!9<~o1zPwq6?d%3!9>gE_Rt+bg>Hr<`bKn3!9sZE_T6h zm``kVE;|2$M7)34_*`_c%k087=)yMWqO&nb#CoB04KZvJl&&F$_m9#w#Ly>fD$jn@CH!gg&5udO1u!m%A&*zF{~^~yb#06qQna^tSm~r zW)~%15LiA+yb#0kVQ1yS&dNoJ7kYO`6%&149iFP*X*MF z3j)hW`4?hXKFYrk!}3x7g&3BPPB0;c<)iEiF)SZtUx;D(DEpdSlzl;9`6&BB49iE^ z7h-t-DEmST%SYK4VtD^3`$7!MN7)x*SU$?W5X16O_BFdS^EBG%7tnlF*POG$aWPNkT)C(2yiFBnb^^K|@;5kQOwg z1&zKtF>5$b(Qt&K;Rr>;5sHQ*6b%VPLjuu|Kr|!}4Vgnj%FvK2G$aWPNkT)C(2yiF zBngeq_skld?}4Dtn?~n*5W_o-1fn5g zh6JJ^foMn|8V;c}`bNa8(ODk|`n+jKE*g@HM(25Ejn4Bx&>lpi^E`;r9z>(_Jcwb5 zkWMtD6AkG^LpsrrPBf$w4e3NfI?<3$G^7&^=|n?1(U49wq!SJ4L_<2!kWMtD6AkGE zUcu^}fQGE1(WxH9Xb+;%sUF0zB_rc#$T%7@j)sh*A>(MsI2tmJhK!>j<7mh@8ZwTC zjH4mrXvjDkGLD9fqaovH$T%7@j)sh*A>(LBH5yWlhE$^=)o4gH8d8mhRHGr)Xh=00 zQjLaGqaoF3NHrQ#jfPaCA;)OQF&c7=h8&|I$7skg8gh(=9HSw}Xvi@da*T!?qanv= z$T1qSiiWJBA**P}DjKqihOD9?ooGlW8q$e|bfO`hXhmtG)rN(7-lKp=EV1fe5<&`An}{s4i{NeYDCC4kUL3WN>>g1+>!nQb<@ zQf2X4rIezWD5Zd~&F_H~e|=9~Cpbc)7t#p>>?S~KwBLqJ1$wqY==lYqKM=6R0P{<` zQ;1=nXzvL74luuVt);osP0ywe1?)C(&W7DXuAa_zno65B>@1+AG}9MAikZFu0-+BW z6f`o(?KHn;V-jVRrjMeFu+Xrq4YHT1~JQ z!rcW$Ztj`~Mw&X|bwPu>ML$oWjT|=DD(<$%7I+N;hINHn)K5?;TY{jOwc-u3TFv^# zsTNN=xQ&|C)6(MU*3Y#m^>bjyV2h`m|R z*eN%Iu+rUTB@OP5#va(W=4sWpG|cHPX>oV^6KE~yDIMSzuc^JK3pV;K@pQHi@pOV) zy4kZ@y1Ey7S{8VEX2W(Bux^64Q6=scS{cw%bd?UdO1itu-~w+Bg@l)LP2e@pqhC+= zwdudoZ8^FUxS36l!%2P<9iFasaJ;pc-X(qKr+$K`RR=F=Slo$LDd+_yf}bub5v1rl z2s=!!*1!++O%KFJj~qGzqHQfr^B^{|enK6HQ7FcPfZtkXcQt@$MbQZY zP8kBf4XqnL0n`aKAmM~Y_@$|#vkO$%LD<=)8yY$Sj^imrYtuaVrL$?CcLiFtm$bLc zUf_m2>hF!9Ud_;zd+XH+EJXzUlM74fDpknwsb6&yfCB z?Em%x|C^@^FKF{D>gm#Jy{mb)heqhtdJOMiBeVzo^Z=<-_Ng&DjbZfl(5ZfOA2Ym 
zvL2lcE(sTm@c48A9qT$=OragBZl|fW zjb7~C$MN2o8VDH)ptT;OM5r$2k6kx^XYho zo&oq96+Hu}D!SUD(?DmNuA2Rt;@2#GO&!z;j@N zeI0?n1YTpg;F`LC)6{^x!P~EaU*H{BV8j*}8F=1I@K#LIpzPW({V-BLph|W8^9V@v z{%r(k_;(Syg1p+*fPd!zDfq_;x`JE8LBqdB00npO>x%C(+?x+6xB(v&*m4iY3pUn6 z9JkZ!itkF?Fb^rdees~;)_1B5{CxvNFWAnm$MtQoAaP5&uF!5q|0@oN3>yxIfe{Eq zye|UH_5j@xpgRY2#T1~M19T-oR|9lwfNl%W1F)0;EF}QT5`bk1z_J8jSu6uKEdjWe z09;D|t|b815`b$7z=eUAzfRNuTr~h!4Zu|caMb`@H2@dJBm*`Ba8(^wf2Lud$iNv& zfOTs?4y*w=um+wJkOON#4y*w=umx9Id+bo!BN{Kz$ado_N0HGX?FetR{3 zdo_N0HGX@wetWfkd$oRhwSIfGetWfkdocd>=DF5y57^0@R;RZmTHZSUJ+0Gg9n<`6 zw64~lrnd?DYrQ{L_5NJd`*T(A&sDuYSM~l})%$Z*@6VOXZ_nko=knWg`R%#<_FR5@ zF26mO-=5~Tr}^z^etVj3MW0yfV*=mn8=ap0I!ux3VQk^%+Xj&9kr^>hRCv9L@o?WV>!4`80_`vuf`{g;|0J)>c&-#o2ZZ|rqVoy`lHC&0Ro zdr425p58!Hx*&x<*k;oVdNX`GfmO5h;snRV39gG06wAd4wHGJUU!3qiY-ksqVYxVC zmWwlHxj18%i;b!mXH2~~W9r2jQ~!sJfQ->K1n5fi?YiEc(L%Xnh`#$(?_+R_ENJ@H zT|I#w=>}ei>3eWN&~3aRaVIY*qk7;lT0cx}hJ$xkFKA=+DD3TpBi-H$a-v&5&%0&; z8tn6hBW(56^)CHLn+Y^O#PfG?{)+-qXcO|W23<6vm&5X_w~vK26<;38Lsn>1XNV`ymj9LKp@?ALNdJppW)PK^P5T41}=|#z7blVFHAS5GFyG3}Fg{sSu_? 
zxCDZ?mj?yQ21-Cb1~Kg3KnbWg`qydt#d@ylynoCJs+Ryj@fiyw07wq7;UxnoiSOKD z7XS|nIM>zP1lC9Uvf}O0p|HM713Pj^_MaGF1OHwQ%y_$bP<&TUx&U}mz(wAEAF7Z} zX`$Kze$;QU3tqyR>+L2>fC;$w{PIU*LViG}YrL>F4CNh|Fv8zh=y{+| z2n@yjYCtMo0ECVN&w}3mv)y$3H)^aI0xm*J-F1L`Yn493!8lB((^ImWlxrnRa?S+``-nE*ozHr;T@j+s3!aT~XXErb!nN8r{@iAr#^EmSavz2+0d5U?Od4}1>JPS@ApJSe9USM8i zUSeKmULkifuQIPOuQNNq8RQ$xo6K9x+sr%6E@n6LF0+T(t2>-z-UBz0A21&>A2Iuw zkHKN&r_6rlGv;&V0Jx6)5*%#a!+gaYWWHv;VZLR)W4>n&F^8ESz^UXB@&@^qIm+}h z$C#g(pP65nUzy*S-kHn7^5`%s*r|IH5ep{KuRp?~)_n zj*_qp%Ysu%o)uV;l~@_vQwFiYYzQ05hJmBX2sV;<*eEudjbUTiI5wV5U=!ISaA29j zrm|^lI%{Mz*i1Hy&1Q4hTySfd&la$S>>#!X99)*LrL2iHvkJJnR9P!)W9_Vib+Tn_ zIa|S2g8R#Ab}+cWtYz!K4W^6L*dgptb{IPxTw;!7N3o;XG3;1&9Jt4vz)oZ*v6I;; z;3#t%dkH(8y_CHSTxQN-XR>Z~7TdtiWAam&Bg2z_6qh&_9}3;c@4Xiy_UU>T?TG9Z(x_RE7%*^o8W_9x3IUeE7{xF z+rb&|yo?_DA*zdla069%Fxk52^gZ{>uKw{?7iv9%uh#`@nVRU+hWt6nmOI15QNGvj4FE zvgg?U*z;^Zd=P{Ir=lFka{{;)l{lF*a6w!!I2jG)!r+4}5%9r~DEMSZ3>VAAaq(ON zxE@X7lHn5~sazVD&KbE3E|bgRvbh{^NSepxa|K)>Hwavl7IP(BDQ5x~r3z<(4~|&D zRjHkGa89m_E9WZU6E9UF^7mz&46aIIV$*Uoit^SMs03tXl4a0|GF+#+r?|_{_{T+*0mZ z?mBK6cRhClx13wS-N@YpE>&-VkE5)F&!gPV-ND_--NoI_-NUT{H><0;`?&kL2e=2h zHQYnoTJB+Ry!r_DD11g`1Gkae#BJst_i>;F9$jZX5S3x1D>Admh}g zzR11Az0AGBy~@1?u3C3+JGnQwH@UaCx4~`eE^asXF1Lr<%e@CKTtDDGgwNmX<35HD zw0z3#=RSiE;T+(;;JyUct_QiVxo_aJIN!l%TMog;aem-_}nM4dz4mP(F+g=Oe&%Y!n~O$MCUy93KyEWE1%$KABJ9Q~5M-DQo02_)I>F&*pQ$ zy=)$z&lm88{2;yvT+Np7rM!tZ^9pYPx3gB>#@l%Z@8rwC1#Jai$yf2!{9wKY+|kzY z^}LJM_#ymIa7{a$AHk30NAaWiG2o_l96z3)z)$2S@sq)2?Nojme+fUGzm&fW+}F_n23NLo_(r~oZ|3Ln^T4fbE8oVq^Bw$rz7t&BcJn>_0{Dc`B7QNryS;+H zlD~?-n!ko$3a)Rj_a^>k{ucgLaEE&ve>;B%e&>U&Y_c zujcOq7r77c5AtjHhxoPp!{9FW5<rJ->n9$ZrDIxsUN%_{aGt_^tet;70dp{uzE7 z|17_qe-2#gzQDi8zr?@Hzrw!??sZ@1ckny;H~2UCx4_lzJNzzwH~%ibhu;fsci-nf z;6H?q9qogU@qEI6%J1ht<3HyQ@LzyC-mmzB{MY<9{I~pf;F|Xkf0+M)|B*k!ALV=b zWBgD2&-^d^ul#S|viA@EIR7W#2d;bn;!pCY_|xFR_iy-s(m(va{5k$V{ycn2i3p6q z3Y@@$YhO{21X(Z$K|(ON`3)7qgm583h=h*;MGG-PtPm%}3kgCZxc^NSQiN0?O-L7v 
zLWYnjWC__qj*u(lfm`4Lp->nk6bZ#biBKw-1hb$B7C{xP;4auMI0UCqCX@>m;5xWU zs1^nbHA1aW2X2I2f+h?Rh6=-k;owquq%cYtEsPPy3gf`N@C0F^FiDs!OcADntKmz8 z>B6PLWy0mc3~)Q_7G?jWOW`ZwpzyWujqokFQ~q8!Bpeog z5PlSnfNSMm;h6A~@U!rX@GH1k{$2P(I4=At^a&@x14lyF)&Bm6C#75)+a70wC& z3Fn1=k$@{^R^&up6hu*!z%8>u3=)II5HVB?0~gH^Vx$-)MvF0GEVyfq7Zb!pF-c4o zQ^0j|nwTyc#SAf1%mO#gIbyDuC+3RYjX@I9{9}P6XG`lf^0G zRB@W_pd8#pUnX8I&JbscZgG~_AkG#&;vBJ2Y!aKnee^tVW8NyZiS6J>dcN2xc8T3$ z54en|yUU9W}AGo7_KzvYKBR(Xq6(0uI)Q^aditEJ<;zn^3xT$_j+#)_M zJ|S)up9GiHPm9lp+r($Z?c#IbzWN36Me!x^W$_j9Rd8kfy0}B!DZU}TDZT}6t=|!M ziMz#j#XaI)aB=;<_<{JL_>s6z{21I_e=6=5KNCL}4~SoY>+7$?gW}iXH{!SAci;y5 zka$@9LHtoXA|3^o*vG`5#Gl1q#9zhVz&-XK;&Jg$u}?f9{speGPl>0+GveRkS@9q7 zU-6vyA5#XOy}L;~5036-(j)f6r|=e%1rhPT6-y;jsbrGOk^)Y=Rmm#ZB)jC0oZ!&AT&j>Nr7Edf z8Vt_8Yo$7=UUEsAGz1)d50i#VBczeiC}}h}{T?fglg3LEq>0ibZ~#6bhov8+ zAEhJGQK?rtCjBJ+Ed3(=3Qo{}m;R8BOMgmz(g|>geo{ImotDl>e@kbjf24nlFK>`H%A4fP@?+pg z{&D#Ud8_=S{FMAOIF;WfKPzvSpOc@LUjPU5@CgR_75P>9HTiXU2e_MmLw-|!OMY8^ zN8Sal=iin0$b04Y=v`4jn5c|W+M|6D#Ge<6PN2H9XR1Q~+C<$b6jjQnE=H$)gB4N>G6IZpnB zueqEcf02{$#go(I4EY<};>Q?b4RMBeLxLd@T;wMkQVgkvG()<R8nO)8h8#n# zArD;V7Z?f+gA7HC*-&gK0Y~~KgV~@MEC$tJ1*iIUgTvr7lo`qm72sgM%1~_>Y^X8R z8tTB=zRRE)h8Tt#h8cz%MleC}{nj=nmkHgHjFWhHH-sy{1Xfl4U-I$4O0wL z!8QLShUtb&4VM`%H_QMx{cgi7LxW+q!DE;MF8iAd&4#&#d4?84E4c4(H*^^08#)bL z@)7v51$^OT1c6V1zyt->B$#!Cy_&Gw2)BZ8&l4U#qCJD~^9lbW;g7>7Oo$Lhgjgcn zMug9aIEsjW5UGwxr-__OVJb0n5yL(b6hngANYHmA_+=8jkA$!!B!h&M zk&v+@WDW_rorG*8Auo^+_~{S{`ICh5Bs89cnn~zr5_&ZWeVByqB%xoD(BDbuza%V( zgvF7td=gep!lsb0l_czO680_$`;3GgA>rX9d@u>WoP@6+;ZKn8w@HLZBC<%tFcQ&4 zB36=!jU-|hiTIvGoFS2!BytUje4a!eAd%-tR3V9SlBgjhY9fhhBvDt8=pYh3h(y0k zqWehnNfJ{*Vz!gm6(p{P#KZqrkc3*2Foh%>A&FZ^;!cwICrM&R((5E?4@vrgBo8LZ zPmvUjq)a6#dq`?ANv+hsM3%aVq!o}fD@l8nq*v+RqcT22jK}p4L1oxT#ytJYO_`M> zb2`afK{B5vnO~5sAd+P#Ssf&6Gs*gtWc^FB<4N`~l0Bbf-%qj+kQ_6~xs2rOCpmwS z+*p!pBe@eu?oA|jE6Gzx-dU1wCHeD7{#KHIloaHUf@V_iC@I)W3eJ(j5>hym6h1%- z-z0^H$)F%IsEiEylN2SAqAF6*N|%$;y~M;5)62ws9Wies=I@9SN|Z99G!f+zbh9dP$6C*BUh%V~ahp 
zMq}??u_Fo=l#amCVTay(S(dgGQ9!XEh`pd-!A>zM_KMEpMf0BpO?mJ4-uwJNtIOVd zr=K%t&iQ^1>@R1~FZJlxcr>;<8ao4xwMS!P(6}LJTrC>+CmKHvjbDw%2chwo(S){W z!uM#x3^bt}P3(gvPD2xI&?G>U?9rqHXwo0(H!n1KIGVf&O?E_+W6|VtH2D#l(gjVK ziGFX3e!qZz|A3|{(9|+C^#z*tE1G7GrbVIY1JU%=XnHW3u?x){jb{3yS;Nq*vuO5o zG(TV`HvOQX2@q6VO&Cw2edCBGC2$X!~hoH4a(jBdZ%|hZEW`T;hk;7Hw_!V*-f*hA4M|b3yi5x4C zlMFeHMNZMk=`q^%8`>3zb~PgBiOBf?a_Np-79y8qRl8X?z_$W@74-y>xnQa(X$ zW0Bix{JwT428}@q1GsLHwvvoVMZuy0tyR9VQ*1*R}?-4g>OOOaVY#G3jYIz|A`{}P~>P7 zS%o5>p{O$`T8Uy-px7ZO_5+IBg5rCjcpXZxMhW*&;wqF_iIR4rqW(zuBTXJk+lbP8qV%IE;|r8QP-bV8>4q|kP}XXcbp>fBAZ;bXQH~qR z9fNX&CDw&N+j-Vsc&=Cf$|AI>6P^pN@cB7*+P`NQG zcSYqN(6J5ZIERj}L&x`{6YJ243UsnBIvImb%|xdjq0`prOdE8j8#AX* z{)n!wMOTy1)wAfD5xUk7UHb!FPewO-q8sPX&4K7K2svm&rm!o=1RKE+=$Dn#Gs^5?5ub{gF(A{z9ZZ-O&1NtKd z-5ZPU<)M2Q(7k8q{uk(eKXiWvy6=SU2cY{Y=zbBpe+J#JM)&K{{paX`0eT>y2LsT9 ziRi%^^gxLogrf(U=)no};1+stA3bPB4LoY-i5fTvOK3Ch92EOkDAeAWAylY^mq(}8UUg+s;^wbVL^+8Yfpr<#{(+}v`_vqOS^vn-EyNsSaLeCS> z3yfZTjb02uFUF%6i_wb|^kOf1aU8uIj$TejFHOF)DaDU@O2c!C)#FJc2fBpiK=J4hF+TVCW5!`5>_ZX>X8D0qGKu zx`8wnxW2$qXqyjhFM{k#keNZdub^FTXtxL2-Gg>-;fnzHLIYpygD>vD7q8*VY4GI= z__7SXyaGn{U=#}Nd1yZn+Ruje2cdlxbO?kFvGCQ;@YQTE?g++Df!_{%Ch(Vme-9l8 zK*w3o(E&Q%1;HGIU0~7?Oh$moG%(o;CT?JI6HFe%*VEzaxA2V`zBve;CPAlc=yV3Y z{RzIE2%UN8d<448hb|WIodUjl2J+P)_W^k-$j^WRK+zQxn?R8XT}MLK#n6p|ZhfJf z6Lh-<-2u9LLyx)8<0;_Z%!<6^%`(Bvp2Gf3oX-{B!Aj}vEGakasP?$9iW_N_y z4KODj<_?Crk6@lX%zFXzm%@AtSkN980gSEq8?L$}>2%UaV9Pkz5(HZ(!Pd>N)gQJVV&6>%+a|*{_9b-@uI^%iu;VK1JPOuMVABn33c=0!IgM ztOF-6*ku5_Qo#9la83r7&EQfFu06qZ7r6cbN;7bq25y(ZJsdm)@R$W23E642;K$3SrGgfLQEiJK7=?y$ZiO^2cZIl zE``u22t5d)^$=zPVH+UK7s84l>;{BOAbbdfZ-Vep2tN+tuOXrrMEnL3RuB;a5r-k- z2IGSvvMWSRhRAIYnGBJKAhI5!+CbEg5H%N~+#xCrqOL%c4x)QQ^i+tpf#_(6E{EuP zh-nKk{UBx$#CSr?5r}yNvE3ndF2p)QtQKM~LEIM*Hx}Y7ATAW*jzZi^i0=yVb09t# z;x9qG4ifr7!aPXuh6F7n+=fI6B>n=48z3 z{{gApA$1z0+CXYNq#lLT_n_$sngr0CfV4J{_8X*mLfR`x9|h@3NWTLaUqZ%Q$cTlE zI>`J1GPgtKUC6S4to@+<8nkAh%?Ise$To!RpCCIGvhPBUG33mH95v+p0lD8ot{3E< 
zhP*E!Zw2J3AnzpPJ%ZiFu-hJXABNrcAio{tTS5M1$o~L)dchuZ*b@wUKEhs0*joq% z92AU%0&^(X2L%tIunQEtK~TL2svV#@ z0jlfZS`WB38?HIPwN$uv4z4|e>+RtBQn*gw`boI)Gu&{58%N=08@M?gZid6nTDbK! z+?oxy{NdI$xZNFYZ-?7Q;Eoa8*$j71Ld_3Q69hF)P`e&#tDtTi)a`=0VyJ6^`iW2< z0QEQF?)Pxl8t#_B9~}G<0Ds(vdt2e&E4V)c?q|RQ1P_+NgJNjt3JpHc@CqI-hli)& z;d6NO1w0x9kJiAWLU?o&9)AgsN5SJ2@Yo9;=fdOb@c0=^x_~Dp@T5OH(ZbU<@N_af zO@^mW;o0x-YzI7xhi50?*>iaQH9Q{#&lkXRM|hqC&#%CXF7Uz(UPQu+kMOcPy!-`T zu7;N$@G=8lR=~@*@akK5H56X?!K+eu^$cEjh1Zkdbu~-tfH&RY%`|wk9p0qFn<{uC zLZcBhT0vtfG(Lv6J>cyYcpC$6%i$e{cVpq52fT}bcUpLt4e#>c-EMfd7v2@YyJC2E z2;Lorcct)-6+Qv)PQ$x%@a_V%i+UD_z(pjF2l#R@G%hnG=@LD;Lie#24OT4 zqn8-&VwRuKW)g1m3>)TPi942d!dyqp#p1Rbuxv1vRbtsk+%5pO`xAdL1%DBXzo@}q z4#r=`uq+j9^ffk`iH%laqjcQ97jAEc+n>T6zQ-NT;SMkGR~_(IEAUr#_^U*0JR2Lw zW8-Gb=i!c)xT6*e-(lewEL_FHpV*`~Hu;FZPQhP)#NVvKojT)AnfTif+<7AI9Ev;F z;VwIHms|L|MObcxpv%8!MjRu6=OVwYY0I?xw`u_Tg?7xZ6kEU4gr= z!`;(ykMX$2W871Yd;Wxbj>A1maIfLG*F4;-27kW^_x=|5zKr_}$9 z4#0!U@Zf4Z_%0sejE8=Shfc;rm*b&+cxWLWdI$e3#Xo<8f9{KaUXFi`#lzO&VXyJ< zX?R3OJVJp-EXN}v@rWusvOOO8J07XTBa89KMm(w$9yJ$_3dW-@;?ZB?(G&1!7d-k9 z9xdWA9q^c5c+6ltW-K1#ipLzmzbNo8A^6u(c&s5F8-d4tg~#>6)obWmE97WW0O^UcLjb=!I7V;}y+#r43$Lj7?3kX&^Q|h)wJ8 zs;}{?GkA3%UcDQyevj95!D~k2HFxpaVR)@IUYmp0*5Gv^cwHf0|0`bakJne@^*X%a z8@!&V~Zec`8Bp2h%MjZEtl}txp=D|-dc#aUclRS;_WiL{U^MA1-9yq zt)^hB2Y81J@92VeOu{>s;~nqt&STix23vwVbf3v82xZO&m^8*FEb?Z3kg#@OK% zcKR9b>WX(=z|KpsvpsgM#xAR{s{wXp?{~2>9=rX7-O8}LfZZLj$5+^M5cZN_ujAO; z7yHb`zF%VBeC)RXtG>mm!WbVl5vxtGdKy+Q#cE5e&cN!sIJG-Y-Hua_U`>0hS&cOXSo0dE z4Z>+QIISF~x5w$LaC#n2{}X2{#u@oI(+FqU;LKY%YZA`N#oBMMb|uy(W9@C6-417u z!r7;BjtkCtjB_J!o-NM1i+5{q{v5o=1n>C;?=i!BRCv!`yyqI;+Y9eqh4*IRy|;0} zH@ILLF4%?(!g0YqT<{zh_P~WZaA7JgJdBGr;i5fwUkAJ|0q<+Z`(tsj0WR){ix1!f z1Mq<=d~iNKcoiS&iVtb=;i34j7MFa3OZwxI%lL>BF71I!N8!@dxHJ`)eT&QPkJjR&A}*KU^3J$?6)yM3|a4`VC)-z?V6E`FnhM6~63)uPE@9E%-_}zEX*=G~%oC z@Ksk_ZG@`_;_A7$dK<3J#?@!=wH5f9FTOquQGb!UR?y!j z&4&z)#5+dK9sL`1@9d208oR&!J%g{TYw9l$Z+{=+&GDzZ%J{lHx><&NWj7h0+)Uea 
zml-uJvjf`hVReH6ZKp4*%n)~lj_1@8|Hxc=kVJ9$mf=oLTing$bc{+M&N*UXfHRjO z39ZhcP9&1c-xBV$(`J{2+=z~)-PClPn%3LFar&d6j8R3(>3A|)+;8aT;q5})iD#g9 zD2XIN>Yy0zHa$!&K0Wklc^L5b!#nJu3o(q0_K%Ek@mDHDJ92=Iqf^)|5<~nWxkzV! zrCc0OZqfaQ0b#+>B%Fl$273}$;%n{W%#9Kcix!_A2DLo&YkBDN`Jp{AbaqlH-J(>H z3TjVQh~vb`#L&%&DA{GCaFBIop}%Wr=t% zM1k&;#`whgMtgDmYvm!^{H0_jnYL=m2qnjaHa~@mDI|pkrrb?|V`u0*4IQO_H#nv{ zAoP%(Q3*X|y04nQ7kagRHlk7+t**0{#-%_}&7QNgvxc747!=Wt!q7W2p3CdV$$j}b zm9tMe9U|Ar+2aBA+zyHFnCY8M$wIRDh{c`*Wd#=x+}vA{oOjQk(@M?!7OK36k_`9X z9>7^j4`}RnD2SW4r^fj$wPoo68l;)g*_O8~O(m_TsYa;;dS> zf{~d1{Di@C8YqxoBE>JuMZ;5K`}%1SB!mP7l28&z0vzMTuc)yFolEGCgihE)C(!ov zD=s>U1hB`UK7n4ujSQMHN931?*i|$j!#UO}bvj{iT=W#Yf_y^c5#;{GlaxO~aTGNm zwd979ez|}e&Zg~uKJP}nh>s6@^Cfo&>X8DvFVzOe*#z98{37MSB-B5Ua5PWcEg`-tZ)Po&0WJ{{ z0a0uRm(VMZG|?b}&Jy~_*us-WsB1K+D--(4jE;*+;Rl(K zO-j=@DX_POhSeDCrB?(pE@)8U*nOj}*c~QE$+hJDkFzU-Z;+ehmiI05t1F+3j9Wvv z*<^y%+5r}my?-I2xtb_}oFf%sM-yqg>TnIoAr%gZeIn+Qv1A4rN2VyJ@9IBqiz(sO zlQ|lPC(2W#ft=)wh*p=cJ;zMq_NxZ>bQgsra*~v$am}&PjlQmfavBX3tB^h+W4r1uN z#dDS0E>DlC#D=Zl@8s=kLtaM*Qu(Q)B}=3<09H-*eg1^ zVeD(ZSy@Co3j-fcr9h1yJa{E1cV^t^k<9TTx|4pQirnPx3^~^C`?0gfZcee!)F!25 zmaHJVVqtWL&)Y$vkgu3faT$-k!8ozSF{am4dA={Z^~9VGCS#$smJ{6lmg5$woir zc)6{mq%g3Q6tPzsDP&7)D;@6_xm$7fg|@eDxipD{ILY0ax_h{C;;H6Mh8ZT*SJ##_ zw>4z5ruZ41kwU+&(b^f8JYTM*_V@S?CG=w8_KnWd2u;e?X=OH zql=|UUOArGOz+!QJ%|or$A?12meJeS&V8BP?_?cW8M-3bBhZiC3Mk-rf z{dG8|3DcZuURFGb5q^1WBkm3mW z5h*DTEDt*xq>icIe}BqeEA&Pq^Ml+|__8{7zpsq%>M!2V+aSu-G0W%g z9_AD3%jrtR5W_H^aIY}EjSpfbJj^@LA&hI@MXe3(!|lTD!xF=i!V|+eo&HXANJJ2G zHbT6Eyom>K_ww-Jn)NMs`MUWKcXpt1tzHmwf%*$>v8pJ!u2B*h=N}p6q;gR-H%Z)> zAEa_++Si6TA7OIc2T5cMiHvsCKcx}}Ur%p^et`CZSBOurJdA{ggavWBy_6UxhKC3F zhm&w+kar;Kl!h331i6QJ5N3?^CxLskPox(cq-brJ=$`1FF)9U7ga>9QLfsJ-SR|kYh4VX5_~19@NqUt#=pb?~Z43FtX7`+fpyBu2)M> z!|qvR^9S`kO_1@96J`A2NizQ6Z?cwIozc?V*6OAASoO*5)IR!Y5MAyWWSC50H~QM7 zoSo=UYlbfMwn4th^p;Lrrr0Gz zH!_3uI78o#OtyftJEp2UX z_v@z`*5E>YgVn+!nNe3JoPim}$IeYpqi@eG*6`Pl1qh3oVO+w@;!-9&<}rv`*sY;q 
z_h^{LpyP<$x+TIgHYZl`>(*6l(i-G6brqJgn=0F;VOmIJS1V*jI%vI2V3#ZZT7^^- z)4x8`+IoXX_H>oZXbH*HebhpwMmLjfo_Jxk-o%N4>i4tM@aBTS8!=Fz{nW$X&#G06 z{Uk=mo2&)Ba~R~t(RUvw8wPj>JGA(x%-18qzM+00ocQkjZ-#!s-f?b>Ak zEjC^-NEYn`YufMqF0r3=?Z;VCqb0g^bR^KWI(s43mIX*{=ya+sYP;n!+{jzqU*~W=+~Xih`E#yGczE^gSB#y93tOJOtHTNW+NDYd#zpPFMO;wt&{Txua$%J{8YWc;1r;3DjMz@G_Hofogoev zeA$%!WVAQU66j7Te_N#=?x(UpXrM)c26_RCUp@Vva`GxtxGyD>6Q@W`mE?E1r8L*S zG$4(nlOuk4Di&V;t|Y|@-EkY~hJl7I8EemuW=9Fv{Rh!RQDZ{eRn=U*RWx*wT--mh9w&399o`E12xu% z_$xWlLI7LDKpB4}h!sd@6F1RXrWn7e+@Yc0YWTuLrYyniX^4zJA1dPu!#-=LVas~A ztSg_B%}RyK_!SW{J||MfcOkUSkk1i&D;d1gneB|9nDt;qqL}4ry-HwL(SOMlqD*31 zR;)!8AhFgUQS2bZvRhk;M%R&r>>7GTV~`Oe#IbvUsqZIe0o`0>a77=>x+Da|_@(&X zwITgEahsF{sP6u55yM}Y*D#MKERckeC@13KKO=PYUiW=|QG~rXxf{Tmu{ul1Or#{QYognro1U9kF$iv+-uVpPbq{UeQDF4)BVT?u|5ngigHjn2UrLFoJ5{*lue73P(`@a_Rbr@ zTrm$H=Ek|k`G={_dhS~vzB<5_VMr{**eNAeq2ptx=T@-)Bx|2`H|H2@Xl1p-W(E0;Oh3f_lWl7@ zZ(uH%9;MSM^u8&>=yvnTb>awNbDDj6iZ(W1PC^;b6UyLr!CzU&A z-rUd7&%w`yxNv-#t_8evOVjm9gPdC0OMC5sK@Qs$M3g=0OFOx9L$!CMv`0j4BMTEk zy@Nc76^ZOA{-ES^KS;0mW;!G~ul4fT&f40MXq%pz9G5L8XQ%G@K9IX>IaDfkcKfjx z5!-RK{sN&!C#vp7am{_C-ICTnAon;UYvuk`>N~}Bp_aCJK-;Q~%lFW}RdjNLe>(44 zR(?o`m+_@#2X4sIa2se{V-M>^5sSQjd zIUMbMPZ}2%7_A^N&cxZD8%BFbSU9*pc~5LwMtTv83QlZD8TAQD+hJ?JeS5NlCN3+y zux|R3UgP@A73B)C_e`miho^^v`1<<>_y&7K_=gehlxQd+mi`vLTg@YF{5=S#lKj4S z-4ewfm5?A~Lr!FlfU-d6Q2@8}?*<2U3Myr|TO`9OiT~?2oWJDonzLK4v9VN#Upn)q zhl<5?ktB>MlD_Pogb>a5G`vZ^vy%T_){$zVqp;_KD7?~pM{@ej|1hrCr&NZ$$}3w5z8rW7;&_oK zhCAJSJYAznl!A7aM3Vqjq~kTQgDRBRaZaLA>gw_t3UwE|N!GY}kCMAtN{$2^@jpV2 zks`k$W=Y+oqg{fE6tp{amqteVM?|{%yDCIwtb`;J?`mok86V&u!y?jeB@t|g1%(C% z`-FN2cnA0rduDQZ$!*#xJU%KmBQHO;Opp6*_1mV}z`~35{_FL)U$I}L9gFN7`~v!j z!#J^zq(2?#pk@X}bCATzL$s2meoIwL$r4t6LjXtbiE4pYUoRve>ecDk?3D`KC%5CXhKf1K+IJ>sY&u1wjkUCQKx@WNx_CF^+$$4G2+_2rkJf z#=+IvW6Rv*vv1$6sH&Bd%Nq}`F1S<}cAjLClbi4NILM;e_3^_q$VGBA|LSG7P$xXn z=3DxCSu@ncE^b#slsYWy#Jtm^Mo*aWlbj4MU7Bvit+!ccN~Undfx?NsWyuVh3-%56 z<@lxpJ0Dpy<(Rd4xg(4Cd;+`zm!%C%Ce;k$JJJX*8t*kSZ|k*?B%_N=7_P5dC~69r 
z6Qx_NrMn8Cyns%7t&^lvH)dh8sD~hSqa7J^J%skGq=pwG{KDPj&V=kzdAJYxb?#8M zF~!cA)ReuxOFv2bq>8_)6hR)5exbpEfkFNxo}(kAQNH2sayMdUv(20ntrbkcSH)wvI3QmqCb4-d3C1RF}pE2{lQbUapBFS4HQ)|AF!sZP&ZkaU*UXC zev)J+Cu%spq%3qxxVuKhMN1w9L?y7$QoVy6G7diO7AkI&Z*-i3_LkDFgAdIgys_`B zmAPI~83`$=8oMMbIg5mew<@k`Z$>`hvA-Gu@cqTCZ4<9y@fI$Lq6#KYe%F^Q0)T#!lv>5Y$rKh*($E59Ca%DGYXABVHSJcwm)22llfwar{121EDm3;W-8W7`Pw?a0oSbR33nsvc5;+gu$O|dKE8fFKAvRc90pA} z@AXpe{3EKYZ_aZ~;eN7@J2LZzI8QDnTSiH4MS87Oh}BZo)jFl;7`cmt#Xs8w+FzO- z5WP=cAh~zceoX&CJ~I@&ch}~{CZ7e{^j$VoHhm{(n2hh}))uhQ>Q${7?Sego3s3gH7*i|Ffy^_Sn1jQxV`MW6?keo*IB+>B!2?-7X%&z3I zt0=z&2YZ!+t6a=uh_b7LpMyiZU$lau)M;YA#9HO8Y(X@((ib;qWee8YyFnj6moQ|3 zRx-48B12n!ZGCLL?c6u{tC&)YJ;~!TL;hvu9@-|IJmPNuaC2uHl)p?eN+spuRcM}kR$WS3P>?`V*K9G{TKW?Wp$1+AtNO@E6X~0 z8`D=`m6s~(^pWTcq9wbX$pW&{Zk?rzt^1l#;z-;hd>M9_Ea6`l9dsDG%E5G!VjkHV z5pnx7O`aM{duUO>#F#Z zTIOt6N>2plFvo}slI$gWRC}2|c}u5zgzQz&FQu{G8jnJR)A0^omLkti4#jD5yu6en1t)JUexzQ4FaY0iFQ(gMUx`N6A{$aUpoxoow zVprOE7#-@tH4COvhGF|N-oVw-&)zM}Em=XsB#{XL2}$0BT6DrB*ct>ADj_*V;jp{T-Lud+Z=?JMfR5 zJq8UHIeDV!F8Bm`#>?qxNl1JPfyos7ti;{j*IjX*dI?cdQcOy;yS2Lum`j~O(v^w! 
zxtjt_$zqQAm}^z@EX*v;*E6?t8`&OZpR_Tb94Ci3qprszg=}VaOmbT0y;xhNrB@3I zw6v~enme!Xx-hC0a%G8Pldy|!w34bex_bOZIA=@AL1sJ-GB<3m-(GfktZ?HwYfIO3ME^F8yEIFjysG!u2K3E*bKFD$f@Gkw@ye z60<6?uP9x!V%3&pYi*`FSt}=5_R-EEiJ?is>1-n&*vPoUh3i(VGZ&|dKZivV8!qYn z7s98pv@S$^CXIPnkk+y-UtPXp{!q_)bp2meh73>`&PnEES_wTnG^V1BU9g1bdGTb3FJCbo%iEO8* zwCr?qMSQarlY;WIR2ZPke3*+5bi*5s%WJb5r={I%;NO)`78a)sr#)ZZFTJ(bGQw$x zox^t9=pDP|!IBM&rtMlnh7;9shMu2^j!)2>x*z-pxypq|<9s9C8MbscbST0 z&0l{w`risSeZjDvYz^(EF~~M43Ctu(WG~sM-_K)|<_r~y~I{FBUc#fKt&zZGp-eNh~ z9KGbEwWp1TttZ=olbgc~9hbYV8|25)6%1G2Cld$g_6e7ZVlp!Jxs02*#(CSe)zQ@~ z=faDjmw8%xS_`_NHFS=Cl>24Wg;of~3?XPSj<>dE>fz<`*K|`=)FsNL@ulSq_dOuv zYY(zG?GXEU_#a?0MOTd(o*Yh1T{L{h773Tg_{ZW(CPoG?{{=dKDpyTS$LPzIwMdbb zOJZ{TXSvVdG~LAHnBeG7sI*c4);%F!>_oe%h@p6aiT0mWZa3;ju#E|5f`&JtOPP#5 zxwZTw@ja6gEBpzOe#4ZIVbWzSdZ^axp{!L8BbXlcf9w~a)W=x1F1AjJ9c%)6#gOwv2q+M*D5~IxB1uqTJ949Ps5n_;{PE&76il-e_rzC`UX13vtZs=;_d3`s;BR4)J?yp zK>2;0(>;T7Mm&_u7^`_qc0-I4jx*V|Py>z30Cu^zn+c$u)_}&e3d&v_-@+68qr_`! zTFMIOHIOW%c|(daHF)pB5`wuZ)9WTn_E6rRTLT^lV8j zRq6SttAT~g3UZ;65>lcnVZNjuDPXb53))dihn%Ess;|vtNlrg0M4?Pl7#>n1kJL)U zAAAj$6I1^cEcTC;R+0o?|MgsRu2g02;yHnEP7)@O$pSr3zmVQM_{*=qtr^l&;lZ#+ zPnKMvREOIJWHPsw2CI!5FO-)o)ldUX=3(BnEZv0vwd`_033KaA%hY~Ecf#;!*oB)# zXB%u9S z#^f~X^cxYH9a_c^tslcTt!0QKt)*nJlAje}iyF#!~an zvZM=i!(TWjW4V??g!3}~4XSAIclAL=J&OEb=?ea4Q_1r1ia`H#vR#xz}!@gLAd zeKAHvwJ=C}kz~~Dr!&L#G*ZXUvV0`+&4Qj#di)om^k1qZ(wVufRZLn%_^6k-?|-PF zmt989PMYl1Qsa~09*GN^$NvvC*veV|@eh@vd$WnY;v@8VbbrxK zhuD04=*%e<{5L+WYN^@V{OU9B^jERZoDt)rDqUoJ@Il>9q4FQ>RRJ4u1Wgu4v34R_ z^-Lq;58r)mNWAlz=4#VuoZJ{)5z)vSQwugqc4mIldUR?EnqPk|uT0bZsHQf_`iLyE z)*zj35=7xme=2)Lzo@1|?oal|LkC)h91O&j=hvYGW-hfP+T%17nR=+hwIP{PFozdK`iX4d}s z89#YMtvx@EK?(*Uu$(7FZMZ;k{ZC{R?>=48PNjq^1n;8 zD3L2uBDWg8|MP!n&?PaUxyxT=DYuP&MLQKR*!nA=H>j~e$GT&TV`9ips$BO*5)~I1 zpWqnatZ05Fad7r^RIp?q3&BETxhE$pEkA#YX0wIm4l}uKLK_Rs7Isg=?xnGNR%UW$ z|8)b^kXuFfS28AWlTanAGPxu>DUKB`|4pg6C+W48tuM2SnUy>v{983QP3$hN(KBod z8N-&w7&b<5ForFyg<)glGh>Sl8HM_(aul5zRoPL>s?TrTkZCMW1pnXF^{USLwxr41 
z^nLef)%G+oPz?CLD^sTatnx~>$g5awL_4u%z9Qq>A{M7yWu&i?c>!zn?JQ8TNobU8 z79Eu>(_oKUnZi~o_$1Sw#9Af@1`FA@OhDZ5!AMUBenN+IeKd53o_=hs(lfBn)BiI% zE4^mQS;kJbKvzf^w-p)?5}JDWcopGp9v>hg5zibjeD0jmBaCb9+&X63zLAYN zqVYb9&Q7z5Gq-LrbDDqruvI_qzrx2`IY6g2T#W$9ef4ax5nQ za`2iY$4K{;<{Q?MCEV=eW~EoPRq=8X#{4T^S1*s1OBgM-CQr}wl@vw=MF*w3XK8k4 zdFA`Za|f-KO4%TGc{zLTQhG2Sn}1)nA>c%qf-aTP4x(Z9tl?9Z&#=%qrD&28)8pK- z*2%*oa&pfTZr{1BJrrA{qJ!uBpU6~>Z!TjeJ2_r_lMV~z*q=6>YPLkaVs$Rlyw?1y znyV~Op#zvZF4JdW^`RB5&hI5Lg2go~AL|Os$GXJwvHnUp(I4}oUuVsUFPo45$F}vSdu|P($rD=JZrf@> z9*J9;fBSdar{C(EUhtMqYHj*W(>uvy&7n!eo_-rDjqov1%wABiKH zd;GiQduQ~aTT4#bOB(mSpFn$vMghKI`ULktwx^9a*Eqw~)?pj2kj>@W;`@UwLrREmoUl&&+Kc&P=zyr?^VmUCsAX>O7jp3j7eIh1xDt&aQ9D z*u^awpNg8-G@I*_fHoqQ1(c0Grvd$MA$eD&9!>vReP4TDZ!#I}9#_Q|=vnvMGTx5% zi6mMtVmM0~eo&#CQ=y$ozmf8XTFqe$i6t?9`CfiI$rf&#Xkke$#C}q#E*JE?&)VY z?k~jV${szevRO^nYm(KxKo>CkR-=b@rZGGAuV$J3%Bb0)rk)i`QUcR>0o60>Rm-wl zl%tx5w|JL-If;L@UjJ4<+H-F+GzPS&MF++352ya!u3q&rYA!dPZgp~}e^$Mpnic=@ zSrb0lLzdl`o24H#om#w2!#C%c=;lafE_PqTa^3Y-u5x!pDzkDV(9hp5z}Mf8SqhH@ z3yB#q)v~Ls-S^r5I|aE)fc^d*Cd@G%>902$6qI{5hS>W$<; z(2n(&=DWs;g1$koov~<2`8V-1F*@h60qirjP)!#>iS;*u$WR`L zw0=3&n7q=v3Cf{UzYogJ8n2mIJA)N9r~2Wv3wp9)uJY!e#?-dP zz8uPdVo2g{-d;+F8ZRs77g}#BOUutIcFtO_av*-*!NH^~c|%TqUQwD363Ey&gkXx4}5Kt2OgXjba&_B2SE1?(pP$zWYKkwOD9c{~7Kr~}Qi zwP&)bDl;0TU3H2dIua$^DRFpd1_)EOWf^^5$BPAC@0@bY9%Mda4xB#DReiK zf{@(|S2=M-(+jRKPPnK@O428%_~^XVBlpS6^a&SLxyAC;VI#>Z{568Cju6)6DlY0Z z*6QhO_ddlxU-7lV%`e=?Cm}ppeW_srm;9h@Ih{%GD?#Vq%|1|c{1PYQH*@Zqc30Zv z1hqab{*q_hru0eBCaZpISS1f%?WA2wgn)H&a{@c6DS;iNnC=s`NlnKpsO6Q4Ju032 zpf+(1nZ$W}Rv`fsRn0$Y&U*VOmgwBIphAEaxJ&yBdsj7>ES>CaBWO<%`Vy$@vZN$k zatCXf2z4TfARXU#-mYkck&> zVHLWKz{n+CCf>h;f4B?UZus+Ro(O1yP#*2ZPebt23CBfUh_P0A&q9p5Zz1Yj@vFF> z@DKP^&X$H+j)8o~Nx!Bx&zA5()LvU3iW5%Z^MY14rEN+iXy$cmQ-n}>s^K#K{h{XH zpDu4(lymL&)tvT4Qc&QCh@fS^4wYL5=lpd1LALl#uqhNUq&^oLff%;Ov|occZ23!7 z;BE2Og3hH8ry!D>GS`1mi@iE7L>~&8QQ*v(jeB&>fvxetk889A0FE6L;{7kfFhGsD zin54=%c?v+-oC_WTS>l@##^4^aso+xY2IUEDuzz=M3qq 
zrJWx}$fo|M_6rarq^CfO8?DbI8Q~eA4JpQlMr~FTV|$tSGhnPEmBy=}S~tGw>o}W> zb$xFG9T&0IA#y&{8(~iK%WCFMn{cp>bb$-FJ^8e~(G7`uY_^}_AkgQg=v&Zvx4u@l zn-r04yF-gq4>lv{v3h z!+ESh8-7rVHQ&~b`2Ck#r=F;cixr#WqK>Gdisf^|SAt|WgG|@Wz>hN&4>CfRsT-#8 z0LLrbD{!98t168V4PlU3p{K8VlY+Gj=@1^RFU$SL6bOL9P*cQjT>A*j) zv%FO|_V}TD+v~=k5cgusWy%K@^fPeF7u2lVrxK4$+jDgjwZ!XllZiK{v5!C&i6ycV z3H}S!4NArPcGk;48S~IP#}~xMrjc}^z&YL7#mUoFCARBIRrMbHjef0R4;vUCUDGst zj5cXauvqf^{(gXnV{YKYWo-%?9*8 zo89)w3A+AfdjtBkM{hvta(8&CR>)CI_5xLt&@QL6GT0TwyJp)LTI>CDpm58Mh$04( zt2KlRIja@ZNNzIwg4z(0TU}a{Rg#=*K#&<1rqc@Hb{boUY>(`;yu{MYYxC!+ZHT9@ zo3rieSxgBa!3E1B(nI{Yl#xx2qAw#Oo7a z#{rmn*!HYiea2q+j0a7hF(02%YM}7~91Wpr(58xqkdA7nbWQilP>K5t6X=>)GATDA0uO~;&C>~#u@gq;bN zDP~iVNCUQh1lanm#xa3yvw+5=(!N}Fwo$iBUkau8E|YYm2icLYHl`_}LUh_lC@cFl zJd(xv>A;#6l1WU4+PF)RPj)=PDPXmBez*;>A@lXC`Sg847TT*x!&mH?F;y0jsMT=> z^ze0s^6#~wuIh#ig*{o=Lq*S-W^-4mFIZ;G@)>pNo>Str67;4L z%-*9%5@vz8>C-5A`O}qiFc-B&0$r=P8ho8~R*fPHm)cDhNS#dlCCGw0n3^@cPYGKw z$?mA*-y8s!I1g@ycyIt*hy!4Y*IkL@-yM2@ZRV;V@vS?i<6r*Q&);dxWK1!qJOI#O z?B--k7iOejLVzDtXqs2i1XNpRdk0%jEz+%lvEi6q`|s>pUuLDU_QdQuf{F5{=ai&9 zcq3xX|F3s)4da0-`5p}!xzVjKym^=Ibz8J6?Odtbs^3d$$Zl;7hOUod1o4f*&^1Cz z5>2A@(XlxvZ&M5Mh@6{JG9lF_#ttYa2&-Zldv}bpjmm?}ja;!uh>~whEzILYu60f> z))Nnn{pEYwVF&H3rj-i%4eNW8y`y@ce7$(y9D$sciEg&x<48X}SqFIG1?h1B_>Eh$ zUk)dCZoPZ{w0Nz!^CJuKm!I)KCi&#u(nk8mY!iZ{YVd6$M@YG&-%8B+p3_orxU8W= za}LLJ4;+s7KDo>f$6#8}04Y9X%NlmIzK;YMX>w?6wjUBhk4-`1nSKq!{(L(LXZ?jP zUQHJVJORv_VA>CQ(4n2n09!W5T2N3Lvs-`{kQ()%lRvs9_Mp|K3Ei6w?OcFge9$7( zMeF!?q=$<@iXz#Tr+%dmFH$5scb@_YgIUo*tS=p*68#R5qU`fTFm~jy5SB=LHpF8B z1M%QP`AQ!>juAP(DXwqy>90o4IL*O~2Wh4*4FR^(mxjxA8&mWU5#Ph{V=pPUud<1o z5;qE%kR33gHbgd(tjnH?IsY~5&jzxU>R*$UbQYW!-lMNSiyY5}sF)Y1)~t_qBx}pw z?nQ^t>ELn+Qi2*_k!AiRTG*JKmJp-Pk9z?zI!%t1Z#gEUJIXdK-fwdhX_z+KpXS7Z zsGx?%#4{|=d)d+jlcBem7rkQ7>YZT+$qp>AFlI=O(`C~(Q*Lxse@14_v|9`qXT{Np zWnb4bPj00cAc28+Ande5WG~%vp{1UdFV(6Whv?!=uBoNv(syPNSgV`WwSOnD z6O*WgDSLb#t7A=Tjqd2wdnT1`S;QI6#v^A)8N%nr!)-KGC{mG3*tn+Lnye$^$ON*y 
z!m7gRyO1K_fN56?!0+)@YX}L@1_gh;+QX4dC*da@AvZZXzjS8Cqufh}$R*(+lV4}O zRP4mMT)A|{kZ?Gph6(K7fWzLS{bu z@*IwSM;GtsV&)!nv}yXCVJ|U=m46R(e*RQ-RxlLSG}BPT(6{oU(0JG8~3xq;%B*Z`f8`bcxV6h!>N``$vRD zYGE)F6BV1LeyoVoCTdg~;_c<{@09A4xPY+th|5S|t>X?@Z}r{aUlUsBSL9dXNEQpL zy=V7Ti4PuIh&>)dDO=F^91?(t2MMED)usn|sWGK8=ihJr=|Q+ulLIKF`Sv?qXqG3} zYxX!`Iuo2`pFV$`D>sioxp^AO&8{ZWWPoe^lmg4|jv>sR4!%div03@zHqCcYaL9r* zEKCQaP6q%tKA~j)hP7MmE(8kV51P4(V_|8wU?(q`rs=2hb54@e4=$%3R+If>5_}_s zjf$T#k6kB^$xq*2`YLa2Lf35PxJBt!RpbEdn~q#Ya_ff?5K9~$lEPln1?v41A*9l$ zf{lhe9)aj-ssh9!Kb)Tez1Gfl&s<#Hk8~vihY;45^gA~3m~)ASwsX$$7sLyiU2Yra zz~yQ5oH1X3Iz1tEan-Cnx+7r*Z5)IlTqP5th2oNBNe_FCxEjZ0T9)pbp9NJ;;+fED zaa)_#pE}niT~iz+h-ceOY<$K!gTlt+8I$F-=SBXE`@T-{(+k@a;$@9qPenrLJLLTI zZF5s+rZFL$Tq1?3RX_b-jfib+9ybmJo~FV`peOk__>p!pM=`pVi7HL1Y>7EhWu8ak zxuO65f{&9U81Uzs2mB_?qhidXef(v_?-K8}eh=m$-;Jc-f)?lM_=*Lfem$6;(p8Tn zF^2r*77qyT7YzC6e;)EKQC@N0+lSZ1Rl61Wl{#R^&tA8Bs7k!cIukAJ=A9TC0b8Z8 zpiroBy##SUxG@mux{2C5zS!7MY`GEE19yg=PCAVvp~yDEeZU}3r0Rsdv^x<>{kw!zB48!8<~zRfy-y4H=usNuk6JI4@m}6R~%cf2?Cx*hZKN(PG-YNC9qWcRJZoE^hr5F9mTXE2b|UZehpJg|0NE z3@3s@3R?!22ilMleWbJMq;5i+ou4D_H1#r1J}Pg*ldA7ZXgk$Ewl4l)ZvTS0{hito z4y;3uV1b5Ht#W)N`aq@BvPj_CD6(=%hVra&`khb!;@79Sr0jT?Y>{xnSwJ%Rr?%g zDn{C_vKmM3LfO)jzSP_VlRu{K7t6)FP_XtY(hp~1~RJP zcM??+ z)lV$)IzP1l5<9$gYx^a-BpQt6Y{~dj;%l{ZD!7vKs%^LLE8ntZvsL9Z)$~=%SEvo0 zH)iJSJ``*1vf9mg&XmBiY?YWZf{kP2c%P<^K%a6(pih<~q(0$~nqMnt4R^VnAw~7; ziV@X^{VA%4qgm^InGfmSQdrO4IK|mb*G+-FMH^FO4>HqRh@;T>-|?JoaqPEGqtD9z zDjH*QT4@Z~UR4nG+D&*O{%f!w*DM&%Ki2yV9s z^qiuURPkfJwRS=H8c>H9=&hTM`Rp+SrXLRNw~$#NQcxK?y*Lkou!3iUP4qI3lwf>) zRF!Z_yub|~E|IW-YW5-U-vP9rYA@MVP#oSPwtWzS+s zv209W%r0PlR3=`)SBjKA(go=o8QZ#K)pslAsU~Abn$_OeQHqrs0zems@616>mhqk9 z8t?aUtCZde+Dz5ShC+jU(_xvoE!lnZEChq_*J*#BzC$~I z@PM_4@N}JHilg()3!8qUZo@PcX9j{9aM9W!63Vp4rUmd6w~e zrCo+wz9B0U46~G=f)$bRvDw+@h|v0&w9eQ?PT#(B`zJNI`Nj9FJ+!d@ew1K|9aUZH9gi4X|~0B-e`;U?|)fz%t8l_x z{c`JPSovLHuc+BD8eqyzUX{Z|u|0`i71tA6oDUt|0N;#txkT2wTM#zGgbPb3Ot>g4 zGutejPr9)|>Vfiw;W4F1;`MGeAhJBh?T3~aMDJsr4Wta;W><=H-n?kYjtmA5=% 
zIyi+^;ZxTyc3rp1W)@jYtaF_<`D7takS=u0G`)yZFipgZkPNT>`W8+>F3ZL~VNf|# z%9;+U=0|a}@B7kt^hu}b&Iz-U!6DX7ZZd|}8B+ETpc0g&aqOFgL-zH>kJjXFdwK4KsD zU@EXqNvJlAhqO__I`GAGIF1Ps@)Q!5p-)JnogZJNZAlKf8+v~5;n8vMR~K{0Y&@N5z2b8@X?1p{L|50+8DNz%6eL z$YMS9J|qnC=~NPt1p{8{ub|y`Ot#gs z_qF)nyb)GqbE^^Cv8iyL8^n-i1Mb761$0afCzZ{Gh~2&5$UU)Mk{7Udo_-lc>)4%OWgr4SkNmnDd55;2C7K>3A$qMg< zD^b>qR-za5lie||J}0pHd}Yxwx@onkeBxoCTW^SUl`yF7^UEOmo{Byzc9Kif37(lY zF(Dd!F&2H;a08LW{Tf}4882KlAw?{J4O6gJSet+KQj2v@I`1IRhYwQ0>M%!c6CSis zHBb(--eN`aiW2KcS_u0b|-VCy+9~5?drz+4VH@5Z9SHsAn`%^l>$;E z>&uqr&BAxG=-!7Z)#45~nvqTVgN0RD#pR`Y2$;3r#fuX>;k^Sh!)4seFkItlyI`@) zQohpUIh6|z6=C~G1ptAe?3FU(lRV4)r)k*twmIVc{qz=0h1#F~^6ZLZ*0kSzGKY8v zS*;}Y`04C|^(nSTkl%3#^w7+p6Q+NEGVQwpH90xZFn2>-WL!jCq#*wK{l=^u}$enQ4aKBY{Z5CQ~SrLt0M? zv_iDpkQlUH&BDei+yRcd>&L}}Y$t~VN0w%g!zCv@A}#S~mF^S~PO)tHYR@nS75h-m z`bO9saHL-d9g|%Q^r(T}Dz?mjFs{%z9jjG7J)`^`Cl}hrLacibs-$fZdw@*_zYh7m zUrQ`29*nz3&);*+6YDGJXyu1bImY`b?eGA`yUG61SaHIDBL(__0pDqEcha4Vxk7G` z+ouj3PRWdd@f8t{%2T5w;?!E$p9c_O3a6fk%O38#a_-y{bpbH9knMeig&}MFoYGwi zl1gK@-tTn<&OqJRYeT!M$(I)<-f5lTx{B8QG2GeM{G(+hta{3`L%t&P_Gs?Za z<>q=%@nKm1sbF?^MR>;L$HZmw#J)?qlZ!JF`yIjSPrqVxha1I}^z62Y*Xg0_?Gu?= zxwz7`T=f**AXc$u!f6Wmk1REwq$dT!3}TpkBb^+3Vk_3H?C?Y{+Ylco*l)O``={pR zW^UZNB4?S3$$%|>>dNH8c)6D!anr<;RP|vp5{|&D*=X<8YXBX%Mfj%_MJ4BwOd;DN z-pj$sZ8d=Q7G9Xpr5P7Dq@TW{fZ6c4tiiq(VZpmN0sPO1)VoWTt-fMGmON! 
znYV*NVjoc*5CGPOl7A*4F$9lmX&a_%;vzkfQqu*SPqPjJ)Q2Mo0DzPtX?8=qN#?h)pt#h@$GZ`7565mRKMx~#z}yLPiz1{3HPK3Nxz z)$Y9%YowyW?E@C?u~*a7<{Fw$>+DN9D|kbsiTHsfA@?f&(u&0SNr1 zgF^WT8Iu5e#07g=bq#s?^-N4i%UV$-#xIa$JzcDzk7d&Uuv;@ znRZ$Dba=DP(ai;6Bw5ka%yS%>SpSbd|NLLxSAJCKjAQYmq`&=9qN#7sWvu^z&p-Rz z_n&_P5(iS?OI+)_7PrNwCRqndo7zsh>g~HEFoyk~b2pwV`UHM71v{Pp3wOGAxard8 zd@`A`^4xOiN^$3MeYp;E3A1SvULvTfzMQH$4wTY+rIp(X?S`o^s9}x70eWknkQu7L zCWdP>Sv6EK%{!`GZ5*iBseOz!au8WM$!nFMtsn;hotkt0^z;^bR&q^V{ItZ!i7=fl zglj;a@p*EU0W^F1?DjR2DFRzYKV}2ySk*;xxn$o?!RVtPL|>Gd86TgWoI-@y=n#$i z`C9Yx%6ZB!Sgh+$fmt{2>p?i*jXRA5KKlkU?Nhc~4YO#PNPC{VrrJd|yOk{x8p;)J zjrKli@H-cz%SjprUz+JIp;d?Y2GFyn_gG$hCJ(rFrZZ^$+A5B7P77ubXa zDaM7x?oiWD6^Y)P7IGnI2ilGcLErq~C)N%^&~e(?lLQE#>V7!&P;~)@S{WJ~M?E3ddE; zBv5Jw2c-_OG1;`gZO08vt7Tjh+7sJ^*`DA_VmPXF>pWyJue1k6anI zXD0B~=Nga#ijlzt5VwtefTh=ue1H!g6Myz^De=wQtK=k(-0c=Uxun`Czj`CM=UAt? ztT&E-E=Bd96k6^(W|d2SI0l-^?R_|M_C3Zr(05f>l5487GZUTj7A1kV85yOI<&>^u zs%1@#4{}<457adU+=J z45@;>UN)S}UA@lQ>Tp<$R6-_18zeAW1qspx#UQC20Te|?P~jjlIxm4vq2FL}s2Ynz zvU}*P8P>v5J0{aCaTTW6fBOyT%k#$|2@K4#+_2LrkGk)4%Mx#IprUf%@%axhck(jw z42E?HemIN8M8?9nud4!87J^jaaJ>r)_X&z(?=fXSFXC2W0#J_wsxBY0HhiUnZ<2R1jzYm9!CvZKD08HVP1GmEM@9aM(h~{tEr;A^sL2TS zR;Z1?u)t@+3i2-jDYi-@>TK?<@2@0*QGroG0<~uQWpO@nKJmT+)36L#NN^A=uLP9A zK`n;;U`+bskm<}OOz>s=NF7LM2+czwDiA+oVEz&lkOW~4-9)X3jE*P!U9#B-Ay4A{ z>?0_{qV)!Zcnup<{m5+kNlsgB-tQ+)-EFTm{;Hf)HosPtMpB{@V}yq96s*(tRy7AJ zHgCyy%XD;h_OMeC`!v6dV839WU=JcJ@vOR|Hr`R(UwnN0tkv`9sPZ~*%Bk3_sv_D^ zIvc@PVY4R0f4R%nGMul%`9}DaDXoc|=EA#U;W(WqUNYr04RY50tlfSzT@qo~B#c&! zavA@v%9nUXc*Y9ios5|D1X3XJuhygf5+!gGYaGZ#I0}o1XjC8y?%v7Mwb(s(Im~w0sWsewm^+>Zh}+ z6p|L2j!IcZ-ynB?;CPOiuK5{x$)(%p6-@$_(t4Jki>FPPGg&2A4^m)7!lyoEPT`)v zd&kiJMH$?NM+!FS7yFv(Jq0yI&S@@AF8p(Q#JS}Mc?Nie`Ji@D*u95pB|82?NxC`Mw5zG@*(r5!EAH_2|9 zSWv|Ofx9>s0OpoqdSn~_eav#3!RLvw&l6~vf81u=g7kCd{h-L}L^vTAFRlWl(6tX? 
zeFflqZ$bn!ijIV*gPe9z5&9wP`tvHX~zAK!b* z`V#hmz)*Ak>yP@gcBkn;{I>T~+8YFQAv;j#q+%b*J06zO!~q~P;`u`F9WNHI@CR&8 z<*{!2PzkJys*XH-T74yNYfUYo>44Aa`;&^6EnRNA97hI@xaO-!zI&o?bgVv39|v2_ z3U|V~6Baaop7Y8j!~6I7ln4u_Av{zgS0`S1;TfspP3a`(g_GR%8#MW*c##)jNSB`^ zv6@r|0dVvK!seJk&2{qD`;N0>cvw6HBw+lx8x=CKDj_X3I`MB$ZBE1~)YlDW0u1hz zbf+u`oFCL0L(LsSWR-MS9|if#GO|dw3=x`(*(5=ejYON#{-$K_zPQr5T6Kg}?91IJ z&|US*WC>+?D2ey?=R{kL&uW2rG}N;8G+P!LAC3iI$lF9R(4K>Ke7Gmzy%joJ#PW)r z9FIV;qz@a~%9eC@95x2Kjx%vjFOG!ENve?Np6cx8;tP}qsW)0B9j%*6RCq>_ER}H? zNu!yN-Uc$Y`$vph1b$wS4?q0zxW5~F|1*hJf*O*ujOdF;n%@)SJk-enUb zqIr8=T?}U-;2Bm*eGgey7#tA+#XS*!ufV=XU*kD~*Da6|=!D3sL#r#F&^X!w#sUo$ zbds{yoo;mC6Z%Rqwb)^u=w|ESY_mSop&a|z+PPC)hk$!OD3{==Pp6b-WEYi|C+#FB zwVHqw$ypnvJG9ZE5uPc${#pD%a7 z-4jwC5Z4FY@~8iTP(bFP4GV=5#)-tzscaEe71x>FweE9?iHvB@DB`VUF40u-&F9-cDv;%Q@JyIyWi95J2kGCEc~BKl?lNL zVy%S*sq@NJs8-gxSlJ(q#macCvcZ3@pr#g|HC9mbR20U=omSnH^L(dsQ{r_z=N6^9 zGaSMPKCF)T?N=}!b-66B#~Zz()UPy)Q* zQghQYwk@mDW^vBQ_7mhRskJTgOB{8Ey}t_1?BbPEAVwiPn&hYuccI<~YQFj9TRuHa zm){ia^Wj58oM|sq+L|c4XQ)XiNV&Fk^DT0b(Dq-P`Vvd9HoV&!aPZ(x%s#Lt&awo3 zMsyk=qG)f3>2O$H?ya4SymK!&gwm%9(f)8|c}g0D3gr_?Kf-2?)GhH_AGkii)>PPM zr0XBAAcg$W*7m@gzEXNbg`{Wb;xkqC7ZmBCQ68>D;~`n%o3E3XQ+0WJAX~$f9uYw) z5Y%RngmiWN83n8?++k+xfgh;pnCIUsef556{0~+2mr-am#><2Fd8#^gW20Q@&299x zZS-ff8@mn4`oT)AYl0TffYL!n<)|c?kscMDuA)ciC`ATJjA%22OpV@0#YUrA=1637 zlXTi_{3o6CpU%lMLZUs~p{iH03v86a%U|oJ*19BV0T$qfudz{bFaK~ic&zg(xHmKZ zhbp*>Nbx#eUkSVqPB(xVC$;32mPDsNR4w=)V(Ifd&t~Tl<5psDEH-w`1|K9^I*#Ip zznFO;uWRa$o0uWlBTUo~aY~!#;Xchjr+1M5Gu=Rw4FY#`vwKNz^G!OuZfr2!WLj_Y zO~Ps$w$nx2yQEJoyi2sTjou|j2VTr7-&B>0L(r44H=#0D{l`*iuII0^4qTlE63?=1 zI*|(-rBXqj=r}^I22;TxK~ryQDj5_#qOoRB_6UuIIJQm0F4~FT0IJZ|zc)$J4w-K- z!f0u_K~z@b4YIOugVAjO_;liGw=r!XcQgLds<#}zqhrk-E-`gjiw;9*WPI<>4Wh&R z`bUi|)<1Y*i{G?){smgZE$IBFTWGS~q&i9_GU0yHEjo;-?`670SnE#DXw%?^TR=9p zq7B!L0ir?8xAB?P@Xc_d``S|{@cgKy1zY@Y^^3irxI0GW#krsq?={!GlC?Q$`lLIS zu58%ZAp*l?`X{3ozZZtwSLEB;^9pX_*@fvKQ!Iz+`qsaYAo1f;!mPi 
z7%X9Q-(Jr0b2NAC99zg@Tl4#P*YJwx(*p~*VL@lp9rIw9ro3nL*D$;y2!6Wejr=IohFL382iSmi}GlRa#yV~OgUES~6va4Q@byhEZ9`mbvitpp1Jh1TUutZ=N`Uu*?WZ5)x{m6O#IL%9zOclbr`wncY^HuUxSZAz+?Jn5#h)lzps*ZgN&I@r9JUUE}+Go;gAQ+Jd9Sg0gx zQlseTSwcb%vH%ZtI$vI_$Hfo+&FH5)JM|y?x#LeSc2s?Zk~rmS;zG7Lo?V_EAf#l< zm?eER6!m5VS06V|HGk(HvDyFYJ7?7$5G3u%qDYv!SblNx&?^i<$fCMWhb;s(AmOS z-!E5lYQ{GkwG=3udC^(qb_`RaEp_JBcqN@_=f;|oEVce!Zu=n33)4_*VS@LJ&kAmX z*X+2uw58)J+)~YstL`n_QpI*{7Szk6d0<-d7P+0S+lbb`|5MvtjC(O{n0Sj{+oIWB zjm<{Xea9_Yw+}EC`*Yv45|*qcTUXwQ@fdz6b{O%1_2dfXWq48>ywh!j;IRnXUbbKSeQEeG6AwRH zo;30P0vtzy2tV4Ed=gH}O~x!?@~{n_6-#pG<h>yjUT?omXeJF zPi}J$hk#7J$vrzhgC4++B>d5q+f<$F`R~`2sD5}GAqn!@oKJ49J4zQ;Q5#d~QTnF2 z_NYYSHdi$Kt%`Ow)ik7U@uG(G?N)c#Q-WRBd=I*^o?w`>e()Xo>%H!B39jjGr;TN} zZW)&2=6kjV9B2pD!9X8A5&z(b1GAv7tb0NS-g)*Ik$T%ZtP2W-<1Z}^{B(^hDItn8 zNDCFo;jvDt2A~4s?{*Tjl12N~Bmy)L03V>dPLRygI~28O!h<7$Kcx^&+)3R|Qtn%1 zli`wL!?}~6404)Agdt=~*4i2&K`#DLo|#w33(%IEtjcE#lw>`f3|Ki_M&H`Ftzt{Tpctc?-|7xQg`!GCFzsPJ_nz!b)^ymt~XUG8A7zt?qHuxGj5RuWd&vhWgo~+ z+6~x@*DE1YwW~SV!zO9Y0hg8)OS~5Nwyaq4b^TyvikmDsCOI}KmUcW9L%?}S^T~7t zJ18UoMY&%SvY99qH7Tt08Oe*9tY5~9nzV=_B(wJeu2THcZ1?_E7y3#43MpJ3{^dF_ z3C+guRk|kQ_eqE{s&poI-R77AG9{xNJ~RV(sTA=oquexY?y|d3rN1b$;PPk5EdC`r znW@Z@d?=MlOiHD?EaulRgohVTY7t)4X61Y z%ZoX9H`xBEG=$`B4Ng!$_*?I$llPozPX`zuL-))&HoT*3<#gS27R}4afz3bGz*7M*OH`St@7nORZ(MDLxs#-Xn9+n;i3khnOKW+6m{&<{Lo|F7Bnw(D~TRp=A z_P)u<{;v+GbmX5cP)*MO57a?;idHQ&-Tu#Ipf^#-RnTqoIsJgS)9t@)+2f(;aLbdr zBzHqe<<|$b>TBU5lbwAFD>gd3*^AB8yKr=^<-H`IB-Ca$`HRtH<2tE7=F|Qr-XE#t zN_*G@wj2{lEsDT2I%}BCKhj(Fiha`3$lYWDLVLG%0Wp932@L~V!wBa4iEWHMjD&ZK z{l+SrFoYJ>)=jSsdKSt3;B>2w@=`-DusGoFP^!cH`n;Ci6-x)=CBo(4n69^*k(b-hf#&aBSIb_*TYW(zrWTqiJ=1;Ww`+m}8b3BftnF!+R-Mi^Rc>j< zBg_g%Zi*x9Zq?y@NIxY1_P>{tViZWgL$jXL-19SRM$gtr3qP3iM*g96w6W(|X^3=X zADpW;)wAb{`SGdLf{W&ZsD`ywQniKX$i|@xCRFijpo({tRPpScbJeC=*KkjuT~p6- z_kHG2M0zQy4>dwd{G(ki$Y(yawp`=k>Be9=9ev5~GTj zC^evys4~fo#9#C2wCc;i;4hN%R@Zu_2x8Wq!eppn$j{@xN4l{$u@ANh6~v`}iDA+3 z&xM79Ms1^5mlPbQ60@>MMh3D=D71KKM=03y^Bi55tsgy)we=&8Bv_BS4=Lo=&1J|v 
zMAaUc{h6BlYic(pbAV0y0>3QcfZWvt_ zOZ_uYuv)?mB#+T1&@TKoB8W!N?g5OP)PJ%Wettqh$FSbF*?TR`ksh@$M>wRwiI2HeH|zXcUx@n?Ns>NM?_y;$+0P-! zCn_M0^ZBDAq9daA@LEDiDQV$bYK5%Xl@66e*j>J5+n$9Rma1VyALa+ zLSa*8rLfs#l{+1|)%L8z0QN5T@+Lc?s`Ix%f>TmUgaZc`&Q-HH3Sy^8nlsMdH`oJ? zj$}=tN1nr`!0qIuKyUO{CPyblA(c^D>acRH^LpDkVQ04@N!!jXufAV_W#z5uKT!R( z88^k-mGo03lTl$DFeXS(u7jyaXeaja?sp0>u~3-;Rg5rkj4@&Us0`l@5g-!nk#PrjqW0c>7jo zaya_rqiTV6`&?Gyo?`_t1BmF}f+Z9f1s;W9NwE~rr`x7`r*UO?~lTvu;v7l%V z5(J;dy^IWR(O=YFdpQV>^vTfuG?*@0M+Qfj1o{{Lz+~oqGjc&bjdO1w{;Tl zzEnE*U4+rcS_A#YAnqPf+L>m{#p@$zSHNbqw3~zoEJgLCA5Eo^5LG?G<$l~L{NI`a zYOkaK-Jdtv$(w_iy&UQMkLBH}jnxwB$N?D%<}A482XIt#O{_1VHoO>+bk6)0=S=!W zuV=glSEmo*xYbE*f@{Pce#SRA=+E(P{Oxt-Y{LE07aBjSFM=7Ag$3y^j zqwdDF_HT5d^Gv$-VGSQN1z_fz)o>`6->#01H6dJ+O#m1D+PI(voXcM<2Xb-KAM0+K zz^=wl{z!k|S9UUIOC%JIW42&SHCby6V`Ey{1JIXCz*=g5O~Pg;KA+8<0ayuO+_w!z zCobU#Gu$~%yP_(180)UoL~A3VQH_k&$0TgXdU)bO;rOHe*zFFM?cA|v@y5mKwZz8V zI|!@sHW(YS9=z;~Nf5L~0nHrnBGe3zTEASFGt#yz@!_X(qU20ky*?^oTUIU+HWb@i zt#!6qKE?ZJ9@Y;o#(ajN2hH4l+QuN-{T+QDPSbB(+r0anb{Mp1-zbPyJ*7_>?D3+UNDP!TR|dqoHtV&d=HL4 ztvLN|KU)}w{h&-%B*i52ePKh1{VKIsVB>7Zw}t{F>)~aCcSqg0<&MAMHcWTm@ge@~ z&);5*Hlpe6Wj;80v2fhc0O{@R*eSg|Te8;E2ZY#2iNyKPOya!SXk+8@Bg<2u;2vig z`-X@5`3R#2t!J-5nPYyBx=7uN-(w@-hVQxJ-o4{u)e6>$BWxEn7|dUpO%Pgc%^BCk z0%>c0yYA!PRVPWs#=;GPX#K50!6o-)iJlcRV1-OO{`L#zamSCILW=B8CUT;=By$2I zn}7PF$&DNFs@xe^Ih%`W@QpZcOIFF|Vr;~R_Rn9SW~fC32~vQU-NMCSzrfEt-J#rJ zQ&1J(h;My>QG#z0#SOO_O%t|DrU{!`m?i|nG@;2cVNW&q`(K%v^t}yj5p$%ThIc@F z>A5=n+>jyWq5ZnlmoX*#JUA?*QF={ ztswzXL9w6`YGaCzWN6bl)uLG8Lo|B-2;m{@+kq+XV_%?

^6dgV;mERwilKwvz3V zhRs7UmH5PHN(DB+IOiuiO-6J&s62ELx(Lk0gaCx&D81$r766H4_*bm&9NI4mBs0N_ zgawEA2y8{e1DVI%72dM|X?L$tlH{1w3MvS-WDpx83y2FvK}O*K9izlMy0neG(TB~G zZR~uB_TR($gWxKO`5Tq|;(eaeA#fqas?~9(*YeOHUdfX`UkZsx%Bb0(DGS0Bp`ZOs z)l@(QP{}4RSnD6i-tYPaT-Kb)3e++ZsEhGFs>ttJUN$~)a7$gNcpu8pG!mnMN1HBu z`e%NLK1p6lLT2qhJhjFy#K$(xI%xsEVxuF#6XXGR(>WUssY*#nKmm#NQw1UpAdVH$2w#S^gURfqlR-}q954L%fo_giLc<0?pBbOnO3ACRW>Dy z(}b;5{`b!kPISvy2cHOUoi`q)Q-7$NUsqq#E9qxfD{FBYN+M}ta1(&|%(>Wm2RS8h z!(#4%wb`V{M0uBV?pX}Zrse}5xFLN_gfC3`f13JRa|*6ElB63*@&i?AvD`Xr0K79+ z!Zx5qJpvZ^b#oH#5{qjrHvq;nKbo2ixB6{7`C_^@VTafZ_M597x_&NMhX_B%97fl$sn|%g`*S@^k zJdZ4U6U}^mmL@gWBhVW(^BH8)F;^;Xa4Y5#qGak0QrRYYg0C z<14bb1oSZSAGo|F4X?@qH1I$O;S&hsAno2JL)#7a;%`4hVDmRh;0(p%^vg9Mio)@w z{}I+{_*W~xvCZ_%D@f1GQ<0?LqU8}A%Z})vaH+i-%KEZCgZyW!{fMW|J4#?XnuNi; zJ-cUgGFr4WwJ6pUbQXP0iZ(&4`I7=U?tg1mE&56=tIRE1!4kQ#KEig)uy;#n&umOG zOR6@z`211Ud5DGB(8`ae*}0>kZ_DX3sB!ktDO;2e=-}pg$Oa;dHx0SKM%$hS*!BP@ zqn8?|G{?zWu=rWvnC5wTv74NAqbR}zPCuZVmX=#TTxf1&6kRF*C;zS0teytGIPXJe zHY`y>XI^} z{)f$NUA;^ek`x}}g>nji*|-`_EnxM>(`54vw66hn_ueMWqsK7{W+!Qp|cdwhatUr!w3T~ma! zh$Xrr18mHWAX#d1#_r6d4MG)t_3s}doRB3<;Th!~fpCEd{d@3pU4rr{naqaDR_qKv ziF#PNEY!a0bKS53h(~HjQjjo}4MBXco2q3SmPU<{z&lfQ3HrG-W>H;&Qzr2@<;XKr zZ>xSI=T0YluZzXvvO%CjYnsR90@y%TB$Fc`f-DJNrCkV##vrl^M36GY&%}4Nngx8& zJl1JjHbvfl-i+C*U?mB448!$A7wu%B}$GIz8%11^8Z9 zY|SndW}^#J{zDh0I^TPwHh$xxST#WATd@4dP(C!ee+Y}~Yn|!Kr>~v=q4Lw&D%MRg zZ{~>Mqo&+OT;9^NT5{NuG7|rln)Lt9U7BfIF1bwKwC0+;raN+@Z2Bc1Ad<^8Jf5!< zbl!|YBn6<%?;u;?Ug4@3DHV5m^>p(n8K;x~QhED=DjMTg578x@sT^Ug;GLarNGvH? 
zSGbfb+Q#`Hr%l^|eZgRhZ9YUZ*`ns*glvg?v4)J3P7B7xCG<_KWH{%?!!S(p3Zy zOJ8ZxtM}zwpCsc~hb@DuO_a}ua9fMh;y>!A zYS`=jOudpyU}3+z!B+jFpbBs4hVoZ3k9CxF^vc^nHUZcVhFk3H5bHGR8jB-H@A| zR&;#i<^k-rZl5rf>NI;U$=$14%iUXeFtYVthO)b~)L>ay=ys3JyC&8Z(&LaaM~B2k z#On3&k?{rR4yJD=!oD9Du#eeipD&xGo=Db$>?(+lwnW4fl4K#De|}ew^^?vlyGCDq z^n|`a-|l{&bx^T4teK3X7d{lH(~rpErU8;Qkc4-fyk}0ay!GTGU=Eq&Xhii9Ku7_Q&3&clet6c2>YAcm z1+E$BkGs8!c*eRH26zPegrKbN0^j}Lt4Y1jU-Bx}TUKVx!6M!o&S}c49Nsr?VKbcL zuw>!-g~XazrP^;c=T(}9Cw=2ymgKLOdivo#akqJt`bneIhsb6`1V)kX1Y<7+9rRt^ zCKbeu{z;nUp}}^54!Mn9PEK0U&gB);C-$AidaBr~3fmGZ0|%Pqtev@fD<8+Z`8bwd zsZ{sVGXKPcLYfa2Tt1Cfy2PW99o--&bHirYFL7J9YBeT<_y`u9JH57s;HZalb7)9d zpzZG72=(N@KY~W_r6H(R)Je_6IXypyog65_dfNR&(RKn!;Ie4enV)97Apg#$>kO7z zytNXvmPN-Z$82J+^8!CNVL$gWeYe{Krci}*14Ws3Mqezlnz+zdL?8CcDb^${r z+t%ixu3rp7Isc$wz>`98xRMe&PzN74C%qfEt}C@;R?oLxwkFga)>2=l2mLOACVA94 zd;7g|jDjm;FVVSo*OrO(rSui$oDE;n!C*Mh4yeBx?-vmg8WZ@dfOQzSlFmE?ZcgJ+s{M4fO z@=EI?thee5V&m)LW9R9`YXnU_X;Ps)uI5M6W98NFF&Z92&isb?80Zdc0GrXLe>rYBu^CVz7yfjw;^x zGaTp*JPe&}W$zSd7qBD_dzNo%M2=Pv?;j7ZSdKy%-z-`=0dIE3NxwKj6Bhvd3m+0{ z!d1C+v0Jp%4Xc~~A8~IU-$b?ljT2)tW7S@7V{o4cxFBvQim1rGDEq$eEwn&O_q|D* zwrQH2q)F3#Z_}px-qNz~n^KUngFqD#L=m^@33uxEdEO^&0lnV){GR8Z->dgxJDJR! zIdjfu|GeLyH{L9g|9E#G{k}SK1_N1xfmEk`ZnJ)tTppvoewZJ`O*{}&ysfMm4cFN! 
zO9FeNOI8s+GBACol0L#^8PdXqD56amHk+4suHW-X34>?pzih#h4fgn~qP*Nusya6FGV{OapJ0$-8drOQ(@l{M03@#=buI zG<8Mr-N9`3m#XNHN(tiJ$E6+s)LXjS=g8+BdwvsW{vj?si{xa-n4*;Pf0s4om=6k; zO4&-iH`c&P)Hbd%@xB)BLN>_K294!nVv}N`)Aa>PFbV;wliwj`%O}fMuDxo4OgL8v}L!vg0zqINXp^0oyYppoHJNYtmJmkJPE}gt&Sh;grwm^x{zG_;3uyAH zpN1FhI&iS)uEcNh8Z><%UbK=O6D;5f)=Y07iG|$ubo}%XoR2@BIUNIZRK9Y* zAkVo%S|?_$O7+uea*_ZyW+nwZ9fV#>4aJopB#>g`GyqwfVp^{st zUkiFpT;Avhc)(fR4GL0fY1rOgy0QkSM*OOvkU-y!m3y%To({1&#*|8mi>b1-Y=8P^ z^xpYK%1CUY(SvZBv;TP7d-m$-R=U##A z8}~CxL(L{jvbFX^%7BVZZ3|F-yEt;H_~NRsm!CyYWX{GF zo0N2h_^AS}eNJ0v)zo}HvW5?i_wm^fTHVNY$h6ROU{2Z`Pgc7B&_UN72M8KwF-O*> zFEEjnyts*5Vtg4$J71C3&vI&bB7%G_n!cD$s?@|~xhdR85RF;JwIrt>6VfW-k$GcU zJlT(P>mJNWT&HAEHbl>hqdD=}H+@!5)9gDU&=9WCoOVpGR?6llZ8V1TeaCG8qK;1h zW(lwoKV`zuG>u-@1d+@>Tr?SoHqq5cg7b@E@Czb7DU{fMsVFfA`7;Q-pqpg88 z$!G9qfUsq}V9H86w&ToIf&!_aGzN7-R_a#!6-L~pV1;L>~(HocT-grs?1yyB9ovkLhf+5XZ>BT%7O_nVUS;qg& z<(t}xU^%_@;uS=ztAbMT6#wI))%ii8F;m-S%dI*&!ahS-PFAYd`Hi2a8&Br*-LKty zkYC|X&#Z>SXOWaiDs9q>``6EZM zI|jl)yo^HMCvk6J*4(zq&zE9h?f_P;5qy%7{9@C!0tRbI&#*FdhU3!xBq?#Ymq>o@kXH7uH`N1+v5wTUVV1;mv$g49d~W3$ z(^B^f))LtwEXgn|Wc)yArEvQ)53h^)`*I@I9+611mTy&9Oz^( z@=geQk?>mGUh3Ih^xH$!Sr0)UIKKJ@^|>Pdp}PNGAaXJS%{~ar>2WDoemiJ0C$1Jf zU9<@rXOB#BlCwu#W4WLFhA!N=*N zm%sjAAV0l(U3@GHG1!Gx6O>a5M+y^>$Kw%VJW2Cq^4};xSXuTSuD**?o_cgp&yF6E znl9U)nXpbN`f-a*uh6H3tEA~jL7jDD!PG8PWNzPX+shkQlbKAq4g{vPmbgsTW$aVZ zEYKXMTMq&0uTr*F?+KODyDWWExP#e{t|+Sje&lm+`mUZ9d!Jyv?{xTjJ;J#6bn&@2 zeAoO{3jaxRvpoHuH3Obx!%_JzrX12_`?LN}Pl8lkii=!L2SQQ6aa?YS=u;FXD2jBZ zC@`u%p?}|_^k|5`JjkEM27|EgxqF`azp4}ZLa#8s4{g$To$?*>&i^8f|F5)4AFDb< z_|xB2HTQ||f$P^S5`su5WOsJ2?1oqG7a1C`bf?c5;R-o$IODh}8wuZRCc7E6I>Yq! 
zm>KSsP#u1BAbI&Uh-*#gS>9+`JAo;!=)Em zMwV^$*>>Qw?es~3ws60k*?pwL^2uUBjNsP&89Vb{Nk#O1MDN?kPpBq+hpe0iqIdE= zs~vLQe}9pFzOwsy?mf-&Rd7W$W|h^JlABHJd|60A5aQ+`0m?rQst0T9^XrUv^KbBw z@1lG@eY2|hT+#|%L5H|@sui?CreC647wQ%4=@GOP zb;==@@Qlfgx-&@!_^JmZ^+=ve7V~cH(YAPWqgBw6B?-mu zhcD5{BXp2mUVa!?1{$Yy2z^+PhixGRIjkNABoXu(;Pi#O;2l348d0`7&WLFnu%A0h zigczqyq&%pAk)tdPJ$;gkGg>YD|I`nq0$Y=)aJ{+A*Jf<;mDLlP&d>~t4ZgypQn z!~R3;_mcR8@=}`O_fK!$Jej<5pddcYy{YpUKXls9z!_w<#g2DC(Ah~x?pj?=r*z7% zmC^Z%fyV~l7TQVWw(P1r&l1m13IF}&YruFG#E0p_BDj6t#HD$0pt3VjLHnOaP7*~u zmj{YZjtL($N|4_gr~b-{;10>o-T{7;bW^RrCQ7>jACNcGHiB(BD)_?*i#Keg{V@Vo zWwg@fkl5&kD#^B!^i}ph>EmwOs9r%v9v(o4hXP^?sG2Gr!c)+HjZVV_3&RP1tmWnq zwZ(vJ)(DP1lXvwS@&maz^EYvL5l_c+#=30t0m6eiel3z(PI1hf8y^dIfMn~8sMj6O ziEo8X830uIll@6^8lbQSR`J~uPJFRSOCKUo`GMk8g+sz+B~c+5Weo%m|71`&xHUvu zpZ+NOs_S!f-0f1(S6QT339XQLPb)N}ryLPdn6tdT*I5P+wQmM@$r86lDL#+BI&t`T zuO)(3mSV}xAU56}ninF3ghpb6_TJ+#!219F9!|TyM_e!ET8*hE1Z$OSg?@v<>5@Cy zt~0q)B$E}Ci`Xe@zz?W--0p*M|M7Ot%oQ`{BZJNi^>#((xhmjGN=vwP=ID< z5SPBE|C?tI4vPe#oM^++a2&2;(evY7U$KtH5egr!)>LcUf~u8PLo4i76*rN@q%zCw z8;*W>x^9PHk!cg7ViB|oD2ggBF3Pi4`BknEmXIYrfgiyxlQ84tdE|w%iOY6GhUB7PP#5X%>wpE@)8#-yXBZaqBg11r|z426;yV+tGD$OrG$eV z+dn@hTN{xOstq9g8)o{fRY@~Bc?*5-bUho9e9Ql7Os6p>WOZy#mkO}uU0(sU{LhaX z%Z+hbdI{XO?P*1zna+#Li{Z(dM9;v&NL!&TyK?J_=4r}VWSn2jcx@aZ*k~fb#))Kb zw9MPN+;jB`+wUFH3c5p~N=alW4ra5-oRS$wA3Z|&!V1+;rKsV&ba7LJIJViE{si^h zW2UJ&$`8x+6M#PE6*o^8f+QC!LvfETi7TNW+vwOz_g|0GnOE8iQ54d@##Cdhg$ZBh zG~spdG?>Bq%o(J(a#kisnb0JH62c&OQpU(skV5V%TX4pZMX*Enz!}ccq-x@{TEhZ( z@FcD|^7F)^gq>4M`JXD%ccEjXdG!VC$ygF&h=sr#7i5tgOo@Z3)ym1$UJ07H*j4C1 zMBT)*Tz!f$PuLiWy;QK>-(@9fy7Bs=HT z&&{dMDbC9*FW8*?b1S4hnQzsm$ET-}Og>M&XO`%0ekW4!=7JOEekhEk&&cFqtp&!N z2(2)S$P4=(<-N|RO5gDA4i3m8&$e^z2jV|MHSEN7k?#x3Wl>sVJvM(&nG_?gWo)LK zSC^i>es!n(M$K|4kwbn>dz%_ts)KSudp2QEUSzQ@7I3Rkgx|EX80oN5juad9m36f~ zmFleoJwbJsFso-~Z=H*lqF?9bhz~Gk+lK0fFVJs6qW=%fwjqq!RzWs0X4}b!z1!3k z@iu)^TvJ@zCaAo%!HeD&*mh6>IXbxuX-9Um9u|F$Ha0ON27XddK?o?h3d*-Eu3WTe^g8h|p}VbD 
zfRT}!8H<=WNsdU(u@q;(aba!p!9C<0>}HPFs;I*zS##sc@hjJ*Ygiz2m(RgZPwqlQ z4e;Jo0};|dHFJ@rD77}Pwt@gwB^GldqEPmrB9Z3s)V!2}OgQ?b;e`RgL6MsvK(@2H-?B;Q@Q&Y0vB$P4h$=lX z)tr|WN&6oo{MO|0>y-#V4DImVCLjx!o_jLIZmdRhBP-q-r;FDCo>r5zH16Z&gkPaM zS+8_F#r^bw-3vP{be(3oE<(47L}JxmMx>FxVY@Ta=I)1@Y5K7^iMeYIKNMKZ+p=z_ z@CzxXa!Uzy8Sgo)9QggoxC1Z$om541zOmEjOzxo;I39Bc4%AC|o7({YmV@aJk5{Rf z>?{bgJEwnX<$}eS{(lPHc^t&Iim2j@_#yoG8LOtSkn`rowo`)K!2&d?27XZ3k`-PX z%UUoMaBG8Yj>i#%FQw0POe~de*+)%^(l!}J145hRT_vr}u&6V2>G94U$d@V4Q!grkqXo4<0r-=y)l^jTkkyJyHm zx65k|f3~Y?dwE!XpkG8Nh&dpdbG0FH5jw!qPSW14SISR2=26bcwaPh>ep`U_>&*Jn zSuFewAH2sBmbWs@*;6U;)086@a;n6*c#w1%lT3QEju=q=UOiMcqHZpN=bw|Nw!$`^ zz}enJUxe~c0M3-N)-GA=k=pF5^4wq zqoe2{lfOaeTo-vOi-jQZ+phSDdaOpp*9XC#BXnC2BNT`*<)BK*iB4+llVc*2q}t`@*^GRaqg|*J*K^{77yORF zLF)ujBof~JHQQH*goT9&#Ow1whS>Me2cqmX6p%%l)fxW9+FG(3`u9*@l*h@^@p_n~ zpns{U)L4oZ!ipZcjZovJWZ>MEo~0ku-Q<50^IXhvW|nmITcM9*1=+Cw>d^~XW2lA! zz)dx33RQKy_$0jq=-0#?TVkn;Sz}@t+M(gvwYXg^$UthHWFVF7^f^RpiQ1ZEO-ePU z8nchm5Aye?2c!j@5j~UR@HH*ImcYK&Gs-G>C(gSpgaa1c!i$g68!oy`ZI%i};Uw&3 zv3daAX0}8UK1rq~T5#U;F8~ZSa#`%kkC}zZm`pC(B#sYJ;&FGL%6sN%ojJZ3!Q_0C z;TLK~s9enM8+ll`)*D0UiSSip2+oO`Phw1wn;G!&1($@pq!jE}bO>6q%GxWgpEMyx zT}ZOyv*P0QJ_(>D2`Y%N6&2-{x2>*UB8(uOL!!XVHLP3V623gjdkTD}KHrTd97i~R zh{7yuhZ?3YiR*GhP!k`*zt64J#?2CRLEv1v-vH2ZvlG)Bl-D?tYcb!X&y>_v?>c6I z0~ex)e+3r~Ja5c_OVJlNp;e>RZ-~zhqw@^ZlClIf6My(qPRg_LpmffQ>JGubWAbrJ|t-?2D(ijM(w?e&lQ)sXf$dx77!mg_W%(Wd6atO zEUJH@hFRlEQr{vouyV$ZUovr7WQ$=w4+D0gE@6_8_*ZW+o|_qGty5AvXEf5W zwFGG2olAZId({KuXHFE{jFr9j?EI!<&vo3h5d9Sc>Dcu~1CRb8uUPGDA>LO8aRmv& zp9QaEsiD-sS8BOQNhy1jpjaZA25YD}z#=|a2{>@_Y3er^-(j*dv~nf3`09ASNM_mO zAeVk>+(9%H3|sp0`(RA8UAXSZ2*b0McTf%wOP^A_cWq`D9oj@)JJEV78+Kll<^rOg z%h#ud2%)l2oi#u@YcHGb$W_jP}sZS~+yj_-MF%>9Nxp?}_(5-J1{@o!S2n{$J{Agnr zZDO=7bYd6l@I&|BHT0_Zl!lbs?Zs79{zU=)fuTNvSd(%D(&D77_hq+Z~XxI-srP=4b7y5zzYv4T-`V?c@0Laz0U#Y zv#_6ng$)clPQLAG+Su@r*iT*uJ!&Mgu<3^nQjXhUHWmoiWb%<0(&vN*M9+AY^#Zdh zh8E=D={~#s+ZO7g_~`Z=q$B_N!}s(t>N=7>C%!1i&kufM*+eA?O`Y*eFz+4nmJeEZ 
z<=>*uxP5!eX%jN-X_>|hCCT+l@Jm7C@R~IL2GYv!-CW;J|8Yuu6uq0=tD|2L#xHI; zrgZ!0UbdD_DDF+z)&|&bT#^@z>C4)#7{k)GqsH{*ZC8xxOWf{){{PRJ+mGl&75fBv z{+OQBZAyeOFbf?J#>@<*JZ}%32F|LTWwk;P$eFS~Gz2 zh)!J0)I;B?Jt-cga(Lo&_tN{j~(M!Rr2ew(=5d0Sav*$v#ugF8ua#BrQM|1y=~`t zAzjwn1GmEX>I^T>(pEw4`u!bWXIkuOgyu7<=z9#Y&7C@q%eUmgeyRm@QMG4oIGP3C z&kh|uW&WIzNj&k{l1zmpP-KQD$E8DhmKb%Y;!*OXdxQ&OO3bXhFFe1LYjK9>t7J%T zBe3N~yo_E5Y?-+;2E?7Ga3m<+Jb%$^^r>JwT%Cga_RgS&`N$kxVb zrwiJ@)lSpmENH-doJzTD6!GM!jVh%m{a!|j^rnE6B^e{y2z`+3AYIg*bdpAo{J{UB ztLV!6P@eBYe#_|tep}E7{Ps={tn*)h-#{_KYAvaFDCSY-hTT(H1Vv^IkA zR5JqE(!zrB9SiNtKb|$`1%Yuv!JCtRzXwEW@b@{~^CwsOrr4~I|FDPV(|5!@9KuO4 zy4bi)KA~PHDPj(*pDnSTY~|fH1JS&MKg7`iZv!508*M#_TGe)(`BgFEW_&c?_m}PA|ecY7Kh6T-F2fNar|r?KJT%mGfMjD(=kSqoFjYK?pj z7h~v&`$rMv-lMrgf=16LzEg!i_jsJ!l#&1ONAUR(h$yyxqvm`JQWnQ#F~_z zXbmP>62I~NIKRZKmGn_mb){J?1%fF@7J)BB;0w-cQ|p6&H?lqAV1u4r>i?%f|%4Z+~nsgcw&`F&9M`LxCbuZ!n zts3F~YVe(G@Zx3KTtK69=syc2?Jd-Ov$TLVE9^#_u>c{cVneQhFW@}mNrmzz$HJ4Q z%(Sib_8P)BS~o%VKf?ti7(9e`fQZ2fp|rR zui>w9w+`o53k9T5TgbZM!*tph63eSHRJl1>nfX<#?2Bi7vU&_;B{r<6a|ui^cnWUa zL3Bhm{W<@OPu+v|Re$EuBJT>KUJAde@Y>QNnK$5;HoTcYB3yPF6TMiq%#s9;bwV7l z6k;s7R!cfb^Scc~xdic2-t;PCml# zPW}$BxQIDCIfek^du&*3$=VG{@hvVW*070Hg5gfJSqbc3?zy`^Yi4&ksR%Aql>o}3 zrXYJOPd6kqs}&}4o+?c(sAJ+Ioewo4`xC&ojT zdISeaNm%LfOiOYK>Q%SZgj{Y!&MjpFA-p**-GuB`s;P@Ned9vEJBNrxql4N2YP!jo^&PppusoI>(=W|g= zKCpJ=D1v)MaAkTc(y?HjH_j7>PNU-x+6MJirnwmJD;&dOrNQCGPGi!dO63@02v5tu2Q zTFX>h&{yI*G}@u<*H@G*WCAx?V>KFW=ojX*v2WC|v3QH!@s42XC%M=u&2ganZz$+P z(&-Edopk2L87W13vBDPPa+^MrY{Dxo3eAc_4QhOFVl)r%?`Qw?S(x*)IQALp{2an> zbY7U`q)mF#p;OSeT-mX&z{WSlCjIh`LP6i|JK{X$szQ{Wdq2;ZX0!^#tWSwg-B`PJ zH@Sq;T*Q$xVly>>4X(7=D(&HRZ;yaA(^%S%G|&&B`&L#_WF}#GhfKz?NDr2&%fs-@pKv+DD6*Eqj)1maC7XdmYi_xV^CfWE1e~wtLY3WU5&k*AL&Bhqfc54uvYRT^TQz* zlJ(%68;M)9Gk>SkrN7@9q9CD0Gsx4P;q&u|(M9Y}-!~!Y1nUol)L5F+#09-$0Mg34 zY|)6SW=Xh+lC0tQs|HxYKjKP1itj7d^!`mZ-~inqW&v0j>o`4>w$Krh19Z6bfJqUP z4*GQZFykRdD9)m{YBQ$6&ox1Xkc8;?s09Ono0GshMn6d3W8bEgecyGA1=I^00Dbu0 zN~X{BE@k@gy|o0^k$Q7N=4SKjs 
za=S}<+X;>4_O&CanDWRzSJ?raB?XYkc?BKgYVS|SvXjLnZIX=B(hTu&um<9CCjry#aPL~(aojriid&Cd>V@fdt=#HBnCh6QvUn!SHqc%AqBQhP1 zS6<%EX7h)er+g0F!N!B=-OkjY4J%%ooS=)2_wn^zi&6x#s>ncH`CKl;P!I;9){xNI z%i|K@tZ8_+J5QHL08vmT1h?4O^YIRg}NLDSxZ~~O6gD;RA zYj)a;g~meWX`^z$&g{i?8e&%x2j{f)wi~oCrw?+$am?219z|<%s4>&Mppps|o3iCU z6*wXkWJB5d{joOPr#iBb_=g0p0P9Bjw`fn+5>1?P3>TN2lq?`+pJz=?*W^dGko}~w zhU_Gbfps%7gUz$8hd#~ke(L6yslHlXsoLA4TPVz ze!Umu@$DW%OJb$_q*j;#1;%`1E<%xcfWg8u#3pM~2%w|#^XuBuqqZ)BCl;qkiZk%c z)X2<>BbWvwxXdI=0uw<2L16(4k7LSV zo1|EB*qH%;)`h#V4X=vaoKIprr!r)q|Di4UbmzXNY)Q?|y_>7|NoyQGD9AfMN1D)< zGJ*?KlMO-yXEPR~#m4!61FpF;$yBAJFL0zPI&59(Iy+#mfKp#yz^D=HqjH0h{|(cq zRjPDVUUqhR;kNlzbH*=T`3X}};YD|bHHX2EKH7|3b`&|f39j-n;+3o(af8ntiOux& zLJ3Zg_I?gVWZ?fYX@rmIg=Tv3uOEMP@8j2+9lF1K+>IXb-fxFC-+AZXZ~n)J&J(&H zy7PFoWM4J(Q>Y>a0zJ=Ak}l9U=rAs$z?hvEPGXhf2eME@QoJDwqSJXgT~;m4yn~{E ziLP#c3}O3P()|MDWA_VAsQtcUViS&uP0oGJjxpuXt$=dqR`e=|+#c~z=@bv!nAG3q z1aRcsl8Z$1jRP zSGoWYmALVpaRn0b$Ag#lO2r?U-7(2|(H=S0?cko)pp zS%r_8R_h)Nwbnh@Ns7T9FPFwj#HJ2B@f}YE1^js5G4aI36H-xRM?ZF@gYjlSveI)8 zNq6}DIM+D(QV$3?`#hoW_C<*}pCB_jq|OGyPdj8jIEJ^75rnTcESw|)eFvZJ**8JdZ9+r4P-gQ zYr3BnaY;mKaJ0T!(mkPszF?;>lt>@<@1DS>b(Zs^uzTz6LvZl0v!n0rCUFBZwQy|N zeH&#eQ{Pxm>>UDoAD@P^-PZjs&W1L|-y?Cq@#09f+`-NSv%<+U>DtbH23iNxWqqxzSXG%$LJ+%YevcHiF%hMMc z6HPH0h-sP9tb0aZ6kiZu8ar}<5*H9Lc$$Hczk`v76%Y`SPLcWhhWTn%@XfNKt4GVK zGd5;SEepNMd&tsP6#AC=@_)>fMI(fY(1W+rVf?96+Q1yf1u*=)MIY)fDZU~t7AGr$ zi&gnTIZ3Us=Iy3WS#wkJcvY$k%=8c7iK4Wyd%MWibKY5L9$v~qW;c@Gs9zVzN5gFh zMekAg6$e%c@)Nlv)ski*6rd>v8&wjj+fKh?E!$M0n{(4(EVbkd~mND zEI+aA58*xtXxQx+x+GtANxyWsIf>V%Bd>QD{HN4!NhQ8WpJza2_r89XK8o+tCFlZ6 z#hJjBmr%cDRlR5KzV_EMp_ZJ*So80lC7;zdmI}(_Vo)k{X^7q7PyND6aiJh5&hp)p zY7UD>?-o1IdX%@|S`*H*JN|TyW|!T&Tn!!cnu0FHa4|#J12$ z58N96qr9uG|5npB<5p}GRT8RQk=vw`4^o#7C23#ut@u(dLXXt7WLF1Tbvx$oSX#2Q zWQ_^Qws&NUfge1bEC(z6awM^{xifcB@D1MTgmJ)6mC>0^3R;Gq4upwFrXCzwr(@d2 zkO};RrSFSUL3~KIG|`Hv-OdN8kux@Ak=>}vlHrE3lGj&FSw4a>gae<`@xRc7{_HhW zD14FJ2AdXTG6%>3!vWOxMR3DHl3SIIDy|{V)F^x@qpy>~B?aP3yq);b5D`pXAtY4) 
z2}rl`gBFe)G*{##po^rtiZghI>n0OET84%^a_-LKVUq3X3hL>443s5n?~=mQI&aPh zVSty1#Rr!8UV!)Mr3~O5?~Sc^C(VHqTyHlUDHb1icS`8Kb96rnqJG$4N{`t0-$I$t z4Fz52`a^%3O$S`Ld6N!&Ps|k4Q7ZcS>*9dRG*#F?Nl{u$kF2-T7X!S-BXxnO>D=qY zwsuZJd4INO`FjU2Dlb>J>K;JBs{Re=$0|p$drzwkH!C@^9L{}evelHz(^njx^tJAd zF4mA_88XK|EjXD7hqU=nuPz;TlR9#vmid#WcYFv5=G0aB^1|oplnl>ZeNS zb^L>(wULTcairrju&|Fr1w-OsUCQdDKtm8(Zh5f=EP??f(6};LuTT7r52IZQyA09( z0&_u1g~^B}A0CO|DkOR27*`VJRIKxJbOd3lrE2 zI*#fl;HXRY;~n&(jk5vwE-!G0VZfV#J-q)eheRwlil|ZyjlAO&-Q9hPH5(_4#mZ)6 z5%-@L$xQ&nFj`u$5RLU&)^n)Ssg#MY>zA6uTC!9xzHTDEImG4f)?PjqPeU2~472v! zRl4&RX>EV%?MmmM;hC-nn+e(M_`OQ9|IV8Xbt&EqylgM=pi7!6O_h!p5-S$CBx(~_ zI7E}7PZJym(U_(|HDr)DWr|uv7l{~8Xd<<{WM)~ctbR1n8kfb3i)aH~M_lM+awyCu zj_cVj-3I`B8w*}W+_lQlT_ve2rKpBWEky%Bonw)rRF+AfOFKwk07HbVC6}yKcIR@a zuZHW&uL{<^ECl<==mW|1SW3r8yWtoXQwaA`h#4ESoNLS@EkccqzDTUoGsUO)QW8c( z#Z~0>B(YyI{_Emg)sCCzCG-{(uJE9CJ4GtS@@t7hQu1q+WZ#eU-A?-cPuNB`6=VQ? zco-cR!_VeCRSB;N;Xbkhru{~U2D0BN9KyLFG1eyKLEtH6{(cLwNWd69_N6#5gFnc% znORP&QR5nm?afLAw8LTsdW`Iq!x(X&UpN*ykxxA0~k>!iw(QqWDV^eKnT zsp~_F91nao@gxmpS?%-mu`BAcfu-_1O4tScT;-pM6=I5l`n$?O(dR0!q=D?WAod%G zH9w;vca>ff^>g3nRCw|e`y3xAWqXiny*%~o zYa>PZ9C5gS-d*rT2BFS6ir>|68t?YJI4RVzr*qVK zcN%|oL5=)K8AT9Wm7z_wq^Bh2Z$ZOWVPw(9rrA5icMx)uH;{x7u^$=2Cvs7y4fO(G zN-~r45&p*yFRvi<@vviv$>i7kTz}>eePr{7^1Q56Yc{af^VK#VGMcOs`-Lv#!N9ItkCErgV{H*9nK?uamiM8K(w%)zv>|`6=+`&pN0~Mls=krwd>(y`RBSk61Qwg78uxee4_D{_gU`+U#4H7?k*Y`<-Ep zm%~t29&?4}1DU*qMk)p!0^{<_LWb<-E>*A-GsTdk30Rnfs^|n+Sa71B+8SQD=a<4O zN|cO=OEnRu329^ZkQ)R5Izy=J$TrGfD>9+1J3k8afoh^jh|}_8m&^%U&*0NmlH?kF zwyq>B18o5+zc{Z=xyw70&7?V}z{^JO=7PdRiv)%DVtc@No(U)wsQLW8AxA^+mI6IwsQGO*h2MmuCbhJBTX}Bx? 
zrV-BH*U(f2jqKf4cfy=|O8%vDN1b3h>Pu!G0ygJ6y7emw9al;DYH1lp5t0|1i)?dR zPI5|%cJn3zta)FZug+f`8>5binK&sTnk5bMBat-9C+V4a1@uumAkTj6n9fJHk@)c9 zpbr17tp)WqTSQi*5Juvo6Ez;zwRX})s*5BlE(LSJT8qhNGZ% zKtlAWQRWJutW>~28DG9^RkU}6;HEjhJoJv_tb4_Ixqtbc7r`dsUxfrpn#oG8ku zj9mk4u?YabJ2CnNwBcwn4e3I)EETDm6stA$Bo)rmrPu>!#Kf)QBm1z|6b<7QuID~pBn*9_+T^?dIbdEeUxLA~CeXQ9CJeF)a8Q8L`w?rQd)#|ib zq>9tk=qVKcIr_~B(-Lo}ewcUOr`5B5IU0Oaan78PbMY>|{C+JKZ|nsPLUd*Fvn}ZE ze&^@_suap(_222e)_s6nwo2xe7!s>7ht%!-wdgYHl@oL0vzMO{pVq9mOivrv3e=0Q zuGStazFAeJgEd&7DhPq4OM*4)hzD7g1;Ey8^a0&C5U2W~ejJ6&M;86&RTov`1M>8d8hRy!@wsT6m`!^TSzyCz3{H011S?N-= zDS|vBljIb9O87FdpNy5Ypj2$8{)0)fgAofBdwr6jG;3@Ew0HGJ zd&CFuIx=O{B~Kl$BpXCNB_=6a7q3FEc2q``ecN}1=LGVb_=zgOJTraLc5;EVZv``h zg->fuYRO2ndy21%BZi8@l-S^Qdh^WGcK4Qc+HpjFzJ;y>eB4hj&?l(8^VUxSxjN_5 zg=M@&)=0ijNCCL2#hkCSBSm5f6~v`zQwBeVk@eAz9wYwqRb{(Qal7-#l`F?5zWd>H zRMLtM`%>4_$ZW2PJxBMnOX%8CX(j!aA|WNwf(S=W;dUqkivbrtCytWcn@Wv^O{FT> z@4l%V#$~6O3YCRiQGzK-5KCpOnlOp*QzOP3&=9d%(pgQn?2z^_d&p39S zm27qlRzzlml?lLU_!Q=|63umqx&&UFEq>*qjYTC3%hT8HV7`7>(VH? z>=(y<1LHWumXncS_fcw5ouehQ$ePaOc(BwY3k-0JDaC9B={5bzB|YE7NM6$7(Z{h) z2H19k8l_^dh&e86smp9S4jmdBG{F!|BQ;I#Ewyyg*~O)F#xAC5v(e=uza*kPDf+)m zZY2q%bT$3?%Rb zeX;R;L|E`U0Pk|M#BO)5IY(!oSW-#Xo|T`kp;G|u4qkPtWm+!ruClt_}5shJ}C|m`~yRLfSrdD{Gkf@?^ZG0#k3$!d>AYdTdEtIS~iww zz-g4W!V8(hpbSzCoLEv`LDg_ZauSg+*n{Fed>%#ue{Puqrt)`NscTmmFTZYn zH~B@lO=N>%qhS*tD;wx>=vyV7HkF-3A5Z?tdU{AhQt`R4Mq%ilcytUS8L5uzZk9v|8jf*xBvaF`O;M;InzdHKlX(XGbKaP|H&9rWprgVMcd_aHAGn{ibgCeG7&0?sWwC$P40TUnbx zQX}YliMvt!Kct?}H-Req!Le5dijwi7TsTy$IM;me!l};X;sc}C%pN)Z{q5-I?eS@u zCw-0Xyhc}-O6w4OAa7QP{1@XtoqH&0DDs#LNuL~S)Rpl9MUuAI~+RfO5Yv!XLa z0d8_Q+~f-45)Q_Kq$EB~ZN$e%Ze53Uv;BkvJEYj`OFmRYHmtmITfDtbuevw#jKT7i-lYorgN z1Qlgec5P7r%m{USqB@D8!^QJ%K=ixwN|>avzs*!^WV!BQLorsSAD}lJ16+2E-MkF? 
zE@rMcDPkh;X#mcBv8kvhD{ipiJb&&0UFrg~p7+v#Nyo=V={yCsk8Fo&yKx7x!-`|n z=t@J}CuthZ?;qPB{rB@kZ_OcX;vXZVwNGOGmLHIZGexLYHSK zk~8%oAfqyb-dPnUIdh(lIVatWbTFAnAG<-l#rMR1uZoKRD>j&{Ez_j~C@--kxhS7L zM4#L7%~x$}DmHGO%)c;e_}kG?$D5{ zJrt6>7RI_KAmWX(>qV zsnJ!82gf}%A^rt|9{X;~7TQ|nzVBDZh6cZK`M+3XTwKRs{^=At>N<75nx#!iU>z%l zq$I7@6r+!ce(~igFQEEHoKqQ2Hy5PjSxU%vq;1YAP%@GW4e1%yG<`O!90~|uO2(0( zoiH$$k+rcKR2CXbU%mJ%^hNh~@Vn+l4ik(k)NjEfKbL&l8h9MjiXS5AW??D|K)Vo%Q{3$%+NI@Mfr zw&jA5Ue$fVCtLPK7K)xCIb4UDT^;`lQP5(p!n`MC3zmtj<@Jx1xbQ;%=f!@)YqAB# z^eQEW<4kmfImf~T`WM$|-LLX1{~k8~NM&hO(meWwn8$iXM+vcPXmo5Fl^<2gcYX6w z7Z%b~vNT~C7$`BU9Otk=w6@%^lq@0(jf)VCP3JD;Bzh?2R}kg?g2w)bv;H>oIN<0l=PxCXPr2vuja&EO{*QlyvX{o%sXP@QL=h6=#*jGpj zjcF=#f-#Z!^57@}q!~+XMfXYN9mtt9Ky=4*^P??jPX2_Wk6mv$C|ov}#sR2U4CB01 ziSvZ`o~CvH&h?(p&;TQxSh>EymHioT{-emkRRN0u$dV3y13Iau9I{#13uddn&BY9X z54+oXj>LjNCovKhk8ZR7uTe;9HNN?*c0*+>BH;5uczbFOI*aEw@k?E zAea>j8$e8CV#|UIPDuG~Ds_ZaZ9!q=)AYft+u!{trI~m3DHa^r@YUCc>ULKK6$SW) z1qKR8d4!b31||gR0il&BTY@I6D069M8Ygr$ycd0Z+iO$CEqP~v68X}F`ZTOuXCC*@bZT8rIyJ|8u8bm$+UYD#-qpv6 z=0HtQk3eP<4-<^Z;6J^NY9>>pI&mRXOaXl*XDb~-c!!IOzCv=A7Kksy3iYQEQN(2g zT6+!ky=gA?7ILe)+aXIXrp0?HiFjtRD4#ysV~L+(HOs5gr!o|f7=yncj@bZoZLyg5 z3iLVI=Cni~Wp@^bHoth}t=1(aEEU9aGEEHHV(+QeKE;aRLYq1&dNPZS9oM8S{izjs zSSXRNZ73LU-&qk+Lx28Z+>i2Kc2b1wQeEY@Xqqdy1Jm#B2ItkV7jQK!^4=}NmiV*R za)12pb{=+#7HfRr;q_PVuJP~yK#ugT@c_2Qb-imm%UYBLr>Hcn)E7h|z(nx;1F_h9 z*X^5T=Vt7c;11n=;v0BrJ)&D}@KR~VJ2Q&%T|GHPFt)&or`JZ!^vE1JX>A&PPqq(tc0C?dw_ieks zmD2xJMrUzn@2d__!8hO||65kJ_x;>^X@4Gi6gQrwWihN;uG*BoO`xG5=*!m|H}c(0 zoQDefCg!gxS`{~%!8`r=QT|x!lO?-h`{*{5hkc_?V;ju>HqZ^Auh1u3i`S0G-G$hM&F99%FBfiVTWAH*L3~Y*mVB2 zuE!C!D&KNKamNEJM?AplxB#7Mt^yow5TSYD4e4^AwDq;(IcB@_Io5Ud)p3E^#CiBC z<51u7)&JrMbb9q1`5O;{Lt!8(@-7x$bR7ykhW(G8>!X_o^M87-S-M%`nLbZE)APC8 zzWTp=E*gn_(RGSrfm++?=UCffYL!Ek**h4tVU4t|NbjxeXyR^c&f+=Gbh|pw`7ek6 zD!hK@>~w=nMsrf8}|I$9~VR$yyuw-|?d?!?JI&tf!lhVb%GSIU}#i ze%(Q*$!Wd^LdpUU*{`&aP1W#<)Ee4SL$}mu%UGI9$BGa*ADoj#9?;V)_K-!=5)Y7! 
zPRjAGcGuZSH&xKBnE3s!(oRmGq~hWRyJz#sxfkgx$37}U zm$La=IePiAs->(=dD0*U!b(-{9L$uAU#X<$+GUq2Xt-P$Ngtf>1-<9$w-hqP1AZ3n zMdf)QdH21*>7>9^Xe_9s(ezy(de_=@`30-dsm{=vlaTSntI^LH;I}|O1%2;J!UMB{ z2Iz%AzTFSj#vXl$gF;<%sGWJbP5W%hX}I+Cr+J+cMW1Eu+D7(}onzOGE?|9qXH(Yc zDj07xEBi)Y6Ha7Hh|Wuo}x&B$mj+z@P0v%b+%-@uW#76!O8{F6^2+1 zJYVR$y(!PN*z~oEEdaB%mCgy6JIn7Kv=;-3!zs7BU2UU5W$tDDDS*O{2#-e4m&L!P zbIa%pZSDrd%Vkn4!f{+IMB5ZV_Tg4>Uls^YSyT2QQ-8VLEKAC)w-J^O4q> z5|%3=tF2$247Y7wcwd^BY{+EyimFLK34Pn%(_}4%h6g4nf8d#WIeSjyP*tK*|bGSV0hRWgJ$E18UsJI7$N*LE&Kl zu^xY{V~_mih25?mVLt>w`6IDq$qv#^x7$67+P^EMFSg0H6djP$S`S&m(xeoaY=lW~ zl~H%3a8pQ9h8|W#Y`nACcxTud}cNHTF1AR0=9}NjyTF1a{u2c~b=%v*p}<0Y2(jAWD`NT$UCy_3i31&JjuBM<*wIL|HD#ZFSh zhe@{?Ye3~;(%Hx}UH6j7$~)ce1rCtGM4y}#Cfp_D#IH#3OQ?&lMTascv5aIeINjed zH;$zzfeQ~;g}Dj-?*YAs*Wg+cY&wc>;JHeyk?3b^{)=o z4O0ctB_TYW+m#QuDfH~UsFZ6iNZ+L}*_qBaTD+ac3*i~uZL6&hq*R`_yH%d0)iBD2 z+Dxj)*-Uv_*TnhbCs5ybb=x^Zl_d)`YtRbUThcHTnM^zZ%u9ZrF7X|O=C3|c2f5RJ zfOYH=hWL9;d|9V++!J8LaxrMRn;flmC=~)w#?cYK+vsA;8z`aaVnfi4+P2=GA z4u2JXtI4uPktMxCEK4(32B@OLI^IIi$sBkb&M)jP6VrZj+Q<%toBB~c%!>qejLAo6 za{K*If?!66!h=*ggt{x%lggsfYK%P7kA|@Q^4L`9Ju`=t?%pCAXh_A13h}RSr2&;u ztR&T&N-m-x@ps{_Z-EP*LIQn~R9H#dZum92sKnbrc>CrDv zZEl{wRDpWTyoC$qjvnELl?L$iKH(+FAmOgyF#{ms-R%VFTFKNYg=ihA3aSY-j!R#P_!#F# z5)-&ZGUwXIR_8F+6N|m4rxbnzi%(Z#ogs~JDnvSc5HWXqLtc&dGh#;-TyEgr(SGoJ()NAOQb=jgK_JJ-N%GFsadc$h8=7uRmXc9Pq_LbB^`e8v1%HXmpk z-#DW&!d>_6bV?y^lex$${LOVRivx@lv-6<%Upz(+N0x9ix7U`k1Uklg2(9o!1Gg+y zvqbTzGzil6$Y&|GHdhpoamP2)GjIWXy^PLhu~6qev9!HwpSWyUa~T{}D`An&w$4Z= zj9Q_0_R+4RElW-b1C;}=N=3AYnKb5@Dh#|`Si?~RY-Ax5$Uucud7isiJ=x1EVA2rf zG-;UHyqbZDyB#%4+t0MQJD2yTt35iV0FAl^3_iqCYi|Z=Y&&On)xX>SCI%Mou1%|> zFTiTCnmW$PPFEnIrOgPK={mMMu1D=sRUX&*Qvl0dZTlGqM!U=hS88Fg>;@LgZuFpE z(z$Z7#n@WW%;Rr2VOPBecR0;XN7>zuou&RqSv1oM78Bhmr<*%B-9L|oWIuue|ErvY z!uU4z*GaaNTs5CpECD{lQ)dB3I4&|iyn9gf%Qb{mOVJVvKOBrbyw?fFTHJz?RWG!K z*nhdC5|TUm%MosU)2l1q!p1u)g3fR_m5yI(J{BFH+>SsNR7Ojk>*eP$rR<4C(&K2Pa#dBo-`?0>f{u6?+-cCpjdK!fC5}81^VqoHKtY!qX2-$$v+~)CE 
z2mT*ITOky|;CpLR>*>fyBi>p&tu2sMRN%N09{Poy1+buQ{>D;ivL$D!G|8!vXf>dB zwb-<5gZKKB9pp%+5rWNV+3eXuYJ8fZ=UioTSZx{U7Bs2S)6%sJ#F7$$4>T+y303c6 z7Y)-yrdX+D=Z?dOxhhpf3Hui%&je!8X0YZeFL5$!h8}`xDiRwN3)6xIzw#9YbFjnR z_1Lq*LwG$UD!cRLBG0-T;#^4u^eOc5%XUQmuy7umbt2}1K}i5J&o`wQkiX?^DFtc-LLqMYo4jstVgr2$+pzv znyW;kH_?8f$2HG(U9;@QHC3J0^rSktj2_x|GPb#d!2;}6U3 z|Jm6ge~IBup7-h7I^GiH(Q_jb$q@IQ=3oo}$*@5ZA9 zijSi1{wOTP>9IzA+GLk+U89|kcEceG5ySL2=CT1{*S@Elw*%p@Pj8(;Ch|fIw>&j& z3d;s=8;^KymLbtXkPL=$n{_|H6bdj!?9C5!)HOZHc`WV^DjBkpt}TdvzZ zgrRJ`vgGK-V#a|@M_@-Hp_R97XIgA$aU?PxKnmgUj;mst)fEw6;DE-|Eof;LzYm6w`bCHd>4+djvKY%D)z~#rLH!Er26oQGDKW zWEK>S$!hcJtR`!-IlUA?;`=>gi9d{Tb~A?^SCr$gR<97FocU=utp2Mghh7M;pdVG! z85g0PyyGgJaUr)o_ago9XB1me-`~OFV(fw5j?HJ+iK42{w40BZ+vzjyqOvO8i4VNs zsz(LaoUU=(49XHZOFz4BMlGdGnteI1<~zb~>!_0Im&I#9Jc{?fwG#FHknSC)MEU>+ za7t4`zHm39z9>3OI6+8*1mo~2t2fTmEkJ*DHkoahEjG+PWo#E;0bR?&4$zHQ&#AL$ zwc*(bZ_gbn7OiG!Gi#ykErtz)s@dth?5w*>0`haIn5M>sQD9-+P9RF~Vo}1AbP&D2 zTDX6glvI53GZ5cqtw{}Ss5G5MlbkYoghrigeeFULOW}2DomxpW>FUh*-A@9_63LB6W2mZ@NZpZE5Ak@rQ?pdDpzI4GK8sgu<$&c zifI8jLBB7p+Dx>VqI61?@Has+X&?g4MK8W z_T;%+PzBy1svwaL{QN55k6$uKL8vYUg2ihm)xJ)I zcL^W`1J%NOUKmR)QrUaQP+{xSBnB^UuT8Ya)ze4qS?tWjSi7}TN0n^Kxj)?t-BJOX zIwt@S^~%rmzUE&X3X+v#QtOg++W0h8%p$Kv@4}Va+aA=kyj8ac&DGA6^^tR1NG*<5 zzV1+2zDZ|JRl?LdB^1wl_qjzRzT27VzJ1p*AdrqmR*f*Gnxk#$Ste0~6o=|v=Q8!y zBt-9Cxr$>gAx8$@SIwY&!=)@Nvtt)Ey8E#_mZTdR7W^MjWl)a za(IY#?2tsvk6iYGj;DFFMNS{4>)~*`UIJ33gFatE^^tCL&kpwzxH&Kz3RutRAiv_^ zQp_3?Dv;GxjjN9Emj!p}f;i%@2;*Gl82F&R6qz;WA4^)4)csR#_1pCe0Yptwr^e}! zFIF0>3{?w|Z&=yN3v=jYNn)`&1L$i4N6i>scwK0b1k4PZJ1UK*bLl%U z7kSq}eiGi5^l^TN@yw~A?KO3{$A5n1%-oA+L#weBfU#C&_N{pIu0Ee1{Wtd-z7)MA z!h5KW_ebZf$Jhs_CopnGzL8v{i+}(Tf~ET9acQ$f1Gjq74g%uf;^?cnnIzGyuj6Sj zwAh=TKPdEr4pexMHllOjH(=n`ivzFWMv&ypSeSj(F*2!DlMV4t0?Qy3FO2B8yW?J* zO-+(DIhM~@bAFC)HpFYiKVAEim$IXjTau`GQxPw9UZLwy6{gLo0yKj!2#GiLewNzv znTxISvF)T`y8vP`>(T zrp;so`0<>Sgc-uYp^D4>?<+8x;JVUJxw`k)d;khZa;iG! 
z4zCqr*oMF@jBxp_M3b#9io!>eg0=86yi1raNeI{J;&A#D&=;h#?_07h2Uvdn9}8m6 z1(*g`>A0VSahL>mk(!kko!e_XoxWeo8uRbTcl>ci6FBEfNiY<&WEzx6E$bww&8ei_T|El4Z7-FS18_OuGop)J*iMt@Ctn-ym7@o;VmPZ zE=>}1h6P(BZ%#GED>_z5}yzVA}v4jq@yE*0ik+y9$m4p8S0;+zMxbzHZHf&Rcx%pYS z(8q44oC<|?_~#Dh<|OLpW35)9KwH&i731KR#E@1ceU772O`_&JGJ8lxCKp67rNNlA zXf_L0!CPik*N)}j78znKa}OnE5#x*_fsGOM!owyygAU8hCK=MbCXNRALLEM6%gs{X2crCmtD3fB6qS*+xc8(zSQCVEq zJyIl`72YLv3L&dH%8KVWiHY$__Cyouc>V^w>DI2T_ztt<8M_ag z9ln?yu=9|`bVA&CzGS0k{-H?xi{?b&F4_00e1Vfs-WJf1 z2V*dWKHpmn?y0>nHzy<8lAKDTPwy$A5_q*dOz0rtfoeZMsK4v=UESIvAck*@+q$%2 zS^dJS`R2;z+}iRIM`4);cOc1+DbO=?lE$BT^t=2d8Uk zoIWiLXd}=Bt%|ik8|RY(C{lc{`XXh|Jn|}e`Tc+yQG(_9R1%}&{Uf6TarHiej+MFx z+1FfLZU4H;)4pl`MLKnR=tfzG%Knsm`kFa=6sru?bwECwbw&-);IV!Iusn>YK>48} zuecy5H(RHp|7hg*OtsP&$%Y5Gib~p+vm82>>FT_}!U*|YQz95Ak zk^r(`;gT_Weu-2SuTM(K*JmqRq_|UhU%>|eXU$m};jLPcBp@Eq@RLL0X9CeE0-^qZ zq#h>BbkLVeoo_|b6bCB$N82YIqy3K1e#a)A#mlUM)FKJlzm5!sL-$DXVc0}6k&IgX zG##7B3p_Sb)b-IO>Z(MEFiB+Z(_>ldgXiej{DyB}G%Nc?MRtdO0%G(J`{zK-IVVn^ zL?-AL&mkX(*Nw}ZPlk|ZmrWfM$CK+LB(h(6G4cN}$-&&TnaU10d)!x)bdl_4^NU=} z+(7d9R-c_C6*1bh6d2p^NYJ)z$l5?FSc2wUbQPJID8O;mAg8-MYI@T>tiA`KjxalP zRNX30(YVh;m5fb0CEm zY*<-2XLH);iA(kPAmt21o7g$wl|3th<0sZG~fKKZT zKqns&9}x?ioc;i#%XSHk0#vf_gU}yg$^z^MBTDG^_`$u9k7U2ux4OGznyg9~E6fq^ zVIRAPD)Al~A}g-H1YbZv>ZoMUNm_YTc5Mf}1VYinTZMUaf0#a%;1BOt9$8&oQB08$;`A3Vfg%?4md zQBGf_s&Xt(+gm3|&ZiQp3a${{Vi`{*m}X!OIrnY>yj+(#G0fCnE?5?=!G=z@HG9bEu0K|e4#)+Oy+-m+lN zxRMc>dfk8NmQVlPGU;*_+ z&g^z$8K{0Tp_QgM;ggwxYZqpJ)^%;jhT+=9u4}i(l;PT2#cMC=7$M&HUhE5QJ3e(9 zt1Wfdm%fr4gl4*6+OGJYgX!Ixv-4U)GQ)8Eg@+`AgqE7FwdAa-+Mpo&ChdQ$YEh2x z_u|F2$7^3YN9bfi-~I}X;&+*US)0I@jL7oUs?(EsV_Ie^UN>|+^OXS5TMY#XjN%YG z_nieTGn$7GVF?zn@F)?YaMp#g13&FTGY~m@Y0!n3b!_oOVu^d6n0g}QK*TQ12Jh0S z;+1$Yk$rL&`#`RER8e_=hK|lIY5nZ!Ji8j~$Cp zgwxVtZo-6pRJ+ezm;lZ0D5IuWktZ!BS@~eX(4^P0Q&H?GfabKp4r9b3y(K~sCk-XZ zvC3hbkaQ;53?gi_G$BPF!35%*VD1iexOG^MxOZ5YB;{dI$FBCm&U&|we^gQV1^14B zIKPn7BRBtE`28ZizsgT!1AjwLdC2blX6^NAxvzD`28Hb2Q)OkvRppTmY*P1rLqGGNSAM@=F4OI& 
zpL_gE)tjF7kj*|U-dkX4M43W1`yf4oD;#EzaMnZix5KpEgFeC@>m1HQ7mt8pYmrgN96W8nDO-{`5X6eU9YJDiod& z(WDW;x({cPPM~Bm#(~WOj#0s_jDrFWNPN8|)R?SINmcVmPu`QLVpOq7F(gY5tqR`h z0}HOa^plVbyq$T_9r@oSn6~tK>0iX4i)C|$dMXPPG3ecJ0NsF8C(C8mRb~BqMq%+> zh+&xy0eenXW=0krOs5x@6_)Zs4#p!c6%DGiuy!f!OU(1(w00-(KoKwewGGYHwwbFqfmNSOdiYF#esY=Pt%ZSM{8ZcDVg4wzrcZ?VBmRXiND(DfE6h z;GFQe3-X0pdRNE&MlF4EwJ@13lL`~yEEB&Z*9`@>;KGq31dr*7T4gdxGYjsQ_{dP|K13or?Vl2Z9Ecb0 zr$tf-@tfeFjL9j&qp~8LkKY7H?fFB1)TAVqgTKb*#5DpSl}}C^j(*G=h(y+Kazb(G zh(o~B*z5Dx0=-1q_*sUezB ze)!SI(62{IVwW$Q@-F#+csIpVB^3a=kk9uCbK&+;Mnw(vHl@J`?qyMFupUZ-hDHT# zhR#|9gzHi!r%YJpyVU1>NFnmVSKz|jK?o5CT~~7r6$e)Vrn5BdNOF-p*7CHL+ipr< ztw1zeNQdFliBBagdjc+|>N_IN!072Bcs%sAdm(~xdbdy@71?&s^t(HxhCJhHR_dgKNXh&n;c38FpqbcF2x3w;k(|n9C^?mIHz(fxO|T$Z{e=*% zJtqv2E{abT!GJxVNX@;LoQYq0=8F9(dN;Q=%h;?~!}z_VKzlsFG&y}Dekq9#?T`@? zo=zmY3Peoc%Uqug!s0SIu2ZCv6|~z>ad`ado!*Cv8xYGLr{E(FibAABdn}3GGmnT# z#0;U|8!Q<&MO$(+N^)UDRvY5*4+x1^KB+APMy;1oLw;UC+M+(1w{6U41FSC){%LBE z8XEGAYY30M?^#l^ph$Q|JPN`)iZCF#jH#7-gQ?ZU6M2L=<4x!+DWL{!ISW-AZM*EZ zKWo>V=W3952y+FuIRoBSt|SrJQ8`7HLMthQVOoAzNNi--tQ~&)soOVj2~m)}G5Z%c zFI@kDbv)spq`Y)(hH1TGomBSwSEMQ_b123#nf?vX1OO9>5io$vbf34p@(dHoYV% zDWWgZ!IdSu)^5-_cpP$yby@IvvQYtUxLpal{OYaH|nb_vadwn^6JSUy*v z@Yj?1m&2XT)&LP|w zX{^$*U92N^3U8wCFp6XzP#)t9#rm`pJNLbE{#guXLL^zK2s_7JAjaw0j4usbp2Qo0 zs{d*ji&9_7XS0Y3w1(YBfulfP>sN(Mh(HS#CSangFyhIPjl;R=Dmu}^E5&{ zPnY35Wl%BABBk&ohpY7u%0rzxCp$gEgbNCNbqm=AKTAFK_NnP8FzYO-Ouc`W5apn< zF9NE&$aLj#b#!E#t*!Kjw||svE@6y1GOb>a(f3rm>{Q7#?tr$~7upUJpVQA!M24-F zo}m5ArMqoE ztxm{82RKQekgOCYfD4E=kruWsdLzPjgWX^*p!Zy8{gzxH@C1E}%nF&eIFR=VSqWVc zezG(f-8&=PIGHdWf5^oLO5JBOpn|wgnnoWF z!AeA?saaaM4TCcO1AUtCR~<8aef(w)Rr*O2Py&PEU!D?HmymVqS3F_y;*j@2To%TW zO;qZShMXo<#fn=u7dGqtWtU-}ymgfhS6Q%1hhUWp}tSiG`mC66S*TVhE@S3Z?b zWI7XZ{`4MbrVz_}1?br_x@Y&*dJzXMolmEiHhs@l)SXr;(yfGe6)?SfgqH3rwI^9n zU#t5Y|<^N9j{B5#HNk9o%*TbUc0dh zTxV55P*E^_Oz_Ey&x9Wt@4wKHfY;#?!d^pGi?2hACU>O8ZzD+j^vllq@3`aqU!~28 zLAVha9<2jDceem*ULPEMKwjZLK9a1YCgN~;L%gJ94={?N#fxW9}^iTJSj+d`sK$p 
zqfnujU2_%M59H~lDxqV>sM^D@09w1zTW?V39?IH7ABDWd}MX z^x8QaDXc4t@S)^h{6IdnIv`?Ua6xv2X~6Evd~x2dbExDg3V zPNt-ugB>a(eh-K{0r|#fh%jKP(EDv<331(0y-=ij!Qpe($p8g`1|^01TD=wy>+!4+!?1DQc`4>I6UHRS~N?_W+GS)t!kh zEjTQ^M2i)G!5FK|Yl2QJr_hrnrgTH5w(`YA!rwnW?x3DK2+id$1YHn@2+#OIxbIN~ z)WWvDzsfgS%M96s6(o*M(L=Oz2S*<)q;JyY3P?{DEdu2(yU>zl9oH=ULnwV;cvV=S zbh^DDhj9Ec)5q1##T7OA01CJf(_mC_ga={-rJ!8s)3^+R1OMbf5vFVGSJ0Skr@gn) z{)f<;6=e+9JWa>w_*_tU4S-k{$87Yug1o_;sm(|(8XGX?$d%%)hna-vuISy5J{~#h zB?TE{^)6W|JZ8}Usz1xG;%M*6HhP!h2+4>5aM)60%q|*bTcFfRg%{r&Aq-Yh<1~38 z_mO$q!JX!XixlL^#L>@1M6J@SB8T}tds+T4vFiF@^lz@xZBG(tr$Nw|0LAXKzoA;Q~R z39Tc0GY*#R;+xK&KYRs!{*R_L4YGfr^~qG5bm=;Xb!BFn^OQ7|1I#Hi*`z0WgC4yO zm1^FoCF?(Eeu*Gy5Z(};C4w(nJS^{Vrd@1+%3}KuZ8^t3xEQ=j_FEk&8PII}cU^PYQDIf^Oz7e!d6 ze#~gPd^=N;dSaHru2X+G**rdTm@8{fAd)V^WEHo`klm!PNiP%2vJB=t!kCgHnLI~l zMX~+7yA^%^g@777h(f6InB9%W9C6P?uYUu{*B36}=|jS0i6ss&u>^hq=cQJUW4O8= zNl=ZN8=soFTKOY9t_|soFziknB_%uZHXSu1$_~|qAj%G8e~`LxRobFh5Mtug@u~6r z5J4qLP0--KYI23U>E<+>-pr)l_?mV4Tx{{)IuPa%q2rHH6ySg;mh(Qu*6m-2H-3(! zYinlnbe`~;@Gwp`zNQY0w$RTvr0DlTm?OTzD!Qed~I7eZo{shUKjv3Zu9zG4$EjH6`{)so%kkN=uqT(_phw%dRU zF49416{4Gq4GY`2CRb=aZ9D0~JYO zXQjUo)3ofz5M9Kg$Tc0+*nlQpcmR;i|A%B)dQu#GQDn?qMq|FF=(`X3%H1qVd}LCY z%oG+jIER4I+f1s0OQN&FvIe;ZRmB*+HwdSz zt&GtN4ycc~n!8$T>D(4lqBAbzg?*gfKPfc?H#rI7@fOt1o468v`340XQIl*D))yEVBvdM{F>q~HB66F`jahm&FAZhJp&VIhTDf(}{yD9BsI@jo*N`ub2SItxxFPy? 
zhm1tj5c|`NB#Zkjk2N1moNw&HIKe~D2S{T`kTIw_tV~l$8hH9L69YWy%Bs6b26i<; zX*cdM%Sf=%?kmtPP)_Ecj*(L>W9B z5N7E)8ynY`II=uFbknl^v$yKX5S&PDuzt6pz4T%Yd_*^!wwgECyAAR!Ird)>?Xq$w z#ni;aeHc0?bpe5=#n%mPm@B!m*6Np)0{EJFKPEs>{=YMb}XQN_`xI*e9d@FfhD~ep7aZx?i=sL2$I`s&mk{um~xX zlNDJ4&~8C`!G}I645jZgsL$6&OAncCNtu8^Xmu%R>N!J%zOmy7KS+4^W16YVaRzaP zS;it-DvUlRcqoO(kv0guBZQTHnbY!CewwMnA556C(%QNZyMKhQYOa!IFyZy{9({%( z`d?vhISU9{Mf;|+P?T6vBm1p_ekmtQ?6c@yDn5}D9;LlsfD@o(+m%WPbgL~XjP;a| zrV5`q%V#=RnM>`#tNdEDyP@bq8__khWtvKk`+!^Ygz{PP+{jqL1I(;hmhjAwDt=97 zMXn>dC{)QjT$I!w&O%=RaWYU?xPd-&%c#)Ft#oA1fgylG-ZS99^6dc|HQNb}_mJSw z6~GNHSmU*IZ_`#sMw&TQ5lak9=9A^byAqY(s*Zo6`)ttq8o7c2Tn8;*y_s5 zT11Lp7K{a8W1kYm$1GkjhjrJN)<@N>Y|(CKqDYNWTc*+8Iq=ZseRFX9)3**;gCTo$##Kfu4BAqQvkwix6^#IYo zd$Ypvm-+xb+Sp)tqK!TH&*|5H*2C^9RiKCc(~n}QVqeVBIqMdznVtzUCTt-e{AJgc z!>lRUtt`q4(LqrOLhqLnleK9;?1y$0G{9Z>3$;BlSQ+I~OYW()+1{zOxs?G~Mnieh zpGHex6lQKCKhcOdB7w$cr-%6bl+pJ-i-W`20^3Rbo=$7YviWap7$vA`7; z9TU88z0bDR&l{o7;~dB}=bLx4Llqv zVJ7M^650t3{EtQTlldfI(`PsyJ@K<)jHtLcA|9<=BFX?kGh z7RRLygTK&9A)=f}3hCoHTLHg>uu)B{^ft-c+jKJj@6v;7<)3y7V(*+CJi;wZNf`wl z`WRs%Zq`b9%LaNMU_w2m2L5n#|I6$k<7V}M!dTUD2F9xOhI(TQ&fB&6HLQp*%n(xr zYaZfkq^ZC4mYCG$gd7wZ%sJ^+pd7DQ|#d~kJ$LUVD88if9!eoW6!&OjB4<#4K;LMjhl1mSvsZ6y|({)51K#^ z%Ud8%EC$u9E+VL2e`?kR`f3}EJ>p*_`W@q0@~k>t8z{Q_o#v%m;cISzI%Ddu6>K{zZG#;({v@45E%wZm6sf{I;M zFw@swqR-Oz@qaJUMQ}$M4jqZ{im_x&_|y+Wt5>ujz>-mwMyrld0{&}I8Ix@Wo55f) zmpi`y_6+uhePlyuZSb*ajVQlA!gtuDp~?D4C9aP^ zfp}$`9Jb?$C%`jL_riSHTj*FGU`YV)OEQnp~e%Gu&FK9cGMN!c&n5gJ^ zQB*X%>;xk6H|l7@K2LaP2))bbcUNWqT1yeZ-^bK$`!e0ze(Wa^w|st<4r-zSYdqJs zzfuNZ1U@)X{>qUlSE<|S^FPzOUN|*SkqQC|lt1VMI#ZIHXGkYD-a#@$p;QkX@~Kv-pFZ`p*{HiOv;?JBRFnB0|QJWs|X1SLkr zh6aT#0m^8sE%XR)gvmpBQDJFmR6&?4@e~Uf;5X1`MS*(O|EFQsKatZjwZa*xF#We* zMYZ`yh~3%WPFL365Wu9DJbt>)L4>PT*v60qV*deC0LwKOqnnHCq;#7CPp~Nf+4{~4 z^}PlJ>wJ7}ew*wnMAI%|QqfjMK7kgRX3&3kd1uwn1c0=;={%*Bd!cFT$FvcL7}e$UrbLUG5#@>_nf zo7gf|#*VU(x|3KLKU|ev-$sXHZ0L8)emTe@#KWWG9* zX)<3M$wZlNYTb>71`NBI>O9=c!n3lYBcV4g*C!ei!2~oINk&oCsbj^BfEs?hFhh7S 
zAuT=)2eqtnB!wd0(UEIOrS}%dsnq3G+lVmy#U&q-CrHqbIxxvnO_vtHOZa~D)PXDxoWN0n7b`Y`Z1o4t;hSt^(g!1 zNk-O%vZi;pxKU&<(1)ZDm#Xx3DYc`O6yDh*b=Gl>d4{zL5%^y`kJ)>$j)x294o8wZ zl`OGq-e?3}ZTqqO5(A`s%Sb|n%X5VLaRDHi6IsgyoGB=Uh&|D@idHm%@_;dM6EP-3 zJ3Rjgq~2W;oxOR8qD$~Hhh2p*dhtkSIbQ$A9x{(!u7}RrMz^r73u@_WRb6uFAE~n!Sox#lddsrymq*K(zT{G+5Nq!w|Ks>&dOD55Po{sN>(3y3AFcss-Ax;m!7w8!F%dDq1Io4BJu zbW}ms=g*scVn)w2gsHaulf!@B?#qeJjkU#G6CTcDC1edp!q8p{Ne+vNX0~{-=IHD^ zbFr0_yPzvEQK9oT`L%BUcmqHeWUoF6?aTzIt)Ka>>i<+0_gSHL@HYYI9({U#oQDxc2&M!EL#oBsk9o_Zh*fT;X zTu~@;wcK9hx>YzcMF{X8F$L&|By&QR%~+LzO#}3x@QCo(l`!?>gc(x@J!HR6^SIJ~x(9t*sFZ(zpaQ@X2OZ{cK3(Pp3VVK=s6Z&l ze;i1AD-Mz7RpqNll1r5P8E(Eh<-Kl-=r<)QlVClXoXvXT2gA5!FGymmz4yL`(g63! zAmQ)IcDEhdK2%e0ceBGjPBVTlJz$cR>`rO=egqvNwj zQn0D8emk3-F7OK|J|uvFUpA!W1VK3_T4x$86!7G6+L%|@qPHpSoUd*TUUn9l3EY_}Y=34}|WPtVlQzTc0&BKsFC zIKKLzW``9lltNNux{`P1BXW`KoA zg>wMHiHDdMNmPV#E}gv^@S1_?FHHf7I>j0bc9N3*EJ@4OdUHxW@=TNxCSR ztXI-UH*tGONvd%=4>PzBscu?C+GN7drJS^tm^Uk6$p|HS-Y)%_l*Z@1f*>M__UTX( z5^5om-DZ`q5~8|3>Le)0OZx~-aSrEWrEWE+FCTHQIVPXqy6n)NkLyn=Xc^a3LaG&| z+)Aj1do%PAqj!X6b4Lj5cAx@k;f629q6#LFSy8zSS?!)->7A2g_Y=&Iqhk2Z#G(*#N;F|DIQ6%vADs@Vfjt}MPjI{=+ z8j?zV1$zMz5cdhJ4yt^P$`)_I8Tiw=ojbskDCNR+5%|_h63F!4Lpih7qQ#Q*4@&@3 z?RlOK=hmiY?^UFiO8qd6*+ljszIYG|ck5UfT(|s-=t9xhzXBvSCiY*cuVlYJ)s$3A z)}a-=k-h*I3M{QS8vt&W;_4F&R90}qh2m?@J2rK`LK7;j6`!D<)HD6RQZ+X%)e7PD zbzFnRx=Udx74BixLpV_IP|;tI3HxwjOu|(cMg=Bq#KLSDH6On;a=Kn-bada zWar+zedz@TY+ItgYlD76TRKllhv_77BvhQ#V5^yZ$UK1QIHC8jpm~Z#WO;FPRdf}e z1KY_0u$Rp(cwFZTafL1Z#Y3Vq#o;c6_lSIyxLm;c+BBU6mDBYHYv)FXUvGp^atQxD{fR zEgzOJ0Hj}h)H2ledZpHf=SG5(sD|ZsK%5}}l&v3Mm2Y4m+blyik1tx*)|Eixik@lhG- zl+-t#3tudWs0hk~(ZUG*2jj>){AQz^e(xb`@mUn$J2mXU9?aipT9-%r+wAPslyCIk z@xwZ*SQA}DE`>5tZ`%9Lo~ zKd3eyhc_o3F2yrunMv;IsFNdCDF5bPC|o0OYxwdE>ZudkrM zo`pBQrow-*(w`fkf7gdh1Gu->Mx&h6mahH5LY8Lefc;zQy<(|iMTDb5NrzvOXPjJJ z4){09gF8w3n+Di{iz7tS@~CH#+krK7=w|o)>#xaIXX;ich3k2oy-`vaS{eybdfWa3 zAAd4s?^q>{>V)uM>{Zi-Y9wC?P*s+3#ZC2VYBxncK44m8;Iicn6&Rr#BabV47Pqp& 
zZ!Knnzjx{p0Nbl_9InBS69?Z*Z35ky{(Go~L`hnV8b5~KO34bTTuAsgJ{Uh*+3I=+ zt?V643uW)1;}Ke)N9DGwxt{r(>EmqlujM;mmyg*p=Iqhx-9-vwQCpKjW4-+1pnc#w zvyi$&gA4XgIgl1Tv}DV8<>(J5zN6fdCpUf83h(I zMi?bc(&`hGvTJKl3?q59b zoA#=m#TKnm2W6n-v9H5UH3-sUCR1v9k}?9f60|gZOJiN$ zDrF_9fu2sJ4ibBbe2gtIFV~h+uw_Edf|*kjW+_Nme(1V*K2N`irpc&D;7voiAJAl!2B7RV2a6w zNMn{M%VI4_DAp7+cI{g5*Nak$V~PrL^Rc-ACZ|b-q_YJQAiWkNtE1KV_{&;te^(8?w7@XlV+mQhtL7hU6G?vNo&&2#IgX2n71_ zAud?0_foL?V&ABSo4xo5f8iVXAaGZKC-2ag<4KY^UxUs7i!R!aBZc|`b%ClZAkUv| zoY7HBlGpoU_wyy*QBjL{vJV0x^atcCXXq=r^e#4->RdcS`HuC?va@%%e=oJ6Qu#GJ zix7iZEgKqZAx^qF!w*O6FmAm5y=8DhOg5@4dFBj;)XPoGi;YQ&k6+OkTVAuVgx;fo zPkO0#-x~_tOia{5@dhr7H}E@K@VZ>L{8l=m)ji+&0(SE0ZEXj(ojx~i$M}&`-x;lF z&5&Q+QdqyaZuv`(&5mCFe#9ZyPEnmlwfSz29n^#6H^K5$V_LcbOqHX@3BSJC`<0=K zeHJJqNJM5#ZmB6fk7V+B3Au6TxP~oj4%ztW?){&%%|1F}+=>q)#I5bkqKh^fUznWl zd7zoTutjz{pT3EC)#EiT(wa&^4akWv35SHmXFCs{_+--V@k*pF2@%*nR|E7nRE@dQ zV${O#t&sCGXy+=vqOZtlUru(qaf733bL|K3ja)Quf%l3A2ll&Bv_6-{6?*P(j?4Qz zPj<=bER}B&hD*nu$6exOWI5x0=jc}GS@N)cQ)kpBtw@Sa$fM)X^{yTq&W~m{fy4e zF3%}$G$MwH)dNL?Kd~f&-C~K>!rTy5`ZOpEzeC?PXc6h;SWP9ku^@c}3M5h3=pxzA z{7&o2>AcnSiDFOd`hYz8PPx8T_M?>!kjG@FSPRWon}Ve4O<;-0zD=u6IzdiV2Q&q* zBYVk#&E!+EF{E`)IkQXrzP8MY%OIIiXNlNJ_2g>^Wsui&ih6J#G3s)0AN( zBs;h)E*BDSRyL)a`c_Z+_=Xk5vXXqKTAq@wH7PQPxuAq8OX@B9&GC)GK$61CwvqT3 zy;O#XQfqc*iBeWVb|)TWz($ibyJj05T(i8ymT6AU1VlC`$reKv5bt;V1&$B-I^dgG zg!hm4j>e9DnAY1Vw$U=j&n+wRY3~}@_hruEax!|`#4j|w?3!+>Y7C2JZJMmCTr=W; z!lVZ6*;AJN+sS8SXYiRxIC8!pMa)CBixHqFW7AXNl|`lM8Cr4zS=LH+kb?C5i~_!F zS2H0F-kOq~pa}kO{1}L5|D==V=Jz+ z{w?%ei(4732imBNeDs6~QJA#ZEVa(YDU+GY ze??!E)I481d=p%KGjpsNX?amfjWm)3ntiUSiy0q>m<%RkI=z?r5CYW=i`t+8sZE-M z>!!r>FA6V6meK!@weJ9ovS|8G49T6R=EXcb*zghs5d{TBL68y<5k#bSgwR{)q>zN< z(v#aua_PO20!gR|JwOOuiUB1QF@O?8UkfU@Pu#=r|J&!e76|X>_kI6YUp(%1XJ&V2 zXLe_1XPlQE!x3t2xDM$lu=lGnscXi+GaOIy?dX*Cpz_I7&5VsNbH7E~_Q2IVW~O$X zS=DS%R86L8Mr<73eRfrCjz`Ly4C{WBw^p!fS6uWm_fJ^c&ymGDp_Siv$Hn6M)CCD( zVrOVwUqUKhIdRBVNAyE|#aFk4#_QGp7W@3P>a=znIh?umOWaAW 
zrw-W{mWLqDtfbrwbw^d_^Fif%_Lc43vNSDdrJw%_Em@HloP{SL{wKjR}hOpqsph!3q$Hv~+>n`n6(Qbz}Y}Ml88IcpD z7XH@Fj*eW;jYft3>x+tVHttxkcjT})$Dv{r}~*W|2O?Xz|{7AD`iFlwJ^X&2oo zu9ZoK)CgmQF+?8~9)q_|_|LMLANMDN$-JYwBEKE6d&nL&-RvUV&q#?Y_+wNP4_1Hb zZRWt9biZCHCejXHZQh9-_h-E0*h?Uh7eS=&lu3O`+^WAD^c|gaDU8$jM>W#N481a( zx`4u?r8{9cdr{T(Twq0Q_2#Nl|NIqp6zZaaxeiR_!Df>k2J2kz*ooZAm5mig@ zqKZEIEkY_a^!b;+N9jxEHjlLll+$Hq?4T$TV(LWeolKo1*hPPl^J1F6ewTm!ZgY2$ z{>r~@c0T85n$7=u$NJX^QnpBcgF~4`7Ex{CQFHW{BP}nX9wkn+oV_Ogt|@8%hlUl? z*X*LN*s4ORF53S<7329O^rhlt9)eZtg$KSbi_{aH=^%~J{aKV+4dp>t+6)9?b$?a~ z!a`<9dZv(WFK@i(M%^3t-lN@qLa0nTTG)J?7vTDVt-F6*>;>0jZw>vl3IR5Bv`YBl zJq_L0rVqaD3m34iP*)&zETr>!4KLMQw|i&4yZcq6;Osy%nt$WbLaTHvh|Lq{qn?*; zf&AA_ND=s|>5RBgBNXWtp=LB2c<;2C5_>u#Xd%;ZI@YpvRKw{Au%~0Kg^mDD2OD$u zD?3s^$G3McGzy^(G@ZT3>FBFtfynj=Iwo>D@K-xJ#wv6?UwOxl4tkW&hK{yl&x;K} z-u(`zA;_MF01FL4G7WE3(8pR5LGu~`;W_q(SgaA=f4Mo66EF$*-?v9U4zqBac@@#e zzFx*(Y&Txu`GMb}GgxQ5fX2IRxRvP_@b+)f2s>(7hUtBAk4CsqyO;lUAOBP%M-6cZ zH)sVOPwzzH?8<99P{cvByrBk&o^bgi9O+&Gb7OnOv-sqRoUM1{5S+$)j#PW&%X@Uz zuTHc^W&oCnr-p=zdjJXxwn5I0o$A06on{J)hU}=7-YS8%^iZ z&(W8FAb<@Q%QdJIJlr0G+!))U1&(&;6HqVI*5ctJ?!sq%oZ!?+(Ip%Y5(YN6ECB+3rcn&2vx25C%dRjz= z-n}Q-$A6;|aG{HE7aQnH)?IU5DI~t%9P#M_5Fv$f;Y*F@`DRz~8up5DxanR3A(qO?kT^4-P7Ubu6sLBV^WcxOoz=g2nd>y2gUDETg`lSMnzwT4vxB{13KR!x z1fLHy!phFDbkK7d6Bk=GL^aR3WxY9C- zh`VV?Dgyifcow2{X4E5Gjy~Uf+Zv$!m5$Zizs)h+)X6kY-bUXx&x4Glh$a;QOB7u* zhV#){w)m(*_>^t5XA8=~jY0LF6$)XH_@8o?S?a*ro{))m0BCl|yes6r-QlL73uES&dz z@nm`0;pV-2PV}0xZU?Y#GAw1`z}XxeVC}k#Rr6L>4N2dyY5=uH0c91<2>YUA6}kM@ z9Wxw=A2H3PAM&w8U6>(-}dxKpEfUdW;XGEtCP+#D0gHlW75D{$NEAuV7_I&43_? 
zPnZXpLJL@53^PTM!Q7J~%m&f6rgk9Zbv3oYy2>CeY^E!eL5{iGO9mMswNci08oP-F z29ZlZBm@MeSr%Vq^sRgJ?XngE>=Q+RIhP}Yrb6ODNUwIT7;>OOU^$xx0wYm#mP51A z*TDwPCcZehmgF7avMa(OyQa#r3&0l3z-%0XD;WqIrtE`P?}#!W%Qi3%6bqsIL*bL@ zAAnB)##;ts;S(@$*+p7#36LUWMqxi3RUpY+0tXl z-zPLQLs_5;Ea-k-Yd9|W6k;^Bh4i4j{@NSB()stFppWhFOyGlQ5GO(wt~c|)jO2{h z-CZ{@a?#8MoxubIOkmKsUH}v9w1VbmWE)_G6;^;%?E%}d#1D7hJag}c5c7k=s}qNV zRRMSYLvUq*WCyyXRC+W9>QLckMxu^Ww0*?)_vi4vhO?}UY>tfV`N}IT20^#TbE62V zes@|9z>XE~ar$*j%#3&(j0pO@0n)>#ytzl&iFTG z3UIeS05|aN?qX>0x0D%M#I!JJ0Q3}qo&eB`WYAoO0kk8CPUa-@cQ}0&Io4j-$xBj(FYh_&3ryP6|2ajum*;#4bjL}{`8 z8q?D#8^!}QWrW9QH%FRBM>^3wRIQk;VFor!%xKZKAMe{w)@`QO@gkyS-vZvZKvrZd zo;_yQ_Y#gge}ikB=-;+`hG{b%L0BQsf8hbseEDeykJ>kLim5%DZ3hR+PnvnpNt&q~ znntqs?EpLr0LL-%kXlY`6?-#V4q{?Jk#6@NMI>_T~0wq-nBpZx+{c?yYDAv;V~cK zNwQARw;k(?zorFv&_M?`$H5H3jDJ5adI7*PQY^JAJV4)f+)&le&YMPGP(WZ3uW={d|0gp$gVE)w8DfQS5Ri(4XHXgk?0<0m^@J$-Qlu!;Jecilqn zra`i$+|4f*o^uF++0Y_3T4#dL9x@9gePu;0ormmO(i6Q1%~ z9}2T}COkd*9k9AL9eTiCUDmXl8>?q{?f8+Kru%!%)!;9(XMK22EGx zInr%MuO4!u+hEe6Lp3ZB1|2IZSG%{*G%QbklpKKO!5m4ydfK^J6#X-Tv-4B3GvNXa zNeV_Wx7@7F@N^kh!RO^QDv}H$A#tI}h;Q*PFW#Uo-&#ZPh`q#Tdw=amtLCkA$MfZm zrkxKxX10>(NVERxAksfHQ`rk5y{lnA-fwQ5K$$w)o0GO*r;I;Phw<;%DdX>>n;{{y z&pnOMN5|TrP|TlpGvP=}{z?`}KxRZiu$uPmoRR0u1|f^2f!7lYM*0@@D3j8hc^&i)SUV- z5&!4CB;;IYk2kw;Qf%9*Vz)Kdz;9sQ41e19j&ou{Steqma)Q!Az*6C%tT*a}vbK03 z$uw-qebzct@7*m=%YZ_1gy_K=@4n*&gb25c|OuvA%w1wSOiM%&4-wdL!% z4BPjB4C^;kCIE}FM`KZhZ6f>jt=>b$5z|*72ZqcZ!)- zJJq9bPGi$IJZnZX6BFLMIOiNWPa3KdzCqfo$RTe>c$1-Id{I;#vS!%x@FO;sm7N=* zsAPW5^S1EwXYI`7^h*IbpS3ee@ecC0ON9aL_?08vZb3M3{>p|pnc-+*d zWeN-%bxtzZ;NAnD&!2Qc^L|;}{sOM~6I}BPU*SKoz`xM{(g?TMJNNguB>6n<4xm|a z{ybYRUm}F2+3XC*cgx~>v7+PC3rtQl5aKJ{EWS28EWWr-mK-$fbLiw2Q)I<;*F5!- z8HaF(mC+nga)h4XN;qRJ?fWZaqXIk-vQcdnjY59>pYUTQZ4RCtI1s*TN(aEw^-#n61*G z=wb`6{A7_OceyC}iSyZA@YzpXj@;xrm{ZkcAf&7>S)LyM5%V89Y8Qd2WY}>d#~!5}rSVEz3_k#lH7N z?(8`NXAitd{MZe1w;O%o=G#reoznI<;}P3M4w8fVgGj;@s2aeZy5l7z%7bprNW4na z8RafhW2fn-kcniHVItoBW~sg+d2$iO3YC6-P{aVRvg+$v&udx7@DZ 
z>fbcrjBxroE^4dud`k<9a!X5n^44fq_^Gn04mUv-o=`qc<6j5^0 z+-BG1X6E!BbX`6N=MKq6-C#{V@RyreHCpvD#F~5(HH)=Y^s+@Ya;c_eQKo|yqFm*~ zAP}C8WsUoQVsoPt?fJ0a{O1=41;aC@f%fS{%yS~m1pOOqHwUSWhFv$6wxk7w(2bzt z#b$R=a+Yrxl8c6|GoIa%cGFJ8dGi__D3Q4 zI`^;|JI~tT^6-dhq+VP?!4W_6cZiU1vw*q^E%p<%*i|lFeu8wl%C*>OersrzYO93W z*eG261s8=vf9Le{{Nf58bIZllRW9jdKr_kWEeK7WM|_qpo}(5{>$t>vaNbo9?YXI^oPk+rURmoi)EB zYRyF>80lMhaFE`bPK;1445*BO?Hs^U)zIR*ov#Vid_z+eho$DQ9_L$|Y5?mL-6P)v zcxIe`ioZ$PAfMI$IhI(dhup)8g}$E8BdxlEsgCl&<~h)lkCr25zIuRzTbc>v1wKLc8W+UXEesyxl2ZrC z6BN(6)3Qh@G|qg=GJro*z@G-rpDEx^1Lw~YHqnI38|Xp$0=Nktq9)-Wpjnp5Kp&H8 z#qR;=wgnIl`37j>lK0rZWx`51T}SQ7;(TdcIy$n6w&N2z#lk&PIo?H9yl~BoG|g)v z_GLb~-E@=B#y6Z%*fW2=iK?AX{`PMwYRSnp`xos*Xz`JpoP&5KTE4b&!BXGV%U9;E z*rGM^z;Ck7H!x&1V(NcZ$b!lcA}mUS+o-HA0QzbZeQKjqB_8^xx?D|1Sx%FST#d3j zQhI(*s8f9&?HHe{$5VMV{Z>WJ9x~SOLIz`F7Lzr|+B@4Yo2Se;pg$N%hN;=ND%44R zaltrkUoz`xNWse8(Fc*pS#oYsnAtAnHmZmtSQMr$4-j>zCg2N?KJ2sj)OcK_1BZ=mpZF`OhUk~RSDJAQcwEEwFttdZ#O&#msSC^3A=<$4 z9I79s-V@6B?9rz*!YJwrcL_P@ER=2JANX99$#bEiBXN@82DR4Er2qAVC}}jzmxcUa z(L6!D6k~!F59AFW(Z9LAlm~UeB5fWu@yZ$9(5g|A+a$HD8WaEEd~o4{DX9RSwR~1o z1$2Ss^kOrA!cr$+`^=U)JJZ1<%%4SGY#3IKPw&TGe16@$_yiYuVTSRMPcfQ+-|VJV zmQ$8hM1DcU1@9LNVSb~!-(W`2K~_;6*MUq2X+?FAGKkUkV{bh)C|O2X7uu!`&d49eB@&vE03KA)>;LFIcUg@W+L$TS?;jOlpSqw*u=xtX6LZs&bGX3 zXSm0#zv69s{r*z!F;H9q@}1<`jUi3K3ZZd>jf14)PuK+7010jWp}mOVF>k!Lye*%iadJT>`_NKxW0aT2SS{ z^=aNU;SzOLnDjPyv}K<+?)T%Ubf;;<>AL)1t~geg&@Pv&@|@_BX2kA)tYL#qBSh-W zWB8Z0Z1-LM_A7{QmcAE1ytHgrN}|z-cW_b0b%vP<6N`}*s=B-&C6UDH^?32HJUeiG zVNPyAL15m>8I$LX)-o3p5}mdHO9yl8yDnGdr{jj<1oYAq8u~hX3~S@!5xxGtIwQ$B z5|3gh(dUqAw|F@o9QxropPH>OA*DqNSbQi(Key_V%KWO!MUBF*$a&WM`lBJK{rm!b z-G!bG?A_ra9f8ZO!)O!SJ1v0P2t0S)M&K)OBT7$dSh;EF1MRIuYzGky*Eta>QXp^4 z5iOc%M)dJRh$?foo(^08IbL3s6l|mK=bx??ey#mZ`dO5o($ME-ivq2cAWP~TAP9ec z0BMW9U*yp~c75*+6K5U>EYjwZxb#q?E^%q3e_4z%)`(YXX^AO@IAnCFm5>A!fZt$W z+5qz~J-EMUC}uS--|FcmL}OoD!fs9|9uq0liDfk`fjZx0()!SzsRS3|3h~yt<+v4 z0KmUsl?n1WO*=NyCrBG(yD?l7gbBQ`5JkeRzJ{+_PXL$ 
zy)*bqRi5zYd4O7z7g&&4oKmSJ@p%0jml9dwgJY?0XrN9v(`#}*9?(V_qq5@;CY-Dw zpR12fsrpR2GSJ@_&lLHVCJ89z)=zC9=}FG-)0y!p-uTWZi1?_5+XzSB@c?qXXp`3- z6s+NE|Fn*-#)jt0{glqU-om#Ieh*QNj=eeEA$12sB^|S+YoBhcDLA zmH5+>u982mmOroIu?W;hRRZ-Y!9K7&4d27S0oKTO{Q&{w1Y&DPQ9L80`VhQ517lmPUI zrxhrM%LnZt589Ih#b-WfPdlJyLcPE#Uw$*56Pm`IE+F)I4dM~s7c(N9g=7cm1)1KY zp8s=lr-9r{3g~PeYzT5YKwU!wa?qJN4jNROLH{G73oLpMQ#@=t1m@-jukF4d9UdDzWJOW+Wv&p!zKPeq0T zJaizykLU3d|1Ee~TH|%e(_qn#P zo<~!@=K=PiEpKV>Xx{SZW!duRC2x6jYjwxlb?wcu*>*g7$~zuCTi@_@U1bT4-nI?T z@$wCi-mULcda!!B4pH}D0|9^vbD~#o_u6y zvzkPAs7X_g;GbwyK0l_pRbBq1;iIj`@`NLGQeRQ1qqD)ibPgh4f@qK|ukBkW=a-@N z5*$GeKhSy_9W2Xhb88$K-KJhHMPu)L;q9q>|5?v9!impVga|A^bpog?^lZ1Nx>49+ zKIkfRcKFS#>$DabEgXVTjtv%tS{?6?zgYk@`yG>29?*U@e9KaP;JK{hnAhZ*^>wl; zC9+)B)nT%(o@CY4`{K`*;a&N$N!w>=R|Y{>!~T1hg^9xFx+IqB95*qHJ#mAmcUNrN zUcF>}sCy7enXv$AG*+dN3T`VVWgBTQ*-Emake*My$|W#LzuFz*efxR&TqIA5OrN65 zCt`rhE)3gx{F-poQ=UmMDMoYDlWf>X*Kt6R3LssmeyzKJ(K{;hD|lEnuw~k;2j362 zHDS^!`DyD-uMfD~FrTh^dC79aA&O~RnYX5VI|yIo#{mX?a=Xyo!wkaAI=wx(bg0!8 zvNizX$;rhMVDkIHWj3#c3=ooNeEW|OsLqC(b-rC2^VYYu2{UW@HGp%~ZpRD7^caUS znSI_;!}Ep{9@^?ctkQfa>uJ{Oy)0_o+@a+L7ZR2BZ?cnSWj@%d%OAjC-PS^kPU(c> z%vrMzALL*;61i4@uu-jTXd%Xv-sfYPq+=$#-Mm1@kG&6dvux#7x|tUP9rR;&(i;c4KD1P|mMMLy%S*G0%|y51}S ztfw>_az6Tx-y{#vw++W+pjt`1L*P1=67^efv@9S4TQnDg_p zU2DF1zHGJi8HySwxM50<0XF53WyR?!H!h{e>_AU8y#>%Vd93tVVct>bDJ#zra?e;W zyj|C!4lIkFluymr0_NMMj1t{@bF7L2&5=7Nw(R_-gQ*8cENFzmwsW(kx4iTKplmh; zKum25?7bXJ?<&I->~@`Qhb(>^uT}6v^N%%7bclHebYQ_E^@Z`q+JYa2>6>U`UZ#O; zh^58VD_D!+6&Ld9)EeO@`Lg70w!%DcWlNB+Y-{8zTMGM_8-Nug7NO*JB0s*uc$vzcgYlPvK+&IHZ@7$!Br9v{@$ioe%alMdRtuU+B zPL!iHXFMrdZVkq}VQvnl`}EHTdG3LpW)Xcu?hCfD^aYpxAmg&<7-@o4Q0HGlF(AV> zMdJRf^!AdTHDjKnGd;71tI-x>*QnbkadV7YL^q@z(9SNon%W30R{o!20EJJ4YB5+>@{XZ)c%dZG_{ViTErHgJqr zFK?_LlNY)%;v1afg}XXB_CbMcZ;g2PgkuxPzv*OwjJNnjI(EXeM#i~8##v;=$;X-K z2fXJ%u0P0C@Lu(zKXAE>&hssDK!_<|oz0}jdT`QX3R;l<&E*aCWAb$y4>n$CK#mwX zjma=(Q&Pu{{%1-qfs&P=L;e>s(}Q5dOU;RKm)Jc7*OTzMCrD#CcoLC z@#(G%2{1UM(Hs(9{hpRSz!KmsDYvR@C|>uw^xE%1B|?$|4WkH_oh~!TXYk>B(i!sS 
znP40T>V;kAfEPSEMTOYG;XCrX{~??N3F?cRFK>aKy__0Zxs0}gDR!U@W^&d$P>@rg z zMjV?uvViT)=~UEFr4?wR4X9BDYLtQEGl1ep0DaSe{muz(sirJYWivn! z12JAQwQXf;y*Oa}2*4nbDihg47`=!&eZy=30ARDhbhnk{2v6xL^V~-C;E*Gbr5S(r z;E*FcIpk?N(<{;l>v?~Pe5{ln0xV0p|0Q6v1O#XtJY}y5P4k1>!U@I|2!c+Ujt}`F zXgY5|7k=YJ7h{joB^ow}72B-qHft?xm5$Xri)a_8KRGZhBBNWegQ5w2^O{pNeVkh? z>2ng)O%JQJixSkzcDfYSNT1|ZpA_FG5xzV%(&M$_&(qGBJa@D^5|>~po)#aCj@v@G zf3LJ*B#fuReFw$I_o_xP>A0Iu>VhOO4ze@v(Lk&|HSXB+X;|7QP!HL)$9q4Av}&^F zNBGv0yYC(i#-5gQ*M<8!3oWv3lzZqv3HRtd8ibV+?%iA7P$<~c1^y|<{KbsfO*3aq*lwL-! z4G;Hv*ePi1{D5OndZPgh&>9f9jjUcmtJ?_Pz80YDT!l2cLAcpEq$hM%zoE^O^dKl% zOTznSK9&R8w;rw1^_0%)N3{8t;0Ux4_OLIJ`ZWkQ+DeBXo-3jK^%ic$=dW}7$|IpE zr~ZEe&CEuC$IUJfLAzSGYZ^Rl4x1-ytqz-69#)6V7}-)krY%=K_zdlX&rmBY&(Yz;&dKx=*;%8`d-G$^pBP4$7j%w z&!B&{On=0Grr*xN^U3uG9XzwzT9IH%prFUd?hJh9lO(${X0?6fBv}Vy=kOV>d&uE4 ztd@IaFdnm4=KnMtI|tAwwGTOfKJntHFe)>u|HYu}JT_P&dxy}7cUm}vMz-a5cCTSp zhtTL@d9+!StJw`}JlUm?90_nBDT#THp$>njUcxQrj!*XBqv%MtW#;Y?C`{!lBy zlfcEhg0;QFgU)2>F<Zyh{q0Z&$Pg^R&+DTDHmB2OzbyMO#Ml0H7yyR!6g)sC@vE zENWZ$p3GFe*ODrbVsSg$Nm{9ob%3qAqRhZLG{0>>Q{wd&Ua99~Ir}JBikO94>%W6# zma>2bDQlEZdiIu<0SzDEkf_*NP83v9n73giIq4z24x2JJH9(HiEViDnsPK-bz8R-T z*iRn>#0@VBD1LVBWi!2;@Uog-P87&My*SWND^M#avn8e!1lepc%^v^M7IQG+^Olhz zg5+hf#ncyAZ87KNk=4s1i+Grb7N!{=d2STr%rDPcjWYxqq@CgC*oRC;v`+)=!>vX- zPbpL0#=>w1RzVSB4g1ls0&Y|oCYvkp8AgTSvK<8G3Y-0?C18sU>37K%ZfR7q z!PQAWD+G_V5j@6|6Fgcr+~6|^9xD^vtrZKC8GLJ2CL0nD3z8u^>#PhJ&5;a|O)dBg zBtv8)4x$JwCMT;$3U)CYo(IfMtza2u!!k_9GEByT&%iQF#)60giwz2%Cu?+`9h!&j zP&Q0$t>f{uji-w|o;KWcG^C3>9#3vM!XPY0DGys9pY)=6K&>05Y^c=Dos{(%>|sSU zxE)6|_%V(OpE0Px9vsyhwJoev@7D6edFz<0ma6|NCbLQbE{oafa23U~x$2UXBKl*I z)5T`I9P6eTW142OOUc%)!%SrfBvuQh!o3?+Ft!fY9l}GVy(f;+o zTdanpUpHK7(exp})(i#v<(cGF^DvQg*k1$rOtOK0 z542sCZHX@SBjx9?<|LjHy3744yB4$)0$~tii@R)0zFR!)BGi9p7bx1rl+2@BwU(q1 z=5|-E30<}hMX^c92Xm*ZEI zm!Dl!5}nI;gLiHjAxwgoD-C z3fe5z)_+QyuW}o2z-n+C@UR-Xuwh~i0_d`IxG!N{-a>4RLoe1zrnJO?lZwDR(2CC> z7N0@wQ3sG~KZ9m?{0?S?c)Gv}ah9hABD4@7;-<%M_nsUgJ_F(`Pd)**+dUl#URPp7 
zr*7oDPP@sWr44kt9L(BW067#8w^}w39l`Ta%dfx=-lW6okV8XyThlxN&n@I#(N1Dl zlrZ*a?#r`Ac)6O|No!=+w&H&g{A2j<0+_+7%&>cwnY|tspNDY3pm>($SpP<@9?WKS zS!la;ETSEFbW*P(p-)Fea3hMW12fpgtyz0i%C>dEqoXXsVOHef4GGlFnPo7wKdT^=vQogfZTMb*G4w*YXDfvMK+N0|!%Qi#=<~-*E-y zC;%o!Ht2tPS&$seP_8c!GZ7ft>_BH}1W8wfM1*t-&fgBbI)iQ*(h++|ToSu|P;-Tk z^sySD&j*TN!ARS&r&<|lwJdGiKG>FZB^rH7FK2BROqSp+;ct)HexhHt-+GIj(E0?d z^>Y_YUbM4b^v!`($tuXr3(Q}mm4rd z^ZtH^Cr+@mkrO*Rt!s{kpx<$SgZ$I;ij&{Uox%n>i3dZCX5-D=j`HMkNEe#Od?}2! z$o36X4L;|Y8Dx%>p6k0N#eA2Ko zp82@YD4!N~NXz+<-3}EcRnXlv^NNI3hnn%`r5j%3&S&YCAc^Ib+*h)@#_5n0k5R8< zPgs88@gU%>YZWW5fV&c}d{HA*+yrac#CDFqc1@tapF23sH^(0WzAz_0Fn_fy;DsUV zO*7jiw)D*?Kv!Nqf9=!DfSG7^(Fk4k|IXP~cpcz4iw${af#O9PtGh%GTvFo1N)x}{ z30HDGJauf_d~iX}o%2in0rnWuH#OmI+HyTGXw z$+@|Js!l^aeqbq_hp9P#8h<05EB;g8xw;PEVb9uEYM(^vGOf3}~jHaUYU zvl`*a=604TqH1v650}ixn2Bb_rfo`{=tPCNSuZ5}L&OT+eJf4D-aqjm5wwt5oVl}g zN&`IX;f}Q-mt+xak|Rkf=_+#|$!NA55iDG;+uDKC@5s%p%w9=5c2l77ClXsr>6Plioyw-8G)eZ(o7o>6m%*i3(q&;`Mhih^w8$~b zZjzgFE)u-VEro?<-Z7ea#}v*bxg&IyQ@rAMv=|C3Za_Fo<(n&vDH6^M4|xJ3vgLPc z;U&v?9YF30JclYGW>?^O9lyz*)A5_^!!{1?X8hC&{g%b#^CRzbi+3?v@4jX$kC`Z? 
zlj2WAoQuVwcwT4qD1Id)J})DVu`{k9wkM?M0FMBP-%mU68+niYh}vk@(Kb@GD80kO za{BR%P!ERo$R}z0&d(`?%iSSuE!|HPSh}}vxpJj`h~PswoqsWBrEEEpu9CiQ7lk*P zu*B-D|~q6nv+>ISkL5qcQ~ahlF1cUaryu3 z8z^{mas{iqnr6G+cSRK_^S`J&_vtamRGR0MN~jA5@*Ia-tuwTd-D4ktZUQk@N!MDg;8%);41g=#J{`w?+ zGRpK(_1F7JwmLB?AzJG{s5f(T7gn$Z?KyUU1NOT`bQErM%M=3G{Pvs-_Km^zOD(6& z`UCHiQbMkxZiA zX*6yo>FTQa86nzOm)R@4K1Mb31Z8B)B#(L;j);s==ca5Ft{;{@5JQsIl%rU4b!Dd~f$9WTyY@sKoE{&Y@&wrEl^&vQ!_n266Y(^WQc0o@QK@H=C$e=mG*z zs&EgdrqRl@Z;X`YBl@7%zipkss7CUJ@?7&Qw+Q*`<6s4JaVXvjClV6ZW~ zo+MGvXx_ieC7Gn?ljD=(6O&NNHqXzX)1pu;dt)B!Ho)CgGK?$qEuvBpY6uk4_`+`s z-436mV~c5D{^#M(@rSTnYW@m^N16IuughN@fh&Qw8FzD#cF2GMtgZX!?ddBAD>qf0 z8qeBxnbv>YqDhtX6|Ine{r=am9dy*3)Qg=o($mP*^osJ?wfX!1gz|HaSN=m-{&(}& z9PiVY2A;FWJCNf&*e9oL zwCN-*GdWpJ=i;9^Y>snML}nn0&&Ii=8j?~-3P}&lh%&0#Tsn^!coXLWUV1f=q=kZx z5E8O39H~;~u(@b4*Ez}I^R?O4ViEN^{xCE4!g7OuhoVNrM8}3Su-|sDI z^mD8Ge#_l{RgKpKR)-RRONs)E%5qVnGIYaiZANB(;n6hpdRF0V(8ZxlF7b^bWVYHD zWpqRGwaFxv#AnB4B;AQmk565{<5YFB@o*VANs7YHGuM2za6hhy4^+K*dCqFzCBd_` zsQQVzpX%YEBj%wrCtb*##2h-=xqMyDIx?P&Ss6NahFXZ|&8YY?7qRTUDo4X4(dXz} z)bY<_P1s0@k=h_Kzwb+AHJOw&qk3&lcy=gJM@2_PW8?f@>A1Uu-r3@`TS|a%j5cCr zP^CL1{lK}MKavyVvR}pI?3JNwbBWrBgZ057U!yAJ3m^8I2r0nUiXo}tDcUTOlpCL_ zrcHnKa*j)khjb=s;VGd=JR1=Z6RKuq_w~+0F?~j)x8Z5;5{v$tkmuNN&Zz-u+L@ZR zTX8xwAt^yi(u4G&hK2Ec3&}5Px?F{+N{vfRIk2Zii2ri^x_r zU7;dHJ+YChH^hHQwjiKDg;f6H z`j0~+A|p|ZlElQvriRmYDTE2gqM#xR!q)ivP8#>=dSn=i!bXctjiIiDu(oQ3k$^2@ zLqn&}^7aZ_73pszL2B1R-!%F!$%neS-9ULC>MHC$Tp)&~gr;gUNK#fplA7LVe&0DE zIWZl@gVVy3LrDM$T@f9oW@qj{?Hsl?2m#9Kjm@3J1?8*CD>f8w*|@rV{*tAu7HFxL z>5eG)FWj)QVpD19rY*}#mMmJad=ann-MiY+K(9L)*qdUwF(v^^x9KW%Aos}DzFF9P ziSfyWqi(+R(x|t5mEyW`ZFF;ecNErsu)1!Z>K_zpZ&})%7iy1brBFMHv#(vN)JDG~ z*G4ZH&jq?;-RE>nF*HqQQF>qVd4wqSL#lM2isr5rj!66!R8E(d;MX5S!e+xh^BNrz zu3lVTsiH$N>uGm#jikna6Lo}y#jXwO{K3mi8?J7CQ}t8CkxwcYX(9$_YYILVAsE*UX}Cc7KgBxkcaHqK_B3)or7%oufm)HM^*yUDes+fZLiXK6*2m;nV^n-hTr9XtNA!OB$nY8SW}>$9NHToal;X7V z_&nsz+Vs6D#gIk}*c&0JIm0>Fi>;>DDC z_1%>ysGC`lwCl>KJ1=>58OyZpOf#HkwjsR2aEd-fuRg=t4R>kzjSKNMEQwjRFwKY8 
z)7HBTn>1&#`!>CptHF5ka0tK!7Y8_$Kc?O=_`Oyp^(}MCr2(QLG!?pAO`Vv^g$x@X zHBFD)Tbm4<;&ziNQmwB-rc2di!=yxVMvZJ?6)O5Ho8+7jmAW>asM8JU>F#tQZG%Cl zV9@&1Fg|G3h7P1{FzlA!b#ftR4jcFKRA1v_7U38+o6J#CeQnjB)$2c0vrcS+YF$x4 zA$y7r)n&&d`=NyJVDD^~jif?f9I0mWn3F0@AF78OvQj;eIk_m`scBExYzETKTow#; zX4!P2Na&M>dMdIp&Qyg55FfIEpSonP5qA%B!J{bROCu~ZB1N5Z%0)h+8j=x{8j-Dw zjXKH3szF$!5oS0~gOk9U5^rVgsyiA;v(haFe5yrKss=J*&<($4X`S8s+%+1_bU%fY(gSnZ* z)1aM=&0C7OiiioF&R&61by<0R@wX^3ot}|XuZ=5rnI1cxOy^nCh@3U;0!fI@nUu0D zk?XcVU4YL(sA-p|oXDJ+w1eKLM}c&8#a=_DHon|t8gi9RlUD(lC}Bk+9Z$xyfqg;T z8u1KS+}ZgoZ7Uw81CQ~+J9}V43(gh}bN&TWIf0CmTPCj&Pm?8`on1ZVRPE*|!=5Q> zDsL)RN>2;dD(QOhD)tiXl19oSiSwtr6jYNQeWrXkeOAR@(IHV&CX%XV#%_&_T}76v zSFk1CsK(h{<6BwoCL|PZG7#doE>$?D*v9jMeU@-Yy|XmE(Sb z?v)Enm!QD(c3XkzSt{Y$tkEMs)V8cGeVkmRGxnA5Lm@{;s=4G8M|X<4G<}-(2baz4 zL)x`7eVu$sP8Zc~-nB6f)e6*Qs;p>Zq&AX7L`FnT_8RQnhcFFc4i^_wH#PmyC6gq^ zrR2vaLWL%%lcVATw9Hg4J?WA{lJ!XmX^DyJab`%0iVrfVP0za^?E2nS`Xr&xki)&R zku)}ESC)IcYF$Z}^Xy5&Iuh2S+X6O1-P>iEK4u{y>d>^QOtx4IZlC~dD%SA@w{3T_}aSk8}iTW%xvU`4@+L22RXv;Ab| zZcnIzK70Px3@(v-X0T2t2~AP+N08;xlq&jCRam(j!GEifd~wxA;Tqit^?@R=eOM3r ziI!?{yZtkXb~L2xQ`eD@Si?Gfu#P?XdUvKG>UWvvAzDcbY44Lipu{ve^-9s;jM-#O zd}Lg7Qe+ZQCnXtk+-amLMW3P5hLM#7sN68XV7lVoPOOF=4fEd6>-d`N^}FwQxPi`Q%{FD~4qCHrQT&ow}3 zPh33UHFv@evrnW!&cePUfm?}T4QT7_?eZl_kIjovoBUK{bV%Q+vFS+#8KhiYxeE9H zr7PDga+i)EC2(+ROs1AjcS*@2*_j~*NSpC4A#39T+=V^{w%QMf8 z){gQTGT8m>4Dl<~4#WP3e{w!!85`oNy=jqrWl-ISUp7HF#U zVj=3o`4s61uxU+CzW8dq+JBj*I-sKIr!9=Vh{?YDHGgmEKNG6cT zb0L@2^ga55-+J~MYwH}oKk^h)?Vy^CN4_AZ$)zDR^YbQ^OiB3=qp9)Ea;Yb|a%$&I zDj`_EFgOBS6zBrGZG%WNRol`?opz@SEWCY(GT~jHm)u>A(i24}XHTCmLMr>Ytk7xdPSa+wsAQFpE~y2hP8?vigXB$GIOx{jczJbTh*;`G>_?(_!RD(d1w z)6(KH(zLW*^&MLttj^mG=kTH@ zM$(xjyh`H?amdLYOaenOnC2xYg!5)Gox=ZxRUy9FPYlT>>00_PcF)BSW(dP^fW)O5 zARg{fq)1FiG$uwS2Dr0Im(U=hbC(`x|0RaTg@CTiG(>qpH5t{kj*}N-N8S}q9DRMQ+FK3%)jj{k-~M08S)ahf*#>k&`aD3dL33!99WB_ z`=@!fSNZAPon2C{SnQIJZ_GF5rEE$_I7`&$T;{Wv*h{Yy)>X}3rtV1B(3cLP&ruiJ z0V1g@d+F!-^dfGq~Dh*ihDm z^jnC4#J$-|b)xs@i!b}BtGJnuun}A4 
z(g_fRbJ-*|e6e0{BdvK%6|$Pgi~hO#Y;AL}%LkuM>mSqu1a81WJacMC{+vAa+^J>s zeGrw^V_*L-KULokbP4p;`@6etu9E(SO)sFzsSKrlo?CL3ImFFV8(oZn#=wNNP>%_@ z#%v=DrY@xXbF8SgQf;Vm>Ek&c2`&NWN-`<>&O!gSR1jdNJ2Y!v~T0^hu?%}*;hRvbZwYZ8(f zk`k(xj+z>sqezq?ntVv6_)cA_E?s6I&O1tXWgkNF>#f^Jd01&^cyUB2*+h=#@7}Om zy}ZnM;bfnQ`hjW_CoViWJSD`S)e2> zCx+(eQ?y2y`H~=E)Y5SmB;M2~@`G<$mLV-8FgDoT)a;_eodlrT)zRnb9O>#>r_;2D z*co<@vCSV>ITp|kVs_y1+o#z1RN`Dh;?j&c2#jk^zfPP}^9^|^QE4mP-*NFKBsg@w znsqnba*pzk42bquhwYqBk@b{@Tt>)?;Ay0XMG~D5lMs^}Mf>+R=$DN7lsU#mMu!?B zVB8=vguaLZsV}fs46&fr;F1_0XLKjoOZN?@PpAc5UUFVaUJ4bC<1&}37CaMT68UL> zeKKE9*3fw|xN7Ne%Ui?ds0kag)n6ML6CH_hK|1ZlYi2@T%ibeIK-Fi51qe2|zM5Ai!ln(iEDj59(~ChCzD5tl0IGw0~k zsBFk#5)lDyf!k+Xq?!&ejdPBRjEf;KBXA!8l73i%t)af>T zo<8;FAM6SDv1IzH#dFnEqr%xgAun4^U!(tm+U>$z33YcCd~q{yrd>>MfqsT2dShx< zY4z^nJxH9hEueH>$^7Kc$VX&))RYy--Y(;gUcYYu5eX@ntNQ@9E44ktri3Iy7lXgaYfp{Myj%-64$Pdit*99 zQgJWsvd0M*M_1NN0KeF>g9xdtU9554{Lo@`=$%0zW)BCXTP@wjd;KN z$O~U(oTG8Kq^e^gXD_b0(X+2j~jP5J$FwH`IXQGEQYG45fX)>kSfB4st3%U z7^z)W%ofp^7cX_oM!6BOZrsU}&ZtBB;gQKClIT|jPb2S<{?*ghtA*Wb*ThDnS`>+j zB7SPJ^6h!#Gw`89)vfLHk1EGqRrD49FN#gTQAh zz`~E;{)uX++bcJjgZpYSJ$m|rMaA>8rV+JAr=Co3-_!orGhh6otsv_oD}q)mTJ8nG zJUaig&0b4_Rzt4 zh5N`ca@=>%qUwsQ;-m_q{`c9BI=}ky&@S*UtxjB$rB*0*V`KKv;)=TZaXY`LpzHSv zf3Pu}aMy3%{P%a)zNG?n>(lg>_N|`puy!Er?vc+gHXi=q^={(^zc=yyt@K$fDo!!> zofv40&es;WWTcWjcY%_)VEQqkzG{IH+988J2ERm3hylmG)h=seBZGO9W$FD0>kiSF z8~h4P=d>;TPGs$_^=$g_>QD3u>e}aa*RBJ*FtxU#{SSvPG}d03&z@xtqel(yJ^uX9 z+W*Jedw@q(Zfn4VFtfK&qcToBdOUj+?1G}ApdthT1*F%|36KPmklyRG%w*=9$+YyI z%uG5VAqfc(LMYN(1Zh&lBVYyfM8Clu&;8#$37{VD{qOys{|QfknO(lRzO~-K0&p|HEpaU!kB{$Vtz3`d`v`Yi1!=PPUr9H|Y7GbjXfGHpPV9S|Khq zS@N*b)9DyJo%OxGQwu}8z+x&b%P^+M;>arPOL{^d$~#2LpmSRigzhD4wJ9ZE2;)A2oXYg5aJunNI67{)JWD!lzHjeEt$=kTUg02(X>GXRX7hg7-W5_ERi9hWb})^Y>IqM1ewELfm`XE zJ-Oo=8^azQt$Yx_`zx=*x9 z{e1o`6Ddcw-eNkH>Xhj@xebl21v>?7FkBa=4R~v|7z7)6CqzVelY13{)yC* zv=V+0eLle%YlnwBzrfbUS?a|6tC^wz5@7H%vfrZ)9{HS#xBYBp{g{P%%|tFUl>S{@ zX*E{Ms3)Moj352TICkboBT-gh>GZD;0eF8Pw(R!e_@UqJq`I!84#^K~F11vVr_1Kx 
zvM?A)KL6lXYqo`jCXZY~USeOlI{WI)GoN1LNbf6c8{mlkiZVx94g<{Y)SO<`lAfEK zn+z{QVl0Ux;nj*fMSW!Pdh8Qk#CuIn<#14EsmznxHKxN8fU|BozWDzPdn~o~;9pFiyOg?9|tV)rOnaSZ2VYNuslPC!tj?5#9htAl{Hh zR2+W}Z+O%F2ByMJ?<4Fj!dLgPB5j6RB`6L;F>Xt&yGHF_N-obvL3Qfa z7bgygH-vJbOr5dLP){0w^)|qySHTmZvLO2&z063u_L=t?YS8jWvc@cs@yi$zt4m#@ zXO*GiI%ECs?ou+V4RAyO2zZSlHt4E0xIcN%{k#WGfXk_5vTi=Sk6!ejb^Jf8P(dQc zdX}(7ZWI+zs2}kY))2}zoyk)N$|86_#``H!Nk6Nglj<&2x~oVWr$IfEn|4<_LgZq^ zU=*YU=-C*?VRRTA#7-P~yN<170(AwovP`=98!;VX`f$079}gdtLK7xjYR9Fxkl6JO z;nHA(#zMBRz(MGsVGVC^Z_r6pU&reyE0+8sAfz#(!-cxJve>s6A(59Ov(B}>u0_zm zgtY&!{6tTGDG6%ukyLjYJAucrlAun^ngGe9GQK3uqHkhdCf5#zpD=}wdx7vlEDAL| zNxer?mV;MdHyu&1kHB0f@Lh3?B;1f})Re#>Frh9{{63LeaX)eWmq-%NF92GbO+=rN zBDE!$?K>?tHWryr!wn#my;?$+_W!uY?`)oWt*p<^tVz!p&!vjNoO=^k{Bk;7Fs}}< z`~rr&t)3P$4Q)wTvz%u~kvYV_ExAEarl>T;R<*}lHYPx> zn#(E0^uKGsS=Th9!mHL5PI8T3UwEm4fNpeHrA)T?Lz~>oyDZ z2LxT3Xjn;kiYcX-lXMrE@_|DBz|;jbhVM!hY~S)JWph4OUWMhWd~?pcDRIJ{bd9|8 zxbn-ckb||FDulbbm!t+quV2gJQIhgpeQ83nCN&w)hc*xWU3l~Q zmJRkL1$f#MCgT1#@V*Q%mRW8$JyLYD6eImI0yTWjr&l?_>-@d$f& zhyE7-1UG>!UlzNZCGqz_xTWLW<<0r zR~0DZ+n6AgVgZ*SM$PC-nov=7y}fd1`+2|XbZiezJ}%+gzm>l8b>va*Ch6E&(XjLV zBZ_Lm-nf1Z39cS}`(Mw>J|JsWMQ&WbbG{wkMF|};nmZ-XJCWwtYrlw~#k-%_N+iHE;f8Zu#2AoibWQACQvv ztd8Q=^3JB}X2Nbexf+=F`|pJXfLpYoBBmpuIirOfV7+heaF?Q&?-{5>@7s(x(k)&R z-xidG0ilIH1%9l+X{q$0@93p=e7Y}JT95zuG+<``2iLG`AHi)$c1w+Or61D1Dfm`HA$BZ|3&QtYSk7 zo-PZ{W0Bu{RpF`*a+n<7b@(%~9V%T?B|p=kGAi<55*Y>a8P*PgcWXA2OA69q<*QSJ z7bKOg=o$ejLEaldAQ1;&TChE-K~4w9*cGsy2JMJ%Tuf$?NsFhv4Txr1(aLQLcN-67 zI`r8YVCZDP*3qZ3l0CkzUHnS4n7CVP_+5+l4z&1FcZ=usxA-Y{i+9WKw745B-bT)) zY<+FhI>KuE$Yn)N7qLMSD;Bo zo3(xVaegTO==5nTW!}QAw;WWx$@7+QuNC(K2FWv+ZCkcCpTq?0NGK1?jn^c?Cu+5r z@>-Xie17DDkCubVVx_Y^enCl-Uv9rBCCT~nqBK-_DUcT^*y3W5GGCGZ&QFPz=_Ls$ z0u!Ht+7|#Nm)PlSJDq8#pV>XA%1*28p7LxdiO<*NX|IQ$)tmwx(3I6=p{I7yB5K%6 zzcf`@wh6STRHNL)j;Yi+>sMsf}f6Jv$T!*xbR@!1^5(CD^A3*eUI{;QrFY8 z256mXO!BBD&#_SNw%2a+xLhbLP7zgAw|DM8e=SGJuVfq4iQ3p2;Fc@C` zlJ5BF4-5to|FqaYP4f;1t+36o)wma>T6XEXpa?0Lx8-IC_^0S(?5=!b$Ypa=1&-q6 
z)AS`V{h;UPvodlZVDH>b3+tAe=VRjj32)U^v&vt__wk|u&H+eIbcmEXnC3jv%^elf z*K)^9emBvVAA#lVc9sR!xmMTF6fh}UsG?(~J*Lj(A$ZFqeckkPdR*!zaAaobjgUzt zMAwj4U*6qy@G^W=*PcbK#E0K}8uA!!mIl1>C z^{2BuslUrEElCm;Q4mgr(6?-gB+2sB>6-LH13CM!))m}uyu_Vu)c~x=y-D0uB+$2r9@51T_u09Fe3>Wjwyw6f#Cp%ec!0M1ky^@PnJ z3b|enVCoqF-id%Eg_mFIl4cq;77pY*qb(blgsjnmF#!iU!wiSbLm3T;kla|ZIz#-| zyyvD3umGox(I>OLYot13hLr;luoVjGNUPau7Ho8-1vf6IBjoJ~L``(Kmn2gW|Jcbn z>|5`>#CsA93~^e;T?sZPp| zfC+v-Fi`1L`XC(!*UyLip!4HeC$!8dgy<|Us^3OVlFrGyNCn9U&y6%}Fv*SVYU{G> z6_EbX^T>L#vM!}(G4CBV`YD0gc1;JRdQf#QT$GK4eTJ@AE8KK~_5>HV1U7 zj*#PmE}33&d(@_(Z9Pd2>VH`BRhv-$?~%`+|LL2ISXTDB`!3yrd=}lig z(^#8x{O#5$(2z>N7Ygky6Pi`ZrDsuSd-;5Om?o%wXgZg;T&gdS?)Out4iT=Ud| zIk~K5#mBWjme3bC*F*FIU(Bpc(TwL5!gg+RZ|83Jc1~s{l5}_F-3DfJk>wOuSSR+= zE!O*yWn_h6ISh~-=6r!>tqgvz3slau6qwI*x&LY8oz2ZyC;M$YGgq}Dh)aSMk#6m> z0C6i}8zL(=hD67Jl=l?cOFyint19T|3J>MwTA>i0r;>j}x~_grN86D$%5YGv*7E;g zNCqfe8SLs*%^Xe>BmNIdMa9e^3aenLY-0?~Ikp2hBmZo4n3#P9u=zwm(G{FxH6B~0pD}9x$?`7s4)#{BLoV_-crQBFrNF1y)zA!E=Otl^mHcxQBizIhJ3I7yS+He||LPYNi zW)-R%Z(=qVy2rThWN3l-v;2%eSrUjfs8SDhgz%2VLX~6>`c$>6@{(th&!4zsZyJ(n zP_>P?nyBWtA)+iK^8mY;nDb2VOoY1*i)`iD<+cj8?+l|4NXwWA)Gd@UaGaAozY_kL zyUlr4xcjwD2zw2I|t_C<-*iK>K*1eV|E+9yI0En_02_=M1W@mZ3e zVF+UTjLiGf^nsjrNq>`t=quqE`G*j7<+;fR5D|TiR0o#vulK()jfi-OCL5MJV2qG!>MAfh@tVCSLE`n8+VzhBBGCk32FJm>gjtm z)+F9nrU{UNU%gR}7?3M+3LV_+26QT2QUtaPs-}I1&N)15eQJ%hhFVewfTd0V9;O=Q z4P-lPvJUz>EG0{-RcVFP1yQY#g~-1yvJ_ei@o)Yrvq6_Ui%X3;F9x`iy)KV`2xF8? zXUd6armz$LxTJZBI0|v@@+f{VpO+ev7M#|QTF(!rdFc*fjl#4%7HbywWGO%eu`py= zTIh6mNuJ`TRnQkIT~AdYMR;!yEkZrtDpcr=y~Wg%c2bql9BK5++JM1Gr6ZI?G)iFw zhiVyaNXo1=2_f;n07-2E_u!qVD61#RDRUJ|QkA75Z`=^-9ZmFaJ3- z1luGLi{#^2$ArCxQs7tLf0jfKW7g#BC$q^)9QDKFTF;WW0dgpg3UJP%cc7m z&<^VI;%>iHClo=BtwIR3U0N1RANffn>G%Z)=KWgOXmsjD`qr0)YD)&fE#OenWoQfu z`iwN*`}sHd!LUxAE1OOqVCmrN@%Crl62%?Vc99*V({!QnWXt)>2fij$LXPmFviEWV zGu0-=A81hN3;}^tahFS*p1$J!H68O?N5c(7z;y&!A%r~FG_7_yj*yi86*V9Sf9m3vLVwy}+C5xaGj%umv? 
z=9p$+a1?ZOpM;QD6A^vfFZ%#=@nIlA)Zb`y3OvFk*Ar?ay_#BSS=6Zm(3+17q+uQtuFFwno5N=@hiYk8`R94>KL8smkZS*7^^8No^ zx*ZbPD%{fTw}k?H0>*nrV06F|R1k|Z$K+I4op~UygTu9PLrhf2yn}%!&VRlOZl~KE z)0Sht%w^a8uY_D7#L%CGoV)Q4F75QPsmTY$>-1d@tbPPR@rAg z4E_72(PLj(IDVomi6onn&A=1}6dU7O{eKr?eQOx9 zk*tjmOG`{ur-1XAl$~zLHQBRC4Qo#*j*Ur3j#%HY@*ohyx|$MaO`ERCP{+ESW!ehO z`#Dp^|3au)#V4}lP&$4u)Xr4)#<$k(E#F<~daTYZ+02)}yOMEP!gp8k!=$0ipS1+= z=;o(eHp>dGG8f7gEuS5@Xc?v=(UMr$WHC8GS8&7?$HgQjMy}hm^2p`OyDxFXu5zT7 z1$9j|z!Uj0j?Mv*DB;WhTdDY4x0`wkvA9iFq9+U4zFACQnq~p`l@tUJh(+8Cxrj>M zcDm;*-2*O?#hy0a?&6teazu>;*_kSN2Twze|QXVW7WQo3EO=& zeDdVc5sPGtN#rg+kUFl&GYycKpf+TxpsKHCND_$AdKULmh94m|MCIs8340L4?5w<; zB%+edq_2qoX+E)yoF(7+emM%(v59|hI(z~l2gt|3pXl1eT~_)?@xiZANT^Vqr9s>! zNz!K|jo{zP_0zD!<-F%K0eVK6);rl|h4eJ^+8IxJ&7DN)4_RjKUN&&(0L*vi&+~lL z`_AR=tcBhO22wQ*5Zdl$Oj6Uc?x!NS^G>R-V1eD{FRyKL_&4dI=qemfPFG0_eLUU+&Kl$@x;=dek~j#TRy26HDu$f#@+NT}9}DKG z%BHqxpm_3rBAYrdEgK3?7Kn$$tVO_$GM9gYPKz(6OKA)c3>!W*X$Y$%>FFwPVZ!+( zq702nC0MItVXbCqtZ*jzA|Ar3%0*D0z^D{{=}o$LPC&K zA#6QO_*Xg-i^b469s@|mZJ89J=53Nu4zszW7)04}otA%_{v95b{tJHr;_-_sUz${u z0Db^GBsi(Yvx&ueXmDfr$F&B~SDx#qIuS2I8MuTGB)=EF=|hYZfdx{#)gN5~K4<`n|Y=>CXr z`5`ShqA#};oC3ys!}pU{5M#0?EjVV}e8U71$oBoptkEc^a&#C)K>h;^K!|d4MaiY^ z#_gn$IO6gX%fn28WIgK}KlL3hSe&BI%z%>#(Rl@2J;GnM>Kj%8imC}TFCi<*YF%*J zdPQW&;u(aUI&*(F$ZvOCP@-DY!b6>L;`TpX{{O@O6jvIV5Sbe6_CIx!_VU^-js|lb z*~-!@OeeX?|DAh-te%k^!0KBGSl~Cm$3(7%bLMm%Jyq$^dWEhPst(*jn9v#iJ-}Fy z!|&4>6q%mh<3-$3;vXEkcmenfgUCeo3eq5~74wBN=lc84+D{9(x*@di(7v{|gLC-; z_k8(ZUL5Dzd08Gmw6KaU*-8DXBw>a0W2w@jv~X}CSxkkN8Yl3TlcaLAHO?_PI|j#; zQk$tx2j7mb6={@e1u!Rh9;v}B7_$m0S^6|h6P5Ut_}L;^Noo1?obe|Jd)9HIJv$>? 
zP=7Nrv?&>3k#Qm90}}g%x;*L=7@7Gb+mvO^W06~;Rs-m2 zBCFP_wPCq~2#`Yxl5Hvz%kQDDiPR>QSt($Mi1)>wx@3&Qt#F(wEh;laZx??=l%HB0 zpAQs3xK*EnHDqD=6ZDcO!zx5lrjd+vH=WUatE*Tluv~M1FCXtq7v-c_l(>=P3T(-2 zjU~RG<-1)WqKs@+emdTYC}Vm5TW?3idn5q;#z4m909xMt8!NIUTaycf7bED6*fMQe zE6exXeo3S=DeVI8iM!oxcK6puVN>Qol-ev-6N^GsZ-}g>d@F`EM~POd5&j;78SCIj ziqh0+Y6LM4bk;|ZuflyWrCEB6UVvc876!?x1~p$rw7P_NmKVlBsZ14upYPl!&sMqP zOY<$jMtWRok-=!q#o6JG*G(sCxjd2O5BDXCq*wpU<0@hHQ!E5AbX-$dHXLjNqgbTZ@*mORLDkA^A?B=q1p8*VXFVUc-2&{L zEw?CHx{%)^%1g8*=VJ&h_=9oG&B$T-g><(ly+l=%Cftrcg3!EkyDr(0!&>FSg<5pov*B5k;HVqNMX;YO#H*`{j>iOUT#j$31#Vu-|iEclguds(>BMw zY%?u7%>Me@BSo4*Och z3nHV{B;fFQN~^F8G>MtGUl&sgs(438XlzsjLXF(_f7k2zo$I_bygaJTSyfS26^F0m z@HKsPfwVLkc0b%7kLH*`*(;AL5GYn=!VlqngaFUVq5<`x*Db+bIC!VEG@P#5}x=OBsEEvhh zg7hR$g$PT87JvZw>@o`k@RPI{U{aE(dSQVa&VIYtmS@P80aH&~`A?R&yPP7eE$S36 z!Z6`f6*jgMgpKVt+Us1BEp)k?u`&4gtQ*MVkkqsEbIC8h^*+0^>Tx;Zj_j$8OFe~}FoGe(n4nR?oHNJLv!(y!RRpf<5)2EO(Nzm3z6Wfwg zTT~TW8XXmvxbdx@!jFAJJ@?aLGV;so4zdj#L|@Xw)@~~7vxBT zLI=11TyD#>sAXwHmz1eWUb%YZEOce9J+f)T4()ybnY2bM`CeXNGgN#Mj3A^ZWMjzu z`4LFJ@be&L{^Cnd19^OS9X~ix=JK2+m3%EPps)S2&`Da!z9=)oa=iWIm9FveL&0kt z*R`&iet1RlT^0Pky!g}Cu5-spZD(OqQ9#Wy9K6@g{FAcuRespDWQ_))0VMKDF5x3s z{vW}smW)rhQN&4lNoj5&xh`_i`(}pWC;e$;meqBJ40(wvZkXH=Aw8IBTtV6eprJomU(N@J}VWgJ`|)%xNR z8J))<5F0UQPlyv#88NbYeu0$Ep%3$O1U%Mt3Pj@PT0E;6dHlQEUABKS(t5my^g z%Tk*-zXZ`?Nrp5TpDnJgDXYurKpJVv(<_v9X#UMr-{NE{p&u0Tp2X{6~w4t0sCr`aZ^r1K}X9Th>1hT{pQK|70l8( z%Vu*+$dbCKJ;_a(;GIFfKY^IT;k6!gf|EXd%+uj|M!K66>8gXi;UCRS%1>eSOnB7l zKr)YA*tX>0$%EUEaHKn^X6F7YR8+O+b0b^JMC+mnBIc1iWh3?#Q>e>V0@hUPRGAqW z@`dlGCM`Y_Amj+j$+p%RaD9Mo9h`{4!VmaRzWn!6(%s%vS=zC#CX_5^*F^@dT^6ym zRbVm#cy)yhfbV`c;_4*54b)4@FYM)gz~6kZjSec~NVYOdty5=arfbr}rp!qWLiqJO zpk$M1=!vT*Wk*PDR36w=uC#m}8()rG%+c4%;)o~?J zTog%9(kOH`4MM}&Ee*BTxeMaxb;$u_5#b;EX#_^Fj2y|@d6+d!6G@W0>GR0fkV!;S z)w$bc#096+&VI~TbfiE#M@Ce7f!WQ^VnQ2(oO6UlrHqn?r&$f5gq8%`W 
zelmndLEYPK-YGPFAhX1>r80iGkRCy!OpaW&JZu>Wa`zy3yKX-m-QG{TJU)_Q%Dh!kOE`bP?)97%PP#>*HPPy$o1MtyRUtG@v|g`l|+!F+U{ak%Z}^p}bB=`)EFN(IiWfl`XQA3M|~JI2<_9NgDyO?EB={?q9eXhR|d>l1?E` zR??Z1Zcw0EX=s)*EkpGlFA3m9tmNV%`r1ayjwS4i(flJz|Ii#^fUaI2XpdQ&!{r(D zCC1#`kGIt{Ln|u}%!}SM*@9df96O)k|Mo>l*T-FTJDiDS;Sq`PQ5=aVO)JtQD3i3w zAZx0>AiLNGG${!ZXgu6il1^}cQn^TR647SLlfwLc`N1KtqD9N-m?$~~;a{US@E+6M zEvlr`-PCAJg#b*^S=XoZoVYIG|7U3GiLdKg&St(7!0|U2UV1L5YkymFt23c2A}T&H zf+LBx#2RIKMsfy-2(t~}f=BQL(`opDf0BEXteTk;%>Kxf7>^Rpe0tjjXu6~>+LnR* zVrw(B;Xni;$`TyNYg_lpSm!JlBdgK^!zV4!P7*vdBbapx%?wT>_6DyJO2c>@Bz4rg zcWAEj+uo@sB)@_)Af3A>bo-$ryZ^;C9v5=+7!5d36?)|6nM!{ZHx-)paMn8U!psmu z9N6w4mw+>4WA+qi!C(HB;h&6tJwWC(O1Eq}e2vQy3$!)J$(4qeX9cZuY~>`6etUbP zJHiMGnS1PDd#$-~`S_(9o@<~EE$<51yZhp1>ceg6tsCZA%uvymk}3q#m8y#5>(tsX zJS?JrCs*(EYnnG}(u(J~zE#6Qd)G75)bGsTQsH|{uoVw1JhyQo=zoY&{yl^J2*r`H zu1E4eL5LqPbm+y_-AK%TK%cvq%wYLgCQz&VNZ<(mhwBZz_f&!RF5GnKuWtGet5Lzs zlUr~{II~mtJ!tNu$=X?o>(%uMM~0Et2O!5CgbJ7=KMc_maJY5q3bn+K?W=bc#yP}$D!5C z|A3Kn-s@?epU~4dCQ$R=^fZ-OWr)|KKtUGRg3tznm`?pYm=yXKg3b_%CAkQ!E0#mj zA#XYdv0oj%wB6~k$4T3Te;}xH4+UuIPsj0N=?c0AxiiMmm2?d-)G_=DP zy;nJ4k5$pBZhK52#Vt?2R%|wBSvcYxBPTS1rT=JX-+8wD)ilBGQlx{t%_bTm9hF9V zJ_mE_LV63yXR}McIk$7uN`S$H4}9OCh{?o}jj-4P`piGkJn_UYN6Ds=jZ0GkleUC! 
z{}2*u>Pt__yvGSke8PbHHuc6HH#OyQDwg?*?ZG=f={oW)_sv}v3OJBo!iDPd+&4(N z`XTY(5C&rzER>sncDD{?UR11FjfI|SPP5czXO}{<3o1Pqkq{ptqZ?_4+Y5WyR^8Q& z^b;5My}}RT2hVzC%$zy95aj$c*bz}TKB*mAT}i`xq8#)I2NLtpi_&G)t6Qr}D(v>e ziU=;2q^BuUbv7Mh%h{^3mLIt9`BE|O^;zuBcGv(OTvrtQqCk=KicG(l)oXF4rCQ`U zab+0|ILIWwP&*x7%MYhBqZs|@;0g0%6&5_u1B3i z_c-X^dv-V^SN6cSy?S%VR?a~xvI}#!HRyI^l^h7ITAQ_mb(ln%XXbBRT2WY6x8qY2 zTlp5qQuM*MN>)w^|6mg5=V*sVkRWeJR+oJ%CXVE3b5zx_rgey+licDf`Qf2-_)I3* zlw?GR=FFuV7Rww4>6XpTZ5(_*+F}hG$xNM>932`{Sqo)WUiZVl=+Ebd?!57;gI4rP zdUyIu`TM>dPw~#<-(Tto$&HVQj0+7bM>!z$)W1aB?4o~(g@#3XR{1z=|-|~HDDtQ`uqug=m2fBq0 zK3g-i^(WsO^qn8a&><36D-v&#$(>{9{Xp`68M%JNWHN;v`_Bd6es^L2*-cTUkrC1H zq2Xn*4P3TZtqfT726U^L#pD}uuKsv;@uu9J>4tJPC*3IL)`}&!Uky2ZgFbPxu0ytm z9E#h#fK`cuVxvPt>@oGFO}X2@7}diMmhlgd8ug6K8xD_08(qc7k9lHO&&{^TE0VrF z-%4Mj4UlUxYQ7=GAnQjyrKwL$OPz1ew?d`hVJjdQ#wBK7SIazVs! zapE|wCSJzd7@`3fttGL>%v$|f{rM?`e~?U6$EJmcH%|WSHNq}i7QH}rLNBeVENMi{ z`u>GgE2HA#Rp60+G`nG^(5@rbT^V&Ad#+sT>Avo{hx$t2+5RjYO$UE+pr?6bP70u@ zh^W%IYR)2F?>9SbCE@QQ%4^8V30zH7>oAp>kfsYLo1W=5T6=_mP!J~dZn~sFIN7Ys z$9Wk+GQNs=Kk>OGNZci8_4>HmLhzBenaY|yso01V&hp|CLw*5wdooj?%T7RM00j9@ z`E8Ok%^)wi`;8vo=;Xivp4P|Fl&y41JJsSjwstf;MXHa>1s59*Tp;LCMpY?SL@Zf` zVk8Eo?=wX?uu2SNN;fz0eMH1g6c>_wgT8_-lN(YweiDrq>+>>g;Cm2lhCT(kG5PnI zWQ9InrjO0nBc=k+ox;B_c7No}SN{Gu9CZAr&m|XYePDnZIw0A%>bC+Glrz3^%N7|m zyS?njY_p?!%TD4XC5j>>>i`LA0~=YMRN*Wtbu@-{jpxEim^wxQfX@!Od0m)s=ul*k=LiEI+T zjRNh2*?Oq^*tw5i1^t@;J3n#>@Ia{Sf{*nzcPpan+?~AaS@qWrRxL9(#RQy{x%Wz; z!(#Z|{5OQd&tPC03(Wfj^$OvHScC{=b}vFRb0}8>x^Xu)~H2>B|1pg?Kn^gAa@ zZFFG_7ZaNrBAhI!(}IBqfD&)hER`vCx6?kKJ&o$wNr=%Op6CL)7k}9O=4MD`5vvs%q0HH&PVHm+a zeTE-BkMJYe7ZFjXoGtLcn~hBdAod9T?8m(461iF5$X*59hHNxu0@~^%aCKf}KU@AI zKl+8Si{BIA4A=Mn8cGM>2yoJ8yAiiP`1aqW5lFI$7guwnP?N35rBVe_DI?rx_=Hsp z)ySR`s#RoTZ}5$b+<**KS=k7L$HsC$>;+KcPrC!UkhhCK}jSG&#LC6^5Xfq2JZfAW{m8C@Z~g0Ibm2uwV1rMkvC?42Zm-fi5$4D!j4@^;&MmcM@A^sbTRbdY zla?94MW1FQjk`%nN-F}ld%r{*pM3V);W9~_KS@aeXv{wnE#9R^J*my2$i?ygA8A=6 zIa?;ZhA@Xpu(?Sj%RkN>8!x4Z9&Y^J)NosUj23S)HW)U6;;>oBBxlcH=4Iq;mO*Yc 
zT5|Vs7Q1-8E=ZUl9K*sSWibcLs&z8**0^aR9+F3%g8+#ftF6q4R|UJFZe(vG5k{Ob zYx`SF8_{C=ytI^m9yv#m9fXMHX>uFg?=bhj!!I)wmKgXL1erLavza3#&mDQ(%>>Ic z17XCzO_t;)(;=+)aHKk^bRMpxXB{3L7r#PDfQz0TLO91mKMtdhAvtl0q0XAWpkVeP z5a}E}q=OX7O=@+fR=v(oSUb6K4eYU+lb@1*vGg&1&<_ahdzc^e0{=L7Q75Hiu5UYd z@cJtL9ge)XVOc;#T18CrYUG>(`TIY89S2Yo#3YeT6dRzh^z46Ei~2>zv84cwt|lp zVl>@xMfgY|eEy!m3rAE@ydB}p)d=3a`*2idn~R6H)Kkyjzw;?6o#$1L)@+~7HFRvT=RQD+biQ!#jt8Fqa0`uV@wD(SA@eJucwI>5H@ecJFZZ}-U#^Qt=`jpm z{q?<@Jo+B5q0;M~eUGCo1wDS}Q~uxZNY;45v2amJPvBE{_-}T8PjiGnoi{!H{mHG? zn^Z0QX}jt9?@y4QhMx4IPa_*T?eV(3bO`;#i{AL{rj(xY`c*Lmzwy8CrS!D>yH5wM z1bff@;LP$0XW~to^DWK$KG|7b_FdP|fA`$nRF*#QiuAIETax<@J&6%(mD00bbXeaM zspQ4;$(=fDhBchNRZ^5A)gqpZdmU%63+&*=p( zdh6ehN+r5O^fRwtU7n^dJ7}Ts+tanv$K~tQXN#1&CW2Y})PG*(+2(UBYZHhKt)xxgs$-oR z##do!m(h2-m=pD6w9G-5MTy77BYEaxI$pfZM%uYkV)_s%4=ZrTiNwc`*S=+NC&IU8YFs9pu36@OCPh#VLR;T=y|eMV4Kh%?W*F1N4=3NIZ=62a(y# zu0r7v_b~w}n#D+d;7$MipG~IuPIt#%C1rFm{EXT)+Le(THm+NvfB_vWUPd%VM2EOM znHE#-SuV#Z_Sdb^`wM|gh9JCz*m1SM;ha%%CN(YY=Ai&tSJi%xm|y3OZk z7D6$|KmrJ-idmf8-;w5GbJy=X;?5uwX=8R5AZAJENHRL268D)g1@~s+_HTdvF3cJ_ z_iLw*>n}UE0iDy{>)aVG_s^ZPFqj0q7`;nD@9<(_?(e;LDc(EG>3Y1;gU+a=%TQcp z#_d39*2p6(_mI8hNJrM!Y>ZetanVM9kdT+R>JGE~#NPs?uJ^s@C^}B+n&joZl%KVg z4i_Hd;d*{6!kGGVi&W#pug3aeBC=r5IY~uPZP7MxQN%UxA~{bY8@ASv&XLHkvqXu> z%L2$^cFFeO&Yrr%CGcOOQWvrur>$LyLocuj;7-q_laWXCI<*w!kt!5PIbD{oODe5(5|bl!J8wUk{@Ue z+=#BpUXs)N8veS}H3j)NU8rVw+68&tfBoHyzv==fUAW>0;fk+t1=4oD=lYw}HPy>I zfH+-iu+x|xy6>_leeL$+!pHnWzoB&8G>Cw$QWr{o4#!Z(IqBN|E%_=+atz0X)XUdE z=zV9D#iA9dOXAW1d@m_qQNZ$pg}Xn(Z3DyWzICXuo4Ku#PWoht|-fbHETyD?X8Q>xS%=@>tDePC5%W81cB*+GJecv09L5!x_iWW<~A>)|wfSG+WX zY?sjw4>A(|Vt&#T1m`V}6q9&EtUNI}a-)6)VHeT2#O*f2PGOs~`i7)J1bqrACnogG zy|c}=*rWF|fytU>SYh6AsZKi1Njsf;8a=wMp02>M%0KNX3?TnJ&drDE8(c4G*q)uA zo9ftDAY>{;o-7KsXIoijqX9F!LAL?Aj=%7O{b^$^bVX$@AycwQq?L$v6br)p!Mh71BDKnu>fCC_s-S&ec+&OgWll7mvU}0S zoM;jKU{ARLY}2CjQiNrL8#;2KIol|(#>?Q0#wSKS|LzEuf0O@O>YD3yHDBtQ=jA=UQ|wB+;81)6>hsVsLLu<8RhVWp{hq^eL;F%_@^p% 
z33EVWSqC2J!uRwmI8|(!_*k#;QyFOuDcz8Z)eRHXK_8WobBlK^E{V*3r!XoTSe`sM zFSrg0ch|P9AH%G+m0MHDl?Nfra-He@^N`XvSorQxk>DhPp4C zhE7qLL&7)G>u?>|Zdu!@Ct;o9`hu*LGXB4q)e6-LY2yP0)JeG9Cq+*8xXJ!DY~mj&_Euzqk+(-kEN2Xbxm`wojBk2IY&Q7 zl$l`*=VX?FZ!8nn7|YsbNR3)F}Os{e}x8iWNp zlFXb$CjN1O?v~f5)NVkE0l&42r$Sv^m=m;VSx4?(Wwtg;YtZPF8iN8JOvzV+`sVOY zxZiwwkgylOaqV?esb!0{xMmm zz{k)oM~d2E43gL)PMPh%Djb!BvsM?%r_dMzBuc$q_@Mzg@7uTYn2xg z=t*5Do7n5}_i_yx(rz_w=gy1q)fz0N{xeBfve4sQYrVW9+(E^{3iNO-IOSQ$FUGda zqmO1Cr}qQOZ5Ento|g~zwv)uu7zGi{PaV+6i!D2#$o5T(G>CVWX9l7s!1VPmhsoFv<;b7k?xrv4ecSbl14j+ZmW%}rqx3Cf7(WUq^>|O$#kErrg7v*(qL}8HyB<5EBAE3?XrM%?iDPF4uo#4;O`=+JBJG`U~dPP^)f zqdH7D&Xc9C@c)IL$Ws4l?{B zmKE)!7YWsQ5aO5-&?OZ<-pCR{+s`Lf8FaHHT`jz2Ria3Ix<6CN9wfBxQfKwR6#&H1_boHbSpaNhD-(q z{Eu7sZu`>Q?GqBh{P&%^Gh6-p|Nq#zKTlo%o9}h*(A^Ui`)!5$Ozq!o*}b+FVYgL@ z8}hT33rb0#8=Z*6fr~OVq`>%9?#IW-G4}XJC??3=J9yt^NJEa<2_q4;LTE$ZbYQDY zi~A^LKi@HXZg9ezPszrMmuQ7^luGgb#Hs}0Om`opIag1dmXx~6eO%FAlFq(pAR#eG zzTt|+p}JG*io=JlcrW+)`^VcPoj8C0>QbNtbYZJ$vjDVpZE_8{icJj-GC^3Ksa3!m zLYy=br;FPt8wRWAswAk*r^&b(9;D6vXg`dyxhh+`j0mNh5UTYYv_|$R1xIVXC?95tk#2qOuw^W*SqDumDygR z!GeAG4!-5O55x}1X2Ez8_Ewl3rjy4nS7~++j^J8j4brnTl1+jm!pS5i8VY2ju)>~O z2yfGHqXp*rb^cxP>I~H^;RxE7RSR+iGNGb70v8BC*dNY6o0C_H`Gt$#Tp=S7MZWZ< zP=r{&AF*mWV(>;%m-N<6%MoPb*lr=)(%QtWzv5=r876G|Kg!~@$|M|^Q?T9|VEx~4^^ zk>jDWvaY43$-IfQvHO^wpzXYbLv-hgQ*lncKKF@#p%Yxa@ImLr2n=2t(%% zFws9Wo~Zr~b0W!$pDGx+(D;NU>DPuOxgJaM3zt_FmZVicULHX{mshFJ`Oi=0YOHF| zehjI)wDj>~{e8(Z`@)gWq!B#%)*3aFxa^HZG#GLlr;^Rt&R@naurn;Qj- zUC{Sjavw;j97C4{j?GKWGv>=A=}W^t@Z~0m*JWDUA*90G>o}=jNV&CH8hKT6xy|Q~ z`qW>lfQUBweVbTG*U|Zx_(!-uwaP6M{z4a&T>iglgRbZ8|bc=3_c1t}YR3T@t^^V4N zD-H)GJ^z4dbELgEJtsmfoJnh15 z(|j$+l;eoKO@J3j{kQ|EQRR4-+Fm&*~0AJnDJ3{7HY2&V~n3dG%>>D1BWW(rOn=F|pfj^&3OuU^Cc^wGHh?KE1>J7 zDd4Jz_eyq589`?A1yUfx_a^R1KCDNS9h$}^A=2gkjdwrwYyAJO9U1KvP{d5H{NnZk zb~5O9G5JC=q*U4B2Pb);tW`Yb0glpr3y&9A>;WF{JdG|0ye)sRhdyO{?Nhpn?+CxU zRt7*RXs|*}%RT@KSoV*-YuO+E8z+DHD=8069)z(AqOF8vLQ*AFsqqJu@deVQh+KQI 
zcn+hcb5Gz7rG%|f1qwjqIMgBx$#w|UIBWyid~5HUdjvaX8R+yKA%%fk>9V?|+W3<} zK3DzbLLI@)4vpX4c%d#+CklP&hh3-&o!{=t59d-MkRBLBu$2rC9Wu~nEEz^RANQvd z>uqbLXj`wrwrWJ%>e;}yx`?)QDs5SF1$;Ia7iLeYx2^Gd*wzkQMwpJ-*3cmPH7hHZ zGQ%ogs1|@C`tXxbs9`kR4!nCaz_z}1n*kpj@WQ06e!@`AEij|s??;dvU{!5CoJx#i z3`6VH4BDP-jFg`8Dna)pY1dK;p5dW}9(7kwRB zgWe=KEYvF+;k6Qx@vRJuZ)NQ2Wx$SX+`|hV#<7ynjBq5;dXo^3NKl(Btd&YOEbv-6 z8DX1M)IkB}AP2-onh6dPQ`~QMA=Qb;0Arv1@NZNCh=WZOV)I*3FhEhkaUelc(MoQL z&^~CzXa>TpZ&7Z87G)Ac`QEDQ{7=G1(v_OvrDzcgSrvc|zkq>M3PD2@gD8KON8DFJ zcd!^kZO$)Bt$DjG%Mn=c*v0t%CP^X^!!v!6auOEA+R|RwEYu)fYpjICgIOk?-=|Ly z58QvZIa});xdP*1bOes#PrCk3VLci>UR)cP*TmT7ig}J@O-vz>k*{`VzFHq9V3CoO z4uXYfDd@rCQZrKuGPlK6YI!6UlBK}}Qo3$j8HNA@Kn$iKSPthHsp07)jy8>sbI7yj z4-WzdNqkTei+&kAuRX6&CEhXiY#eHs}{}_yh=vyAIb^B2zH1O7LD!VNqif<#T@(GPYrOr# zy-i)i;5u)0P5A4s!Ky>;C2P?{_In(#PqQB*QigUaBoyQ#Zr^*sDfToJgBPN24YA&U*i6dA$6mCYOs! z_drT>kN3BNMRLip_^st>$+78#aYh6o0JD_%PTmCg^Y2ZDKa0TvhTN-Y{1cK8dRs6P zRDOXYkV9r)6p~hunUkj^nW6b>bcX8vS#S;B%=u!OoyUZJDhH`wcsTlik?cYU7}=?W z&xedcbyT@0k1D4H8z4bXW~_8uc|Zew4g3_3##7mY)dtAX06m8_9~WsNR~@I&4-sVEWH++U>~+mjIM2l&%7500PH# zPVofpQc4(dh|h2vqE5cim$%mroF#dG2nBxg2%l4;c7a*^ziWE4fdY3JW zpB*!oZxx@s$rw=>oNr;ga#4OUpLtJ$pc-UULqEtEjKw-Q*XV}d=?E|Jl2nl$-6KUq zyG`d)^Cg1fk%sk-Z;)?*ihg8C-ulTGX?;kql>bX`7RHOEm$1_R#Y+G43;H2WoFA}_ zg5w$LcpF|Cb|4|b;6J+4S)m%cg3v5b`xYfeZ=&Xw;+8}(+W-#hseyd>(HSIs zzC@Usx&iEyv?++88tht%@@TWYi0vOewu9|m3{GtKBLDY8x)A%n;Kb+@$PDHZjW)R? 
zrL^kQ?F3tgNE_rRsAEIpFBByKviv3T|F*_&tKP^6mGalK0Z^cyhX0DgddQ^PtQ zqMvx?p^mg`p)V>qcVF&BFei7rEeD13R!~$FO3A(0oQq%&X}*t>)86zw?sAlNE%_XD ziM_)L0C0-t+cY=l?-eA$!#bVuW_~_p}>;&PIZ66J* zoDiF!hvAQ<-w@%BWUYVr5@om>9W9+CEZ#%DrEXyNM7#q2#Mi#Q#=G4VE?=`%OB_jP zWLN~qXJUnGg#I9@Op809jDOQbUhf!0$j0RUrrnKyve<#Rp%q<1Qo{i_b5@;4d`Ny^ z4@?k}iS9|oiiQ2{HdcP4K5*2U<{O0kL^vk(qVLWgE_#OST&D%D6Yh7|`n9T1VK7I% zgcqi>R^8t9t)1&TT;Yv_{Sn;s=)`?0+7nUV0+Jfb*lJ=Tei_SW4Yz*73?f_J62NwQdZaVSTGbZ5a4-EYf<*0~F z*{ZxEg$j43eX3xFvjc;z^f$p)OyQ8heYsX6gcXJK^SYoL^CzFUK_$1*f-_=>bte`O zcXvh$%o`OdTM# zkPFZT#zv(^mS4Jfo3{RWm?jn!ay_{?zN+l?U2Ru%XSq+bn+sCpZsua_@9CT zKrFRiln2gWKS=Ko-Ha6B1JSeZhW+4b5)kXQ1)ig2z6IW!c`XO+f>yisUx>vP%Ol!D zk@P1lC)8WAB}=~_9+6iurZsgDf&C;iLc0#yecplt zoh$KAbvz&U3K4m}>+RoYyC zT}AfV<+&yDJ0W?M^vxM~bk0w%8J)??|J{U2DpJm%O4kOT`~)7Jl_9H~y|TQsk#(j9 zeToV+8c3B%0nS;QrcKO-Z&nglO!%*==Pt5#n9>8&As8EM-Va<*>J&|)Y8RX|DI^t~ zzc1AngLlqHz<0rlsr`vCfh5ot5sA^U5jy0hfIck<`-azJ&NjteAi(t|Ye7&vj#jH5 z3ayYYoXBNt%Jk0gf^V`U7EnKYX0xC6 zrL7O_zsM$wNA$*q(=ZV@d+hDqmfHIS2+jK>>V0tLXyzm|>kG$ZRb1jJW$f$e__rZX zwBcvNojQYKl)~T~kcvS|ID-$xl;gxzKMw4TYA_C|-s#?H-n5C(RY%YmAK$3J*Nu7Q zK(^BcCU_QX+Klm7=~cj9D-~_ItLEZAd#!qYM~Gn*AtjQ1+N5))*UJ=eGqop7>n?1{KUG1v+8zZmCylCX=pxf7)a8HqjvXNgN4Ygt+2mW4HHSz2AoV*W$RT(v$L z=L)o}dxMtk{*NtdG|#qX#nxbigg+5?HoXRo!s1s7-!Y%w5f}&Uxs1fT3R1F7!UF&Q zv`J_f8XstKT~eiT3qv<0u1!RWAS0R| zE4;w+peK_5P57Q}5LQ4ztijR-w8UPlZg<}5%5d(B2qc|A&31dT&cFipJ9NP~cmmg1 zKSLVf4Z^uukqBGS$psw!d?!wT&H6eDvZ?TW1;zx%#>Zx+u*J|TW0Qxs&qmwbE{7lA ze8P06w8v>W>@s$* zi3*|DAxN35gKA2m79x#g^GrI0DZ8*Ftdb$NpI$kfNkQCCZTT-<&)`7U(s{T5p^;8o zm!!j1N`=vzXcO8&703Og!!~EK@&}UV1WNbRthj{CiG_oNh2we&6IJxUG({43S$FTo z75$v0N)XmBh!Xfdb|I1BP;%jYH=IPoYNBZ+0>YS8am0y)6SoRQZz#g+KL~As6T~94 zBdWleIP-G`N33YD2(8L>OX7E7ZQWdjkZ28`g5ZogDkxYP+Gq5Pfyhr4I#hze&z82U zKC602&W}zXpbd}lONQj3HZfI2$JB4RL;TmRK*VKj<%+_YMCbyiAW~F?zC0?&1TtQm zf}aYY#877r9>>2M_CTdl+MXoLPZde7suo^B^dK!g zu7H7x?U9aakAr}t))9E5Wv~0hc)h^9#IjIVgjhnIE=3_1rt^qJWFavoF*YGF<1vN0 z9_=8^iHZXtGc+{99qxrY(x)V5VzSWM)IpMkAdGM?7lu)HNV@Aua#q%KgI5QqU8^z 
zF&HkxXGa22vBBsgg1+i@2qw_iD-cI!++Guiqamgso;#WdRYAmTh>Ip@od#QVseM#p z37ZVnx|B1@-$~3ueSh>r2zIkJEUZ#zz8eBJnEA%6I2#|aQq>hA!bx70E<2n`!{C@t zkv<|@QcA!KJ(^n?9>(^r|Fsd1;ZDbetyBr4ILAQeAb3vrNU6iJ?5|Xh0d`8M^Sw3* zf{DTJ*Y)EJ6wyY6SWv~&i4iueimtKVRn9ebmC=Z-Y!P?^Z7#hSvo#|LRM?+!DPq5O=@aag~Y z|RozWX2-iiZJO~*iOd{hTcDOs5TJgpf()2`aF6}n(snAs=bR~U+R>49mUZauB zj$fDKwH-U&^3=Q>=m~lHxrNxPXdti^p|Vz`f>jmm-nathd+G2u>(Kc(nBGAe2UkZ2 zI5zBdEI*DD@0QA#B5fjAb%XiH2(=5&-ehe#DJNUDS7jw+#3RLF&&`-PP?E&*3E?qO zN)3c$Vg?CQ@$!1ac_s@@4oB0rV5O|DefNRCvD&vMp9;;?*mVDowXefGUPQ{x%ly-q z&*YiYm&m!%4Li2rG&w}hR>%GVc8kdI3!QCo_*lZS5rkh@&JEC!u5T{qPjAv@Z?EMn zSR9Y}FylIxffX?$nl=eMHHP50Cp25#eGX1);u@{bOu`zS>znKD;qB{gyT|45)9?PE zAJA~YLpM)}AphVUyZ^(s)Wttk=r45e^^eei1jC9Bme!*R-#jL~IeSF&Fs&i#lG{v| zUY6gikH?=SM{lRm1Nz^C(h&A-h`T&khr7gKdZw^=H@PA@ff|8M8i6ivGSFpxx4i;c z{&wSQ7Hw$!7Bvj9p8>g+CO{JqxzfvV9jFj|5<+59;67shSL#2&I?=1*n40lE!hU9^f zZyRN=mZZqxGVq#K9&>?c@~fbIB2aVAXC@`MSn8^PdD_~ zLS_ukF=N0I@D@6+f-zVu8iThI@Ni&74xjXy8Hf+Sf7}SxZfzV19Fz$ia|IW!W~eDM z`rsn%Qpm0hlhIi;P+<&jfT+5-h2hmU^MM4Rl+3!IhlS~FwMbH`8>tbC36;V{-)@Nc3ZU7cmbw@6!#aeW_i}h(ZJfiQ>7Wfy^lE_Ev zqZ~!7oSZ$^ZDWEI5IY7bi8>S(9rXwn>ya0enE%yx`|pr%2$d|=35)pJe$qg7w6_Wc zvx+>Is)dO?R!kuLWYWDdjLzca7EO*Dz|K`r2C2%)bV~qZteaUx{(`WR5MAR&2WI;W)vk)>lNu-&HeM{KIj;#pAhO z8GTVG16mdAyFuY*8N1J7`^{kmH2aSHukAl8tP7^WgTmkV^47l=JP>*)>1sMUG!P7> zsa%FG#a$`fLc-0TUcy+srfIZkRVw9Rf)R{z6(&gn)R3})(p7>c25!hy2o>@_Zk|82 z^IW(2DuCBER)!{nuBTcTS3uH4OQ{TS!OC3^q-g`TQ4?Yxd8Tp&%_$Xfq(Y|`0~#Rn zYcP+cR|p1<0B@J zO<%I_l=6qdC82Gjl0@JVdzzIA*M#UDg9@I&HVT+@hW-@o_NUUc z9U41K#M6$WS*YUynLKA4h6(>oJoJUxmv6?FrYZMB-gY$*@Mxy^>%1{w*u zyG~DWNQB4|h$Jp40#YNi)=W#HaP4vAs>$`O%K zf#8^o8-xJDJgG2<45Uh5`VoIjN{5r9{&9j*yDVNJpXXMxg~iMhbn{0RI$Cvg%8sv7UlB2T&x? 
z=#67)M|Yn=yZX2yM@APh)S~h1W3#8fKKj-Csqv*^W42o<_(e-fHs-p45PgFZ*SO|* zl!Cdoq{Kbf-F+jzYxNuY?qEJWD5^&q<&U8#5Bo)$L}Dq5e1jj{Q{Zr+S_LkK@X3d&2JrUEKib%d$ES6*U#sU`~J?d8xq0&9Vn zNPP;zkvV}0dsW7j*D6QTW}JMVi7@DBo@SxSUudxwF_ID7+UnJe_Y{=+=KVmPelte# zeM9jmy78~~6h@g$nE&!U#b6ou{=5}K8eOpSd7{E7kqOB!65n;f)kD6AERgx_`MgGv z>m!jr`~ex=ki2~8nI(}wxraaQBe7dp_@d@rcbn1m&!J5AOeQ>f@%&vElF$o5?ziVD>>aLgXHA6;w!yg`*3Z05F@rydo*FYU}iyrLBv)J*? z@&|O-#VZB!8ugAx3OkwHPrX`(58r4NYh?1#>h;Dg3i{%|x@m;E%s5Oz|7>*A{eu{a z*H0U1bK7`G@jxd3%;up?KGKE_$Y(Y`vOnp+W%4~XD{F08ZM<)xHhXNu+Wa>aAECB3 z4p-1W8r5O%Zm|Yq^5}+(j``mg$Um#RZTz=_*2?62=S-{3We@b3g9q-NV|pMYAq%K; zE}f8PQGA1joCA%(vtsD0yCOHwO?Ij(jYMi!Fhe zImz@-28|8aoPz<+?SMGfKPsk}W-5$kpmR}x<4R%($+4i#&dK%R&s(wKl2>kj(MkdB zs*8XbG`Ph4oV>E!()7JtUug7Jr25bie_IvGc@{n&USI~b(3MZkX)v2#Q8Hm#?QX>^ z+0!%yxM<$EJO0f}ZnKwBED1DkA$TM6@J3$$4OxD8Bd>451o#=_e;se+ zSz&su!<)C)SHg?U|7J4bR&CL%OMdV)JpaJ-l07$=(-72?_<^e_kf$TMrDjQl@w6+^ z1_Fv01o}(pK?^|&q%*&9+i}$W?e2IrFhT)HZrNRRFWgP6{b6-I;)4QLHvZ`(>xt$v zVL$G|PwE0=|2#9yqS71YzhIDH1SZuES47BeBq*>YY4|f8VN4M_v4e29@de^bB$gq+ z{wv-e^k7O#&Svb&I%B$>QCStf5!=q-u7T3D}4{xk*s(D+sH!>i&?!c1?9F!+U>-Ba6^+%tQ4&-~$T1$_#Br3u+) zw9oPDhs|l9=L!X#j1@Oc7+|0lhDU}k4Kz6C*Kc(>W|yyR&wjq|i1}X&L3(HGD|@k+ zTD&&hwa*~6MnU88u3J-bwqJYx=ilA>qCdT-prG6vj4h-0uecz?EXnE&TELzos3L|JL!{<;eL%wYQDk6h;e~TzW_tI_>S+ zptNDAkVO|5P zcG5#H{7pS1=R4w1W+oQ+oNv%sj+vEbLpGnzM-8=AbX3myZO^wJkw1z03*O7O$4dU5 z>k9*Yq%gLX(T`RsQbfpQF#(}cu3k4i<`r%IX%NEabO;hPzx3jrKSO2Hmr{Xv+PNx$M zpB}}Y&E^v=ufO@(EqbQWv)^m-QjcdhoKlU-FsqwWD2dG}x~smV--f?Y5`XB$^XDaR z&ZsvF8^-Enno_h)eOXW6EK73eWh|@J8x!iy!o=B*@=P=8b$P$Ff1_^FLoX9eUDR+s z8DBK$t*7FA+GPOUb=`b|IC0&G{npCZv{A3gp1eHr-0{_HYVm~4roS&voK63zxVpK7 z0@+-O66<&NUk3UB6G@#XPNZW60Z)xRp86O5dPIIVie}TH3Th>j-(6=Mf`30`Dy#T= zDE^Hy4wF6aC{(l8LH==*7ef6cd#hY=hVlQ~f?p1+pMHnxD{Qo8)fMO7oCd}I*qUCg ztM9{$tiBKXv-;w1)c42M4eM*!KBJBL#lyNO+fx0?2sV1mFxC#!d@XPAkh`xRa#wuFUGX9Kn{|AnhrFkL z8CW-$TdQ9i$3{;PCo&#nn#c_wbf?qn*0d8_BNbcIPHau5hOL1a39Xytt;fAGnnqeb zCiGIB$;5-|rugm#O}gF2*s^YHZntDl!rle_hQHAy{McYFS>AeC)nNL!yCqKlzj=^2 
z`Tx%ks(Uf;D7+Z8ZH-EL#^UKghXeB5Q^ui+w2hL3rMvf%1HAkY+3J%3w{AsIX?ZRm zP#_t)IB*HbK;^l_K0AzT;K_PdCkM#u7-^LmZB~tBU2HIn*{#3hPNPN!^nXi5 zUXTQC$ibROM7%{cIad-JJyMv*zkRuFCHAk{7%CG6@OdecF?1?PfnEpGvx8)8mS;f} zJM-b_ZFowr+?8t$)LMaTAjJp>xx>$Ln08A)1!p$WGI{t{8540>Z$&;tP7p`eXCQcg>3|?}8mP zog5?wH3vap9m0+A)b3URM|-I-|0LN(&RgwSk-_8h&g_`)l!*qZFfdB8lq}aSLtPTN zYq=W4D?a8XyKkHgLb0iPt@ilw_-yT|86%PyHs~SQJ%}2cmp|{Q&`WQnM%t(V{w3?< zT#u|j;8+v18<1rWP(wtFR;rz(`*r)Z476LL*)P&+j|yV6+8*3){h`E~vSVAp+Xx-3 zLEO&N*jq~do9#~GZYG9<#-~3={51X%{(e?-osqCGwP)#l6&=X!NQ%3mOnT#{Rz@3A zVSH(@VD5@tcZVFNU*DDgTn;aybdS)_X-y$;iJDBX+I*KRtd>I$dND5?C}I^wIhmZ)ag+1JA zb=Vw62ZI*SG1+tpYs7^wFI)m(qQKZ&aXvY6nQ9!zvVmg5(l=aktqEhiKd?5vxK(N} zjib(t`4hNFNL_+98%>=V99OOSsZMykBBmm~G-GRaTo#h>AQFz?`#G*AU3*%YSEm{F zx)sOMBWzV8Ij88gJ(kZ&JElC%9o8j$rPRJre`bg3yL;*-Iu&&tUSLUYNDYE~lS4ja zF+I0n&(6~u`C-xqy|*nX9+Y_c*QJ3#2fY`^UC;$Xb!jW-5EX2z>?K{JMPA>2o>rj4 z6BACc<`_Wa4B-<51XXwBWR1(-xX-s+{sB9ZjvRpWeJD zmo+hdY^q?Anntjvr^w_xn$$jWQqWYH{HrE3O-5&LSI~S8UsD|IPkI_?-hsGXbf4j; zub}neUXDe{`uG@r`UctAaTm+fqx@eLr>)b}|p-qdE^{ot-(4tDjyfp|ip+ z$-9FNFO5CpiZH1*O+icR984$?PN2PX)^6~YnyLY!hW)SHKuj<_-*_hvWx+Rdo6a&v zK#CP4jCo9_>487d1?+*_WVLfV6m&aCulFcu zwdflyq*JOR%jtl^8r)h`dX3JCzaf{=k4-mz1+f_%c6MMw(4CkIbQj!d>O%jz$A#y! 
zyYRpplMuijdXx3zPSc%sd*p}p9ca% zvGNyh{77fbynw1}OyxWhd|5I3Wsvwds-QKpR!cCII~dH$ER944xt`C!KOcTq2$w)s zfO?=}24rl(x-Yo4=(_yd0wl9;MS^k-Ns8J|_)EwFNCthx*GWfCl5?cS?#u2VY`p$$ ze%U^X_l0x>0Y-B2R0GHVPW(@<)uk3)TmFileD43$z)d@m+{L1ns_80r8F& z+_miScD9~V2CC*tZ6X+zt5q{Pn$wmgYKyffKJg?6K^Z8A^5-(9th4i+G(d$`oHg5< zq(JUus?z8pO-$5*3SSR}SO}myLd85ZE2LPY!uMu6&n~s@l2BhL0*CVlkKmcckC2l+ zx%MOEp3Z}(d4a*~x2x2vz`P8+(Pe019d9&LKW6zeCbZ_9Aw#!Wc|iIn$=HzEQ3Okl z66O<$FL94zRC4PXO%0U4_K2D3=G^)qpJuZL^-k$ z5_AKj?L1@uY!|yhV9aHP(vc33Zi|cAgA}H0 zc-XTeuLQ)-j9(z8x{MO6i3I%ZQH$8p$H)@J?hZqDn=#Iug<@qYjPng<7Ype3h33T> ziom3(6s6IEWYQc|AVlaG(pUH#TbL$DE;CM%=;ITa0&5ym$f<2c)Pg5s>AEsqS)!r% z-nB&Jlm1AAyS39K;h|xoR6Ltlu3}@KU8TCK*nyrGGn=+US$RA5dg0sxpi4F7JW{T8 z!VGS`chn+fxRbQ;RUzCWk@%_^ZzM85oH$ed6KJ$p8i|3Mq)mwytY{^b{Jj0#{aEb6 zS4Io(>xh#iMw1zZ&75z5pBni~cESQeT_ju5V~;WomA~trx>phAmYD>`G_YCB2?tG( zfm;=;&c{qkicN}1;vdjbi9SuA7Mq526ydHUHYk>fuKNZCuTVO1zvd?ZFWqSzBEn?-V(YVHY8J%4Xy!gw0aQGaiF4)*S)Qu?pivyLi4!i`&LIv8`0Ckb|8m5n}jzNl>uu|#?HEdB3 zMzqX{+~IU6S2Z+-uvCTA6PEunb+}=^`Q63}99cuY=P>Dz-$6eiH8(*he|Syx+FZN* zwXv%p$%SQ3gJlkzMqHpMP;A5jEIcMG2B7LuLMn-tAa>M$tsP#p;N<8;rLhUgrk`WL{RFvf2y!DJ zI>++#uyW8*bg?8cIaW{NNPM(bP*R8}nM-LKQsJMD+(VW(I!gFAkpu^60#sO;jB^cU zxdn9K*Vw%vDYXJQ{c-2xPo)f?E|9I#}sf@6llXgadfR1Gs@< z3Dqk4f-@w?mMil}W@LKk?u{{PlYxF_DWOPnBqO6fi@|@`-oiNZv{_LR9f)-S?k}TG zhVQ=3rzHQH!K~=|@0Vzk>zG@K3Nq;F((9qTcD>X-Bm@fPPSQQP-5{^zsFq?yN$0{wt|!bK$mK=e&+(Y|A^cV-tZsUZ6Hm=_8#&Jgg&DMXXL2 znsr~p(}Udhl(?(R(J~`yO@oe3;;y72Z%;VLZ9p2K-_~{CZM!gQuTPqv4(2UX>Jp++ ztI(fEjaGTEW#lF;xd}_wS7D5Sht#xee)G)CA&$_}|$@TT-+|T72t?L}F+}!tDb=Dfyh*FNGG0H+R>tkEX}c57-{bLfj+4d|C|N zFMrvj$a1@Gn|3<@y%o{rn!n#GHNL9BUMV&1=>NC9l02`8@gaz{bJa$@jD3icvA^TR zzY}Dwjh(doVLL6v77AD?8cCUa-T6L3>{j^{Yw1j z#ehzXX1a$Q_zZi@d%$EJwKZtK$>VC_x zQ}&BZj>}IY#o#C3yusDe1H!#50#L8A6a|$f{Ws?;j*w&I+C_JlwaBo>=J56OFhJ#ho7Z6?tI+R4TWEgROZ3WA5x zLAoqLTcaZAnU9|Ovq zcnJz0JHZlq;?iYdWS@$}YZ)aK9e9bmS`_@bDgrwrHx8`5on~Eg%Mf?HRa_sw_JQSxx3`~Zq6)D+O8tI zy&>?I&(}Du;DWS4Fk(z_9{KBgofmU#z2x0z4kzQ04M3-^AxhySXO}oU%k7RHEDhj2 
zLVZfIvI~nnGdHRN9l}?tj+yygXc zbcuZbp@3*4r?5O8m{i`VYDQ?n7Tqol5lUE747JS^G?%0AtxlXuTzRt4cK#4|zLjUb zaeTg6)lHgk!@P>FQwVdOt$0S|x6VN_d0v+TaJa~aP7%f{Z-ywUxgX;mV9XSnj{T9W zO8SP{`MG;Jtnvko0{KF>;h^`H=*y&<$Ere~z{I#-HNR(kUvW)alu!UlnhTtbI=q+C z$4PoSW-sIek;!$G9MK$w(Fx=_iJPDM9E6{l*llujN&t9LL+ynASc;L*^BV^4Te|Uq zdx8AnE=yxeQcRWDwuL!{d#gjkBcr1kTS#|8-}@4n&5KpE=@y8=-5haDP;?u0dcKk*L>4*dGjhwfdBugpPbT?y0<1T|HgV|K~4$r)}>2@K{MMf$OtbZn8cFW(D**O%4~=93fxv-w!y{ zr~8;WAe4_?Ocj%Euo>8gN; zaY~%iwrRKN4#H=&PqP>O8ODwC*Taa*9viwDdvVDm%~$D5g1x;ViMSa;NNRs=6bOAv zGZZv}-X_9rIBuLve*c9|%BQVwTGaevWuV^oS{hLTv+~~MJDK|Mm~fRhadzoTh7g+)urvi~eULO!$%SdW)0Eit z$Hyg!gio_FCqMu+!N3tqpQrzXZ0?mQ2iZycMg7i;lzF@K<4ePD$~m zTvwH^1Jov3c@`Tx<=?o!y})9}1G?(LbX0K%y{PCVlh^i^Js6H+P4YVhwBaTd(%?l^bs)#lJc~bbOiM0dJD)p4B(enM5LrvfTz5OBloqm>$%4% z+38t-(ykeKL<@&aieSYFpOQ_nVI{mf{2s;6-^b%?Ja*B-T^=>^yE|wv1t2`%^hemg z#&Q7u$~AtV7zj_IbMcn@-{u=X#5B4${QakK zer`ti5ZhVqYu7q0BK#L*V}4)?uanCEEX&F%Eb`3S=;s`=dgfu9Tl7Ojh}+YZv{Yz) zc!X*mS?RsXj$gfcgEbk8F1zQaKuc=Kp7Nc^X`4$9VL3BcC9g|xk6VS=-hJt~35d;Z z*rhh`LFEWr7S*R4y+irpeK-d2t1hKG)0wpKNk(ba=yN)fc z<7Ka9rC1urjYfh}1C=MpiHy=+|5Q-wS{ibYojAt#9vK{9zjV_ACMebVAYBMZ`3Ji$ zs?Lxd>k1tYP4$_dhwNk>%d;QLr9>r$fZ8fty=Lj`)01HA5wnJa9HfFOF^%hWzFz3% zcO)0iiePjJ6KN>)lJt#FMDpyn#3No&d7C60;$BHOgd$*DpZxZ2iuu>6x^>*i zkSBY^ru=S}WE0sfOTJ+}?ue0iEhkd9$ARwEy96xigahj2sc`e``Xrq`mEZh;JP4393vFm~p_39E8KAlutLq%F(Le@a@+5b{ssA5!CH}*on5Hh zPSWiYot=X=MS~r0y?S%Hs-YUy|EwqVnhTo=~G#J!o;MNnD!FV zg`jvO=2JoMNK{2&%05I;D0D6oBJ=St@W`wjQdd{f$yd!HJA5 zCth>TxPV2$g;5P8XeuHQ!t@LE4F@oq6N)9_{$T;({-Bl!Pm4$ohcumno|Hr;h9|>f z7L>%~LNmxjSKvgPn3E2?rKU<4yPP)9Lq)cc^yqXuIR%i+NS^h^?~j`ObnJrlpvkBluOIY-kf4!^GJ2GmugovMyZb_3ClZ>V2{ zKmUnP12Nq1B@t;6DTqU4q$F1}-UmqM!np+^|l z5I(r?nTQ}lG)P=^Lg=WZx(LXRR zr%nnop>oO@8-B9TI!AqhzAxG4wN1SjYgBiLWMx7kEv`Or6n=HeTvH6Qn z-c_C{|MkE#b2<_EXs&tgqGO}TtI*Np*kDORc67S`{_Fudf zrxe~dj#GSXNY2YE3!0z|=f*m!H!aM86Hn`w5|IG2*Z}QJ9 z<^ENtCpQXua$%nJT5BbKg-bK|t?weB=|662KO7o`_OA(Oo{{5yQ^x%h0-CM_EG#Ah z8fNcB`ARw!(+2sUOwWN$J7bEhGn-#n_1}4ohNUe+2GNGiu!a}CD&=!F$5Lg&M{JHY 
zyy6hD6s1=jkVFYph-b7fn-aO05=f)m>|L_nltw9kibTqK7Z|>FP_n6_lK&{=uz7;= z>K0GRWFWcA<~s3RDQfVps9(P;m1xQd#6V%bVx?7x+yUtFN*T5VSD6XUNo2PJW%H&k zTOB=1I8%mg{KzZj-8CD|E7izMd` zI0~8x9q?8~v7UK-W1WNo(vRyT6zJ}Th@IzhTSUbjPeM*4v4w90ZTv(nEd3&^6#`A*hLCX50NAU|7b z6-J;fU^2SYf>{mIMgv(x^|s@Bz3upUAKA4A|0h`$1zQpSX13#JW;?Dk+i_MfW47bf z?gYF{bsH2q#WWoeW9!y9c^Hb74K8Cdl4(^=JDNqnPnYY1@LT{8OQJI6;zr7pjij^a z>bi+KfKk1^Qm-^!Lz}TiiOI=m)6hD5cs;);Gr17-B)aI;uPm8a zdPR+7<}HQtUmLF+sA^rdb$?5E)8$_ZJ6Sq3yVt~Y%8UZ`0JQ$OIy@JgS8Ur%n?QP~ z@CsfcNcb(h2`xc`wp3Rw;@=n-XB0O-K)(~ZUL;)$X-@u{G=U@lPzJ7(lUFXBTzp+< zsp?$EjYhV|q#egGq>u?6fOxwOrQjCNy%JdUg;rLW6y!hc6x8QF_jTbE>%~sfN2uzw zt?DYw8awe>$fe+CNP%UGB>jnz53q$1+|B1$<4}DT43U&leZJYV^K|Wn2e;vWI1ICv zhnRqW zS5_*MNpe(jR8Y9D8eHLSIX?NDv$F~f8@7L;TuIjZy14sixD*19+fQh@T;NrL`Do{B z>r4%xxV35%$Or4zJviR|A$?gQ zzY#68N8A2d6|Yoi}sA|%UZ)Jg=URfQ&&lv>6IW> z@rwsjF32DKeH9yab>mt^a5ESb!uXQg7#RurOJ4r%Mxsjx8%0k+_ILgGClrf+EIFo> zKPn*EQCSg;Vf=bngl{l`deOJWGqPBK&~=VG{jXyia+66nG?ttcnWX04wg95|kOj1F1X;(hH`> zzZxAh{fqlk&&z-LzqMQ(L~%C1*=7u4h#rIZZwz9TIEdf7i-XvUEU^s?@u*mnIfsBV z_I>b=Ncn&MW_Ts5GlL_2v>P?-%{Xhx#{D51_g@M~PTjcQs2}(5*|-be-#$UxpD;_J z+2%>N72N@UYt|iKyQ+)mFS&Goky-VZ)ags}YWhSWSbiz^P#OKOl7`_wmUF2$c71YN zaQn%WU-nXvY;Cn3AXz(Uu%Or=IE{dcfGtGL9`Ff(Swn}kjDB_*4y+we4CzaP0L5@c z_Jcni2pTb(Yfky+I>k2#3Tgi^d;G@_f0#dhepoSxu_g?{m+^n#jPZv#UHbeazQpHK zX1~y5=D+;)zJgld^M-t?M98MENGn16K0mE^Dl4@79lwNEnLW-2QLXu7TzdoYrLwwE zmGL4ARk4qCp)6w}u&k|&2lj^Sv5>2e7|$y*{Uq{nRWiAHn@m1#yLfTKxR!WBeO&7Y zqnoJ?22&jj#&h*`K+`&HH@o?KsKQo!)xI|7UpG5)-FWnS^COnVbQyi`#Slf9J}OOV zoG(p|V-P(k#zX_6XK##`;3S^Hgi|7XBLacgs11@t1VyMx7=Hp`v)(}c^sH@Wn#45J zKz$6=tn3G0Q;)Cjzb=2Q!ld?A2q729o)|bctvZI3@o+bK2rB{O35s%Kaoj>(p|%j4 zol+BuYK@<+LS_7n(^RC!mTP&S%$%Ze^2ZU|L+h!<^(C65NDV+{^8^n%k0TX{v4A2X z(yQ?lfjqdDh3pNQ{1^avYCq$&_I}Y$8U(pbZ(XFRwy%2vnsqD0$T^%Hh={L-0h93{-}heTz^#G(XC#LRhVRG0NBywv4^jS4Jib4$0Tb4t=L$< z7~fbw{&DSRl32g^Fn~})Mp02)>Ry?~{Fo(`%} zKO9@qW6IcYbOf#@N*x)Vcrl+Q6q@DTq$4lV5jV|q;lYWQ?n3TFzFWF)nh!t#s%#I0 
zZZ-(bB7_dsh;WUk9hO13um^3w|0^ZAAjoQj590U~j$|b2pHa&l2TCi?ymsHbay6wy zC57R{>=CF2;fJatwREfkr%F;%JP12Up&*R{1-alIrdfA{W!SK;UnSgBGgkr zzDQ-#!Wf-Fb&8{w!CR~_oosDq2?K=zHm7zV>FUQ5Mb`al*G4T4-}XR^Qep-8lDuWBhwueWaaP5ZhF1wUrQ@ts$L2M%P07vE*ncdJ0&(XGO0)~(p_ z0~%nAYWuvW!ZXh}A(J}mQmUlFs^oIQDP>J<=DDC-YbaFC6($E z^=U0ywr?^@-qgKm6SF4rCS95gYm(b!N0T#VOU$%p31)?6SDTtOZQry<(`ij@n?^L< z-1K16OXefZCz>xZ_cqsY_+VFN2|nE zd9D6>|NZwnzCZ4LyZ61`FM0p?`(MBRpmnp>BU`U)?b|xK^>?lRYBRpg!Zr?VLfe#B zm|J{c(bM8{i^UdeEcEgg@-Ffn@>6X)w;j{gt!;VRLv3#=c!X3yPl z=h$v@yYJfl@o}GzXMVi;W2cY9K2G?!@Z(({A8x<6y?y(Q?GxIUb&z)G+F?Y8#13~l zJW~!;E>`*|W0m`r7nHYEmMSZirz%htsrp{^ykonL%R2gY%;>nKUgi?AD?{m zN&inqeKPA4&rjk%+4{-ZPhNKF(y33UQJp^TG_TW&PL7>6c8cg!+PPil{+*|EUeS4N z=MA0Jo#Q+2{IvC_LqFZ{Y1pT^pYHwi`lr8k;k$I`GPcX?E@53Vy7uWhxvO_qL)YKB zHR;x^+sJOqyLorJ-o0;kpYB__pYQ%_kN11@?J=W=M~}oF&w5VoY1PxEXLZkuJ#Y7X z((~WFKJL}0m#$ZKuamvL?DbRc4|;#zdqeN^-rIYh>rMNp`b_B)-6yqAX`fSlzU}j4 zAE9r{zN7kP_TAR^n|>ek8`N)hKgWI<{m%6Jvwyq(3;M_Oe=?xsfX@fS47f7Te4ujR z*n#r~t{M1v(1t;NgNg?29CTyQ{Xw;ZRfC5Nb{yA?ZUZht3@8H1y!m?}yDA zwr*JLu$*DV!ww9)Fzox`eTI7v&m6ve_=(|%RAKXc5?IWt$y^q3hjGj*n6=Kh)2XFi@;JFD%iL9^!1+Bhq0R{E^Hv(C=CKI`9K zbogS-7fxTOXAhihJ3DlC^6bsCch9~zr}doGb9T)+HRs-(mvcXuJ9zG#xy$FSpX)t0 zY3{bU=jV-`_r*M@d(F$3cX8g2^Ck0@^GD71o1Zejdj7)&W(&qHa9p5XP`%*Nf?pOY z7g{ZhT3EdB)*`b-0~gsZ3R|>e(VvUkES|pDVXy}0?J-*Ch*~(>E%f4IgxIAF_zUB9=T3K0IEwpm9Qd=ciRahOh`pW9371LL2ww748 zwC-lT(0ZM<#(J~$W$TwK?N<7)Jht+Cn?*J*HgPs(HecJev+ZO%%yy2gr|n+bTdVr4 z+Pvy_JDJ@eyD4^7cJ6k&R<~a>V9mreS!?#MU9?tXKg@oC{gZVA*Uee?_4*hGg~LLJ zwGKHB28ZL03msh?qa4pTwQ^eG%sY2;wsrP)ezBqDhJp=yHr#TNxpZ^++{M-9E7uQQ z!(5-ZHFay@*2S%#+hjLux3zAbZn18~Zrj`rRn<WPh-9x^mZS zH>JCa&wAB+^{#Ql#yl6%HU6H^^76>5SB~AKphr(5dp8|CyS64`EBTrnFvP|3T21Z3 z7Xu_Afrywy6LnGuavO*)HX)fe#yk&~#3kxtfSXMYNmOI66cH94wsgu=*9l}2nN_gh zpm*k`bVtIEoj6Qru6mD-I-8n%=)m|+LX*XxFZP<2N=GUiJe_iL?o1RfRlXNZ+{%@- z;}d^j3?xE=Q!|oNQk4+uqRQyJ8vCpBAy5<*6P19NR&qKLsFNcTd=q%#lXFycPLiD% z8_y&+)aicM0iq;iX_}8nr(-1Pfk}aW>Yza6c|%iMhqU9?_ap(y(IgzW7PbHXqwPK5 
zqpI_Eah07pXFb*}?&_{*1W_y$5djepsZykalmH>564HAznaT8WerNh*G9|tDgpdG% z&_NIc6%oOaaoRcOSH8b*dA`pBsXvH3mFj6> z_ySR}K$fR78&Ggz(o54i*bpgk6?A)>0eIkPe~DLW&LWdNOUu^wV~da7vQ*Z-U&;~h39fG{4T z!%Q^9oNs}E;4lAUw#=--y<2ETtPtoqUkJm#`Vx46-#`7eI1HT>acgrU=o(6J3HNmD zXKP50v1%*L6h@OxNi~I4p(bzF@yNZ;DjA{8n7;?AzGXBs{KxtK5M*C{_$x~~G`nMa@+y_Zi+0e=Tb+Fe zpv(RF(?_2g_1w&{EXlWR>e=8c>ddGupqHmiTuC0ok~`LN>Lbug-#uN{khdq9ETLz< zux1K7arUbpDwq?m^_)F-YRznxtcZzNnt%y-Pqwki+(t@iCZ#5`v9YEG+;=r8DVdom zEK|+zv~`eXa=_XGXeYHUqqYu#5MRvOXehi0gd7yd$au5Oh&f1JNz&jSci7)VCSfP( zl{A5x^Qoo42WnbrG3h^KGLzrxR$`_tMiYvg(F0$}^4CTeuI6Y4z?X}HOuQ&#`u{2y z?*Hu5FBRnDDd)vuB{cI*`5UJI_xX6-2jYF~z->u2@(VT%5b-srdupC%Gv&;8J!9w3 zSuF!pnbdGAZx%aTbJ#yxo_DL@ez_`{0d!FMpN1 zk-dM;&Sl<-WI7Rte0pPRN~`J;q>bm*b_-zGhR4_K@WactHsU}|VcXojCxsQ8Tf z#6 zko=u=09&0%jVAtfg}_~+zAnD}%agTAEJUm8+FqdAe1$mb+IjisY1!7Y`VctK#Uree3h#}L$*Qlf3wGS| zE@x(*UHU2eBRTh`?>w-iHVC5kFQd8xRX~lFWm-e+`5%$ zS|exzJ4PQUX{zf;tw_z@s*3yy{kN`?xnQusocFU*vAJO}r`FJ&CkE!zO!h$kz^R@?}+gL7q}&p@fw>S16{iwB5s zR`zcP^_l_eF9QrwTKbwtR6vg)Nj+t7<@YyHZ6xp&+u96Ggr}en8H3$(&BR>u8|{3b zO`@9bv$jjdY$VxJujuc90Fd$jyA8kylzXL9+asYKxc|75CuHEPY|z3a$vf?@W4gM? 
z8Pa)m;apu5;&gdBA)Fy!YS=suLE57-U6n zj~@6&<}KnmH&|1NyIj%VN7a)(KVfDQxb@aVbX}~sB}j>$pU8$Z1sRwj{?Z?k=}I_0 z50`sBsS(w}3NFZ1TX+uq2m|Fqm&GO=D?TO6L&x@$!UQDPHiO%BlDJTOQ5*v;uPrz- zON2*X|L|ks;W?rg4PFmU9`~g9z$Zcgw|uibA?VQR3V_@#?|=X8W1ol*D#Tx|JnL24 zjOOvM8TwQ%Od!M8Zy>X|u#jyg?XE^|aCBhY7li(?T)2Nfra`070ZPPa zjjN-QH6k4vE_jegyJ=;Ys@Py|b5Z9#+IJJoNtO-w1!5D0)~jy+|BBe0uhg zZTpttq)pn4g^}jZm}%YxTXbMjiIE;h8=g zwt|;UgiD~n<=;cvw{DL|vTZ(DNTR!AyWT#elrRj&ISKh0INkO8 zRbSBR_hk8}*0-)I|EA{R333st$MV04qge6G&4vmon`%;0WiPfhN~qn>gx{FON9JWd zCqAZN3W&i6U?*>L`v`Bhw;pWXdMf@?Fmsdlz6E%Se81p>$rb3tYnCyKgE5E5G4kf2 z6Hr^6N~*+XU7L7x5X0j7iL+cGY#cnl420thMRdXnS#sl+?x%6+kja=M^6bG`7t?&^ z{Y7sU)Ek-%4RJIBl^kj1%~>~lGD#sboGlrA-pgMTEQO zB5d(jHi`H>C60Jv>gWw3q}*4q{rBG7P&2RL@&R-S6%PET%$?`7_(A7ub2`yBn%V!6 zEYMls__}1+lF-(hj)d!STe@_K16*Y~wz}S8w1O{Y(Lj#4vfqXrG3>I9ZiE`CbXVXz ziG{znS-LCcJaL#x;%H&Rz<I=j{nx8LSQ#t~GLPT{!$*A1_(?Ejmr?C8$oE`qBk zBU_t{Nmw*^^)p;WB2v*ti(b0=69ZesXZqDL&H+@flWsaj>cUrl5m%4J)wf7Dd=y+{aa<9dm2fVu~ z2bzn_bKhi~WKWac9p!sy#t~f~1;|_FiNO1cBN;2Hc^(+pr+HqM)t;h-d{VF|}^w>)ZQ$7?{4JF;6SvsI`{4mY0(07q(`s0r?I?4)=U0 zOmWepsL`{xZerJOY2T+{-t-vOD+a!(rFh;GsK@1lSv~9C#=|kdzyDGA{o~(>a>aDA zd4n<^eaLUh7-0*A9mxAaT}KXkY9x(_^qSbZE%p^K-*MKZDVt~}Cn+{@HMAJwYce}k zm1s9dv?TuFyT{ z#W9(v>GZPPhPsNE}x4^F1vfBTpqXHSvcZM7{lS%BLb zg7p6^W5khQ>5_$?PE4XwIyToy%{aG@cwGD==A+Ygym5DB^Y+$5D2?TNTmVgdvUl$;)-J(|3`ORAHE;o0q1^h$U`0)5 zTEW&EbteFip$;Ar9~|zlZG z&br4HkIvhF5W`>3w*+fFCDjc($j5|?f;CCwwiRaRi?ULq3H=QDz8VfMzaK|U9m3i+ z$rGHfA)ps~iTe5Mky9sqt5>k1iW(Zd8$P)P56pq&Yb8Z4k>;chRcT_yax$4jE>*_U zobZGE+O;uII`}K$97IEHWlKv&Wh$GLorY{c3xCK~gZ0rqcn1s2C_a<^VOU!aNo(sh zuMq|Ms-f?%Ig)zx{Hv$_&JvhvMjS1D;(}M&TMtp2bQ2e`COl=jKj^3kpvj;vzao`Q z&CkVES~2$!T_HQdEEa>$Nq#zYSaxA}&q^#w5H;jsi8S!9*eb7bC&ZC#5_^yqe=oc! z<7_4uHksq>@nX-({XJ+Y#dxJy)Fc&-{bHL+I!?;DVhu7kxvC<`u^%qpMf$U3xb{I? 
zE$F2BtescJnDqY-+5i3E|G$`~f>_nZGku%LEV5y9>SCJw1LHaT>Ta$a8u1&Z{l9zV zYp?dlpPv+dE8O|Rx5Dp*KaBbX9enpb0j%J?ECaXmP|dFT-aVB&fS}u%UAMVzGg*$K zE}pE;j?CT^nX(R=jU=)%tv6#AIRRV1pa#nyu+@RRV*g-J6TVX-Oe0>IQ`xinovX(7 ziSNYUz}QAb=4R96^kpM<){V}$frO#QZrtD3xS!DHPD}uc$enX1&s0E?ydtd|hL$H4 zG;?!XGM4(T=hJuKo5J7WK%N0a5S?0CSXEYA($bRDxS5?r;^(V2!^Gc)%fd;%Y&$QH zcG$Ib5}q$Fx8XO+%6CQW<|+wXyxue9Epjlqsi-8eCB_rwp_!Gw`Jk;J?RDLkB(K*w z-j>AcytwqBYx~2XXDek&)kPMwR%f=vHf%wa6RR?IK3Pc;LaFuHE$I!$w^TG!W&S`( zW|OrE(bKVsquYP}9j%!f5sg+11eaDow`Y2pM_x2oVGf!>Fsn zFpqjB;>B5$&3g{8%t_)f`oVrMQiQ6p%4YH-x9adZifLrqF3P;sr8`ecJ21!r)sTyS)M5ITMoJJAL(_!{&Vap~U*@5{>eHT&D8OK0QK*>FEM+47`I zcW_nt_%9=C=CZAst{tVm%o2R)jy#EwC!#Rc(;eE|aJ?3D50WN`EK*ag4~w zB9qx6%rYuBenj{X-se(ypE~jOoJf(Dyc8ldqou>^3y-*8Px4vu9K6qo zph$B4Sl)>}MFU|fwhY`U(@6<%6V1f8W$gn^#!2V?(})Bs>-In$s)}ntVm?1IAtQm% z8QB4(MK1%qn~`%5OX4u%UO_453YGXP6tabV!d_&Fa^X7%MV*)`9>F$h5Vk@Ai5DzF zB8Yz)z;zIhKf7oFJz}&_538szHn>gaUVdeM&lKL`Dg?Cza`*5w$|8i@pZN0UU%HW_ zEu|e6hmp-8xyThhDc-6#>YinL zb7cXy-y_^|qQqV8Mf@I8Rpv+mnFe{PJMmMGVCS2NY-NFaH-e1+9NhcKlA6#f0>9{yy>cugB2hZQ^g@ACgKbX2-SP z`*HAlw_N)@^R?d_Cdv|qOWzv|Kz<`{;6wJxJfZ@a%MbzIzy6rnkXz?Hwr8Ygw zs*>a%*&D&hyAMRE`;Z_kqMlPp9kLgxQa)I~W|G2az|Cl;I7=3Egq#A?>Et_I@KP{v zxxfK(R&mT|nkg;daRU59`qv1al$MJ33&T0&0_tij{GIsvVe&@oVQ|`1w>7uZ1tD2_ zMy?v(KC_C<>qI}{N|KeBlz`GvCF(ztcoP<7QcgPRKaz4&Q&n9(RGqdBE>kdQ4HX}b z+a&~37gC#DQfhOBU0ILRMDTTCVM#mqbVQS!OHiBe^ZKNIt^9H_{xQ`waXJu>@&ux^ zarmJ1Jaw^M`=kPDC8Ze@7_ss#1-h5kayY+1VF=JnpcixJLL)kO=>X;qtSUotzAl_X zKZX?gB~Dy9P%kTpP(_coVhPpwVT-NoZ4Zm`|;E=jhQQgJVyeg-mQK zIqp7HdvyDyOQ*kw-+XSx_R&?VEb$%#ZMQmLm1vL08B_=+3c~{}MvibuF6xO!vtj0z zU8{N~z_Kh5r=ig|9xix#m8rrgFmXXs&`kTPW#Uuc*f6m-PuFL!)MQ!zURi`-Ml3^L0&ee>e%#MMhg$gj$-~+tyJy3^r z(H+aB{<3+78Cw@_T{v&iDAWhmcgA%h)sc_*S1#=pQC?Ptt@60ERZ3N|;^OAcF2sIq z8}^_k*Sw>xi!KOauViO!Lt1s~o(*I(Ns8Huh-%%w)=b5L``}w7__ruqoJ8NhrUy^+nig(!qdXL6zu$Wa=Q(;up z`j-g3Y(lRvN#Pb6DaP;RtAtyYUV2Eel&ndKjX*2o*1lB@3kf}G!MbIsq3oub%JTY< zt|?l<$vvKyjx%6zI$t2(u85(;X;u+~4|_lkb)OR^d&xqd5diN+%U8`~H*9I?Q!oKx 
z{3pRW?EG$WB*Tf;2kejA&OiBUKK;^Vley*@%LaB^mR~N6x)9zQbNy;mFe%~I>dLRB z;(3wXU^1A@`NpDc3zkb^Kx|4Hy zo@K>59)DE4PjT%b;HrkI>);_k=Mk#`)!%z@OEcT%9gjlq;T=-%AtT$J*=eajKP&n1 zO9FLkftqt7Cq$nSew*uCAX2iF&^G^OppD5mjJTQwZ6 z?Ze_-Jdgl5jO@b0X!O;DWSg@ltkfYU%qkuJDyEyMBP9Wsn~a{CjHX6C)C)uJ9Q`}X z9E+<@VWo0CvMzuAX^e0y#NmwA%phPWOJeLK- zZz!!Ry;I*yUZ)w`#B*!TqoQ~2fbEiWvs0($Zb80iYK8GVB)rDIv++%I7oXef_yn@P zrn&P{C7#JiA>#)av@zl+?3vNRsG=b9W)n?jJ@5)V5g+)BMuG9O@s&3(Flq^JT`~Ee zii>{o4yD*tm z&8?{H7Zv4ZZo{jQB(Wos(6J;tDG|F8DrwhzJ|X=1roqqWgg<}9Ab~+@XcGV3EwIu( zD3}|$uR&w6LJhi5VLm0?|3yppDeuZ7Xeu&1zY-Q4I9YGK+gpL8TSHb^ zR#tXF^14GQ`wqO@Cah(Np9>JAuvOG)q7X}{ zc&%YMtCgeR1Y9t=$nx^*N*&^#Vey6+mcYm1O%{V0DKI25xf$u=u#rX@16UCrz}0(V z`}XebJajmsGh#KY4$HI#I*zn_SHWW$Q2z%;bOPtEFt8RzP-DU75OidX5tHf6Q?0VJ`-SrXTWW+RUCsg_dNRy zSr@k@Ve9VfI>efchN!d(5$$^}vm0)K+hM~W@8q2o!fju?_baI(4d%u|VdP8;`s3Hp z1L<<57UHK&|7f`de)OHQkT0G}MCPbfa3hoW2qMb>k@Z=FX0nzZsG_(;jbSCBG2UV< zfC!^Of-qNjN*E)=utfON%1;-(Ml-#ks-4I~Hs!a)CFN$Mrc`FNvPjnPc9Nf#tC8-q zfGm3jjm@+;8>RGd(m5{>fxYrR?ET3UL5v;q>+8lRk#nVMgmuoIV%;8Cg^Wl^S{W@6jcS1-pE{x}w1{9SM) z_Vb_%u4wK;QUdsQZFOF6tal}OLE2rLxz#C=I@Q+5g(yoU`7^85_i|^+VQlJ+XzJpe z#lhO`dm8qF2c|>WfJ3%K{lilY|2vcuAfJvoOq7B*1qetjzgY^(V|`Pu{woOru%N77Hi)1UqF zJ%oL4`AmF})x$f}Lt?@vY?b+aPAN~`NCm8(Om<)bT!YjwCvJuCWS?I^ld%BDqFpq| zOeVC3X^7Ejb_3Z}RQx*GOS;sJiRIZqk;hM@5;oB2rv@B<{c)9j9x&CYza65&f!FI9)b1!%}qCRF+i;;~G&Qf0PLGuavAW zSFEv1!l^(&(QtP1i3#axWFx(*FY=W)J6^8@eUh$Nr!g7}BO{OjjBJru<2vfv8$M_` z{vP?3z9Dt!qR4jsKy`Or@819Y=^@EaFNU8!<|__>8U$fsKmAH&Nk3Fs zdO-M-orf1sa+T0MxmtO;;eAZCI!}pHrdsZL@4bp{K#JbWbxozK<C&}XqD z(YNo!`6hi&e0HI9l+BR>AT+Vc|%#P;llr=9Ftyjd8?Ba}a7@E6(eMoqS{ggEJ zxV$vu)m3K#6^D?pO|cuv(EENFgE)l#uu45yv`(pMBqKuwGgnCp* z94-$aW3*pQ^sIPM%39SCnMaU=3-)$G{{Z^60h!Ef1gW=Yi5~a z70f6~L-Im?dr5%(S=jn8rL`%8LB7<-{(~yz{OPQiDNo~ya^M|B>|M#uGf!BB;KCpS zif<+y!%|X}9h6d?`fUclM;0|~+0k5C+t8R@lLWE|l|`A461wCC3-o*Ykcx96uT(JO zS@AZONvX8b1B(->W5N7?z0gj-Ifbw>l8n#wYImp;&UX7}4~ zFW9vX^%NaT3W2Y+?lQE*qq4ktTS|(y(3EOJ@rHTf^~k+JSFwvF!9rebi*#@6#8&HR 
zZfq>BbXS>3IqfnRE7#B8`~qyvObTr1LBFjyzzVaefZ3x~aq1#m&skK(3TNcCjuMZ@ zQj(z%Uz4Ys)drltX1md$J1zdnVg-``FUMJpnm@mU1Pm zutr`Iv;|7@Ijv%f@POP$DwKs>p^_CJc#hH(Kz*4}2SD5YA&9J@Azxa`Rrpxp{tqZ$ zhz|t|_#%b4R-SFqN>|2p%Q^b}=j42z2Pct>cpZE>{r(438Sj+xe=f5Z$>%BW%eitL zvUh+z>o^tt+!QLGGwYZl2Zvu9Hf6 zsl!#F5bmWa^fon{D^DaI13+9%QBpEs3G_0#k|go!W~9H$yq-jyM<8d`!{~&A<*vbC zc}2{YXdtA9V&#nqMR9|%^5#8hJK23?cXQ(bI?pdF%*;r|HX@lppf7g5(Ya$gtu2!+ zdsze%{|uTjf+8HjgUu%V=B=+6hl&7AB_1ah6r4hhN9S79o#&%@s;zo^d2=0Yhj%FB zye5a+Oe{XJ&_!ta5I2)|bteoyU)FBQk(d)L0F%m+OSlM~9X zstc6YvQEltY|cTzmzuq4hQY`cNq7l}E4a6RP=D@~wOyOi3-j>#xfhamdEHJY3+Rng z#YZ|;*OL9DySdWqqD@q4P>FW&vdT(jdHSZcG0QM80WQP}4?GXAC9gDK3t^O2QG?5k zR&x0o#S2iB(&Td(ovdPu%WwAu*^9zoDPI{M3gr{*jt9=mgVumbZ7I^Q%)0$4J5O{U z|B`K`;7YV&f?B-l+Uk7HtC#m8afWQUzbFKBqh*D@&QgG83rB&lWcI z^)~Nr$_iy=X6L1`MCr|`)92~cNbusYO^qr)t+)hMak$bBy4V^hR9Ur3mRZrU@gQ4G zx;+j18`k#ds|kIuz4vInwp(d7nGFh}^`VSQZ%L1jiH_;$U~yj{#-yutZZr{SvskPp zBWJW^CvVvVR7-k0Y)fS^fJ3y4vl|LEQJC0KR1RNM&q@*(IG2S4d9BS?uK1ZM=E@4y zX0@7qSls$Eg!S=9wns{zGllyE4B2gwFmJT^-Syb+dpUUhY-shs4dbe9v+1(pKs!WDp9(NhJBoNx2k zPZ0WZx$q>{m02Y|56oSfkS(4jvTTxLU4Rr}1S(c^@>TJ!W{Qi+GrtIdY&oO$7(LSD z+wFEIY_!eeZS816{rKe9n?$!lbWocztrOUSB>6GhQT`aJwnw>RNUJNU@!588@i{2L z{nhT1ET&mgkzJjk&-Z9h7bGsH!_G@y_U>)}7)8W8$YSyCe8d2T_Cz6GoJi{x@$gkxH^N^Z267PTQYgdR_U7kb3b)P?$FoFyW&>?V(A(`!Ey~tc5^zP>EUEN7FT16r8Y>G!fmFXibaF_$lLM>Ko&|u3Y z=mjn`T3|l(ZfiOFAM@qKZr&>i!MLnWTI}dQDO)PsVRK|7ST$;lK*PNteh&_Kk&k>% z0)z=w{hdP9KqpcwF$n6%AgG@srJRLWAr>g#@#0(r8endK^U> zcs)Qzx0RwfNf;rdTo37UI2%7gli#YVEFP}iF;@P$HF-MoTz1$q(Kqck{`FcWt2;4BFZx_IC zUey=8{F_28wY`KK1k;f)-uLY6srfH}HW|hyXmxOV)!bA*(k(6#=%wFi=w(!l$}pPE zO_CqQvOP+Uk|W$vEZaQli6VEG;u!U*|C^pB;X(f^x~}5h(1qP;Us@F`!~|}esx>h zh#fiD2j+JCyA^kS?A$KNckk!o^Shv>7dsuHrkbJDtgq*Qx}O-(?3u8 z2^;FUIZsMi?63P?JO53S_>_tzI*Wy~tQPN$EP@;K!ur?0P%tyzdFdD-R^32W4%fMe>a?07I7a>RwAFx+Ejs$`2Hl zvq9qVxc#-7GLSOgjdZCv7L1V@Rk{FjsS(7mNBUJFpbU~Q%&TamUWG?oh z_~<%`D(PX8P^}AML)JR^E}H3off80e{pwPr=eCSgZ$^OHVijcbuv1gOO2!bKQ)e@7 
zf6B7Nw#g$t`1Pz0$=Bqw&yfTp-$#6tS#NThZ8(pOX0uU23VSz_u_(Jv5+518bj_O| zv!ub@NX|gkLf0Cfo3BJ3SRz2=O?FL)w|m^>g@PPu?p?O&5U%(gHQfg;#R+UJX}53R z)4p@x54`Yj$PzFk`ax<1b6S#X+Xf2p1#^U1!&zoV!qml3-~6L+_lip?w2=aMZhH32 z%&D0<+u$PL4+y=QH06OcaFXyyxU>6bNd~QC?oZGQy=Nrx9NC*Z!-7w}A@)OOB zYMQXSv@%$Q-m~c~fW<;q5r1VHx<>vT1P-qhcfUz9yvhHex#d>$yCKZbaC(|Ec=b->#mRy3$hAH`Yu)-4$)`$iAD;3)D9~0KYf@D? z##p=%U9k*M3T?`QRSTEC`VkVZ;PHAFA(}U%O~-O_3uXy4&vWK4e1?ofwPuJuvH5CN z|FSH^)wWT|LLfG^NU>=pdkOqPBzQm^ib5>NC9f~UUg;;hx${tS zqw|0&_7&%W8*(1X&})K%Ye(3H$gUD+47v~lCs4)?64@IN3*%!3wO%jVVqYc-{{gA+ zU+$`L0fre;12J)>@c*8D7v>J4DKNcZ!<$1rlM4Vre;X7{ZQI#Pauil8Hy-4ypVO#KTR>H=|8S zeOY~G|B(93^?JH~*ITGP-*_%k?~{PZnQPNo^d>+my9O4@G)irOwt#p!FY(g-@f6<` zbX-8{<;JgGfBX%}>oN}c2gI(ai=>6pfyJ^=x>E`fb6*-i4v4t4?b?u&b0I@Yw^h|P zx1?7mBb&Q%(Y|>fDul<$KH;&}FG1ujd@Bp=@bCIiI`QLy0ZYSp#q!Y2L_dM0G!0op zgH>(2_eOJp+tbh;O0+aJx)hk{^WMAMloTNCOpZjp3rvLl7qBO`?R`&5g5!*$8RTl2|XBJCZAU zfzW33^Yw{$2@!y)-}>cGEIGgMeeu3JDd)X2_$eCJzJ2&3@j)O#m#f!DMR%!#D930y zwN3#gI!vR^W3h7%AnpQ$ui~Ah0{zj!AJD{k_<6t>)@E#oN}=O3Qn_s$xEyuBqS(|n zZ9rP$r;rZ89{^sj7M@;)e!;0rCP++xnWRLRY$~xj4R#3`WiY7~j8pz$a7Z0F)t-7_ z%d6}AI0TgK@*U(8+o>j6bPoY8Zk(s{Nm>vQWI8qm$)8zS+{6 zvTBVDkgO3VQ9FXrBfimrWcsd?WL8o$G$6f0KM~x`cO~`W_11M*a^XKLRB3O(tAO+| zq^IKwO^^&!$(epeUWQ^vne89`zg;BXl5d{+RveFlS3H?zo~@;YB1sGRqI7NX35zTO z%1nbY%GbKCD#>+4M6}zn+l7Ei_`{yj3umo=X1GH9t$dBu+pJ)o{zAMP9l?vBb@e1F zf^h($JS8q-r;sIEqhk=nuRCG%=?Gnfi~x$^5foqOGNBeoTrQjaN;UA4_k>%>&qNsa z91+cFnq+M(p`RuN`$hmCzfGU6$&>U9f&FrsoCuhM#SP>GvVFxTgg!*}mi1McCZ#{K zbn(`N6@a^rB@+&%yayamgRR68~sHlz?fZYhU z5lThPXapY<#Q9i_@TkOzMkuI@^Uxy9bPf6Z*H1!_dZ)w?Ep=~iDKb9KVggV{d@x)` zoCX|ZmH6P*I%2!69ads5>ErfFaIB0NYlVXNe8E6D&G8sXrEoVPqFokUXnKCoMD*B> zAg!}6tn(!;J!q?kmCHMVVJ&R&Re(umo9DzYN{o(xe?0fx6yc#PT6|oLltnI0i-T?+ zgU*yYf*&JN*i~%VssL2+7V0VScRpdHf_WYKJw6GW&FzASvX+`9DWiAOP%7DLE8YQh z%U?(^*(2V9Gf*QuGw?e?vfV`XPdd)PwAS9Afv3N3iy0Gj=ZMVUqAhK`i+-~ zD3e7Mm{+oT{MM28#M=%!=s2P1H<&Xo9Vn;EUodN7hLbjI_<* z1h<3lx(R}sH%#!ED-(RrwG*rlPcXJfc!Eb^f?eSWUOl9Hjh?`Naa$4XNqgA0N!MFm 
z!DatY8$xXIw!D+qY*2`;6r&fDlcp_0WW7XXZbCNszp+FUf_uIW4aCZXRwIu=b=w=Et!dHJ#f`xQ)NPkbNyA?f-i z)I|%=^xr3|I#kvQn*1s%6BzOgR9}E2$1PLcCRaMJjJcd+xagBQLPIi($5ag8sCg$B zfNXi3Ra(E>q(ABNdr81ru|Z1kd?0fx?GirL2o?%;=CY*6XLGQ>D0+I-UXQKW1&l&X zURhR7P9e%MCrT}w6iX=pwY4^_cjbx9Q{)x0+r7Pv7DrH9jJg+D%m3x_u~R2~#wdk& z50#}&FPdRR=A&S}{X8~zx7%}?bzD_I4=*0{)3gGxI0Q|tHc)f9;v)*dO&#IAZ?aIE zkFiQR=Gd}Bn-)6lI8v`@$1{RYj@-%7*LO!>6rWH$BFN=ErH-Sl{f6t@bdd_P`Rzo^YBb3P8xwM5;xgwW~h zGzCVx)v0i!Y1EC^yGcXbez-NQfhIo@r#!u7H5p0Ley}<$E)%*)X`{(x$l5sq@<+pd z`CRym@Rt!^W3O)|QM&jvX!H4@0RnI{Y{oxtb`!Ns4PIYi+|NY_HShgq6vC|E{^Lui zect!z&*GizqvX*U1xO`6gvVztKR?tHFm9Js;BU4`I}RT~|5_7i3`0)T^+cfxDKz%rc3PVgS%69qSU%lp`1ir3te;%K^K9%$R}n!s<-!`XY(3- zdrcVHSa{w0;)Y+u$pEnIAU}E!d{?{q;6k*d3lEcD(Z(u7kdJad-tbn$+b{M4AU$U5 z;;7VMW^+x|5Ztp2xoyN^r@op4z53`S;{D@*0F5PUeQ_=MA;e)7G|)VJdx)U(q=`zj zMDm$10jZJ+NT%AMufZc{54;bGsuqJLXLa-pLXTaH;&ZGJ{QBG}#XeG*SyD(R=jEmY zwh2byP+RY|<4=kskVSY}+=8+A!`~kV>adesDCzzHHO}rS7m5Hb7jFMRxCL2kX%sCe2Y!~MKvW@M++0415hJJnOBv&DEkCdatwP1-VpEPWzX;`{ zw95S2+KTdqT~RHO>|ByIM;QegoB@rvs6S7ps{!;5*1qvDUn#|a8=!3`A&w)_iH;SE zx^{Vlr0DXC%&vFmgNU2BOYL5~f)*)%i#OC(Y|5efN}JnE=hd>OlPTB1(OX~!xON6!`S0>&+>J_1mEd0z3hDnzSC_x2t~ z-LobokZR(QzC&s-KQ|){(1}o^cgNxQ3(rqoxpaYoiEh-DqMq1TE45-u?Gz@?-%#7Q zlN={|HX;eUAt^ggOMB&Q9or9_S^3Je8L=;{P%s%4nN3XOYc$<0LNF1;eF zu`&EgO>$~Z7GB9ZvS?RRd2J1@mBJPxoUlZuHJU3mgf>}BW{^AJ^^AWdo$uZQs?9V$TH=hSTn{ zSFb!3nVg%EsbCzaFWhndL$GfNw=cJez_z%A2hX?lpaOC_$%gt5l0+p_Xa4&n(Z<%q zK2RL(IOi3d_P^-ee;frO5Eeb)g=x9jWd%}5&j3?cpqK%W zN?{4>w(-Ojs62AC^8`}o<=SH7Tak{563(zsuGU&nC>0!Xo%pA@acd+2g1OCB;lY1C z535v^q}z?g>=Lhc$oZ|2oL@hjU!~N?V#=1^=rLCrz}cnsUU&S!zleo@Aq52hU&EpL z?>WC*j7AR}p+U|_0__3B1&v@V!GRfS4Z&ICB1|tQ#bhQYb|KoJztoL)gNv57f0+?NhA*1w6$} zGLU@PPP-{tfz(o_mP-L~KT{~dMTBwk2Jkze+2&6!kK2O)I;1QqLMIn*8jJV{5DAq| zg544oZ~N?_>NK0F1}DW>dC=`BE!Wx55j9pmR`@e<=0g}#iT}~~D6vU8exU-N(vSWV z1g*n_J8{kw7hM|bFnhIbiwoCr`fzL%8=Yf?-z%#OB|xVVeW6iLZ(5uZSCH{A)Vp2z z_$ied;CYk_^d8l{&Taw1c8t}6bOCCKnPoZkRn_Hnb=l>>bmyZ-%AY4Z_k+ykXbd1O 
zEmit}O(1%$I9bFZ^X1BfN6uI|3AgC53hW}d!&SR9~s4h37_nw74X5%z4JJmCPfnt4#m|oxkV5#!_E<2X&Y4K$_}* zp3$oldhOlShL^$6ZQpdP3rP`HHvR7*M$5twBmOGHsJXL`0HZBIj93X{gmP(kja8Hc zyvXI1N^9)u9bH{w*Gw|9#Ni8Pl_@I&wcb|NBLRuB0VK-NDK(nN&1)*8E>5Q@WINzE z*#hfxZ0(MXgJ7a-D=$@9VN{?YS%P?I9fkn986X1Wwgo)L+B`nVU3lhwKvtTLxA@F% z6&&0w;)ZU9J~0~@tj#3j(o{mfl(J--0%c8A89HD*0cXIWvr92*%rYQwv?hB|DO#d- zR(7?e)TZVsC5)o0pjd+!%YuP2pP?!>DLoag{l9pY&6{7o@+^DpXByBo^U9jbz$lW> zIfTE6(`cUf-Lk1dq}`hYtC3xZoTbJK^Nw^ORvIO56*iSoaW=L#e;*RM9Y*_rC**1=9{6xS-y1@(3|86 ztMBrR0m?RX8~+v;aLp|k!?*A+fo?;$5MHykp*8D1(pC)Th?n<}+GoNu^GfT`vK^cm zb7H}^q$@L%Ak7S(esEL*;q_zlCSO@UR~F>;BYooryZne(*9ysZa1hjV9 zp&^w>iw_{EA#?;XL-ByR);t9dsEz&-_5>^Scjr`=KYP z$RWJWQfP=vNRI=JP0Yt52|aVe8ia(A2d-H=NjIz=R{``qub@UE1Xb5$`$u$j3l&?E9dq_5TfunjX(81LL%<&AsS(OTBh#AA{CZ`roN?K@Nv@5*(l1}ozKdWH_|2+xp| zljAiOwIMD(JppqREzQvj8>KmNh{L{I{56_Szj@Uz48w_UdTL_SR5FE3-evlfW^{IA z5J4LxE{VO=v%kUV1tg?+)|Z4n(|EAnZ*XW3!xsYGXeltPO)=@<*J|G$2dTYn>k}2= zRB5m^@D1p=tmo>bvIDrZIGx|*hOI!Ze+1R*<9C6dA=R47rNTc=8zh7%CzZTc?KJQf zvPDV`Zq#s*fW1@o$o1EwM%lnM4E|Vl_{Y#b_J90iz&Zz67oH2PmUyoEtIu_^wXfN) z$8#C~^>f|u?HfiVptoQ1?R6)&f?9R?&elN|BsULnSfT&=k{;kRbsHK75c#yr7=&ksce9L}5=I?FkLk%( z;yt2l`J#{K7+Db)0?oqev(Q4O_f$#SO$t5CXn!AJ$h9^ojp z^VNf=FB}^;0?#)>oUw5n!jRYORgjG8Aw3caY@LCcsdFLkI&DAoCfCH1XXsiXB11kc z*D#G;BRntf^^jfcTk;xG!&RHk7q}LBkH~cVKKL(g9Km6a(7hT2cNX}teoYX%_QmZX1 zL<-!bl&Gv9fzo^Sa%JoyZ~%QF=)V9XA~UE{2yn`fP9d$rAt(tIG9to6kgq)^Jn*(K z?4sA=(MTlJg`7_N^b7M*dY&rYDQRIx2!H53t6;t$hi8^A^}^Vo)yB{J{D}RhQC`lk z1xcJvm9POh<*4pWRm+jJ8ZJJFzz+1P!f#*Tk;*u)E0t|jt;>O$DOp#!5q$LjUQE(0 zzoKcUgr=Qds*Y1Fwvx{9;mbs1r7%%SkSut;tuWCvzK-coZhfcX`M@{JubQ ztp!2x_e1ly{PM!MGJoOwxp7pl3~K*4QX#AXu-ClByeWNaO5BzrK=%^m>rFQ3dda9R zn;!szI(d|J3l|037ALU_rF*EoA!D3k0D9d+4}7h>WB~%`D;H?6HR4nNotjj9tD~0M zRn7cPTIi?z%94T-B`tIgJP0}>rwRMm4TUq;gDTHwE$D$t zris`q`UO#z*-9K)n7%vHU8sHrNS%QmpeT~SZ=7Ypl!fQ=x99a&=W=a$F2zTor+pW4 zN)MyimO^QBEHDOl0L%9EPC#uk$cpxLS)Zhjxd)gT)Y>~)GvNMq^qSvF4GE5j|sXiHcr81ziS&Hm~uJ2tg zTQA#+oJIYn1y9aV&$zr;uY173#j3Yyq+EwPSltrP?27%Ed`FI70BQEwjR)3O#)PJL 
zww58|6Ub;lU95L|-xf-peh~2&;IxlEVd6QLV;7CX+5@N*;>kUbUdY%D&F&h?$!y4P06l*QW6 zJSA49|FeA|9v;gL#a=-{A*)4-0UtQ3Z-)piom4PI{0%!zzR|$%Q81sE&Q02!wD3uV zC=(u*qmB%jt{aabEvQ^e008<8Rl}8av*+Zv+fck;ycI~nTpTvzU{gFqc4f;C>UQ4L zaIH7uqQd+uhAZM4!-c^Q8?N1Mvj66W>)MM9*_HkFT09;YAGvnPe5iP}!{Rr$zWK%_ z;ht|Mqu7;8KS9MBbkOQemmjeY=@@sS@FVR`6SY**^ZbS)RVM8ZpZN2Fd4W8n7NyqD ze3D;aQFBmNZ-X9o8%$Kgpoxm+)t>CyoEmYI5Sd$x?lBZr?%cKS4N#1Mk1Puea#z-r zOM-+Hk=*=zpispeF@gwrGQPdk*#~2Gbq&Sx01TwNs6J9^0*;D^d7$Z94uO!gDol0> zx^=G_yDSLOWl5UvZ&-G(MUm%DV7ryMg*jk6oVFhf!IX(Zt z)KaV*nPf$3$i-)l4@coFQnw{Jw=Az5bqFNJ4GQrtO2ey4z(B?acyBq{O=YeI6CLPX6+V%52B1jozB;&Z;lcG# zmyPCSn;CI%+xc^cTJ{3Io}ZSD>J3S&PuM)>>eku&h@}8)Fo$RQyqs09PJ%Nh{*u3uBl)|1wdGsNxAS|S| z+kKtvA-SYt-&nXK$Gw$oq8F=Q7Vlsgm%N+!P4!!7VUu{WehcAb>fE^HGo&14jlomP zmxplN*5?Fra&lD3G40FGp`V)U5~!wk=ud=9S^3`5jzd>BJ4z{1sG3f-n><)HL~GG; zM$knT%btTj`Ws;^Uq)B85HDK?fNugP@!The*&-#lT0LpT4K_=r%V5iLAfWYv9Ma0|u8XzS?%M3S#JVJDUEe}TLfWLw z2T2kV(mVM`l7x``KVNg^9PeG1)aUd0*5~){{eC>pJkOjtb7o#M^P0<RV=LcU z{^owWdZUfD`{Pv=Zq1s4waWw3UK~GW!nj8pBqdIqQ0bEYq;$N{czM?Bg>-`zI=tge z$2;EVjjd_3rY!GEwy6NZJ0r!i;%4nMqFvU%k(GZfyT?S9O}d+qct#&w_uzS4?K@ z44PJ`Ko?y-bu~os1{6Pic{<5>Qe?am#+n5$y>pXm(yjBTD^$#Qc4TrFu}RJb*QJ^( z&%L#`*`cex!6`1xt9g2I=Sg@=ZeRQ_`o1 z>8O~|mo{Wh>ZOPl=4@Q;wvd)jw5X%|NIQ&-RvZE1JF+#J?V)c zyxaEjVy*CEqVdG2Oepzcq~x~-&A8!3FX6>>53N5Qbm_RU zK0AFa%GHx815rrKWLLMeXVSaj!Ly7*)5c>K*r!IFPD7px>XkNl?h6avn3|P?DGfQp zv#DSYK)*ip3oPy_9`tS6`VFrvU)K5EL0C0#srVO2r?5+PLw}B_JG^Dzet2~_f8Okc z!D;iyWDlD7maxt4X&3>ta(p+V|vCCY!EBj z0zksfAufoQ-eYtgmx_-jJbyXRK=&CP*b!Gtj_z``Oe(e?=rSSwtze7~QCs&? 
z*cRiH#VK{HUi3dkC*Bv-@lrKv31im@b9Dwf=^xW3hV%N&#=3%bf+_}k6CaTeM zO!A|oOeVZEVMfrLsZ(Z8oiT0U0_^W=o_gtOM5ExV|G*ql>OS^!A#79M#T27WshjYK zaYo&g=~&%zpYd7xrP$Nh2QLHG*!uY@JA`>wWvj$T(48kATpnunbUlQ< z0*0NzZkP*)IGd|lyJ6;}zMPuY+YG}drM<_ciY=KJPJI<^<%L)$wvYzEiw;UpOh*s( zCpcL(D?P{BQyO2VMuvP*G&f!u<@q5!?VBjHZqhNcw*n@pD_1M%MMsSoIRf3tqFvM4 zt?5d$UWRoUH3V6$?~=4jjWF2&>)R@y7&l`K>W9&zXoHjmqrQPho~Ty}d6YI}USbxy z&dB&jE}y;p{Asd%B7;hR@V8Rsu$Kn2f0`9yTk0mFx|it!D3oZ^vI{AF$Kl z-{G&aGx#g)d;C>)mi-8Rp8W)V0e^-4jK9h*DlaibnWju*s*3nl+hf)-r2@*D>pWCz$oY8<-8j|6%?Eyph=m{C=|s z__Jnj#?3zF2JpAccVNHSMA*!&=KHYUZhj2D*Zi7srU^Nbv`BVE*e+!g)0#AG*^UKt ze{pa>7T#;{v(K?k{hk>*fDIO|S$;lTmSSqF7EMA}^kZ!s+@CH$=4u5AnO_7ArUVNHb(CPK%Vj_?&@ zB^`fN=7J49S!IU9bXJtz`^+=_hO(jEs}JeUMv7x%_h*JY%aZ%`>Gcdt7so7d%ofLm z{RVdL$CiuZYH{2kj++M#7}lR{A40zE6vw^dcwk8X?t|D7aXc|(NPIPRO2oaSI8uBS zgvpXA9T7^Ln^Hm~Cyo%?=cR>9mpCH#!ew#F#kddh!?Bb&>f&hSaxOTfm;#C13;D;h z1lEW(V{KRm)|K^UgV+d`$WqueHX9#pUW&ZG5pK!-Y-v*d$GJL7nwKrp)f!=g{XI$6 z`8}&dswwwul;#1;EK*;Z(=7KzR#|4zbJl&u_gQ9%ewKZS49hGTV%e9Bm1Z2`8^_8b zKYn8FGe0%=o1d8n%+Jk(<{|U2`GtAJ{L(yX9y5=dUzsP6XBAc!JlPrS4ZhQ$JS^qI_0ov&`;h7qh9^+-zyKHlGH2$b8srhVvG% zX$8AS%r@quW?S|k~@JDE?KolR`&2L6=U1NRmDH-``QAK&yX@NW%Y zz5ngti~a8i|GfWC!FTUJJ%#)wAIT58kKB`+u7&>3m<9jc%z}T*eN)_JxoZit3;wNv zr}1x(e@B?sT~C?akQU|CAJiX^39;)qQ@9VW#8h5|*I<6UHcw!|yb*82LiN`A<19?? 
zqIYMN^uGG@teQSjAJ6LQsrpOoetnicpFN~6(wDJE^tJkj>E;Y`ra8;#XU;a~nAzrBa~?imzQBCNTxh;(E;3&;7n`q}Ipz{` zskzKtZmuv_nr?HI`G&dLTw|^^-!#{m>&*?uV@5mUaihKQ1h#eTXml_-VcGw4#*;>8 zbAa)b(ZzV$=xPo$o-w)^-HjgRAfu=8tkK)(W%M!nn$Kg$qyEMKW1um}c-|On3^9fp z!;Im^3&sfJMPsBf${1~oF~%C>j6`F+F~OKaMJY&AGz<9-2XuN7HGF~$l8?PHV#u8(xvCLR*tT0v@ zZex}4hOydMW2`maG}amGjSa?I#zy09W0UcYvDw&SY&G6Bwi)jkxyJj(cH;wMhw-72 zXMAMrH2!JqGIkq#jE{}I#wW%;<5Oe5@tJYJ_}n;X95N0YUl>P>FO8$dG2^)Lm2tvo zZ9HPMF&;JAqU3+Z8nXvj6ZRnLg@=$&9%jv13)YggVy)RD(0z}xw(K$1jy=xWvnQYl zJF-sfN%jgf;HzvAdyOq-ud^Js1p06pTaK2&O6F#(&^B1j*08ngO|}l7x8DFw`8M0c z-eH^B7PghWi&n!2>_fJjeat>#hu9bF7}^Lwu`BGlqAHqVDn*sj%DqZC#a}6}1S=Jl zFeO5%s#H_zDfN|x$|XLWzraWE7x_p&ijU@F_*g!UC-U)p0-wl}_#~dpC-W3Og{Sf~ zp3bN84E_?I#xr>qpU!9SnS2(1na}2Pcs8HQ=kfV`0e^)r zevY5#7x+bfnP1`8_;rnGs-|m(c8^w6E3TE)N^AFO=4zr_ZW&gr{RTPCQhEhZ+rIb<1Dt=0U5~PGEp-Q+Cr^G7>N?qkX zr2%isAL7mU!@N0f!CUfHyfuG>x8aZSw)`>Pjz7-Z^Cx%*-jR3WPx8+EDc*%Y&Aakv zcsJgi_u$X+p1c?D&HM1a{5jr__vZunKt6~+&j<4%d?+8rbNEueoUi1o_-ekEuj3o| zM!tz}=3DtTp3ArM-TY(z3ICLT#y{tW_!s<3evE&`zvkcYZ~4FYcl>+)1OJi##DB)y z)?fHl{wvSd6iw4ynyD4hifJXZQd$|UtmdQnX#rZG7Nmt}p<1{WsYPqCT4gOxi`S}a zHMQDWJ=ZMPQlw5e+PkG#S>}hge+E+Szj*Zfi@s_Jrj7W^VVsP#@hz6I6>VJU%F z)r1bGaG^J*evIoTJqLP&T(&ab#(k~eUd0)Osu=aQ@Lcr8@D%bH8U}iwd?D8l6kPXh?ulTY`d^-tS9TssAn^rjbvkyx08{dGmwX8A?GeYosolj zVioF!4X6>eA`kCiJCTF;u>&FppI|4^k30+6Dz4Xo9HtnjfPVqs0jGiQfwRC50NTa| z`j^Hv;8!3YxQ@OU0~CPc9DW%;0Y3xie;Stnw9SoQ0JLKaw8~5k&;bK*0q7r@_W(t4 z9er8jB5)G;7IrF71hI}5dBQHiR>`GuYnN9ltMi?j(yAdTJ?(mt>clOO!22k_pXg2X zhtLNe$r}C}GP@}2P!~#n36y>xQT9WXNF^3!zq(QzHE2VnvC>p&uC!L#D(#g{N*AS@ z(o^ZH3{VCu!x6*3sI<)zka|g=6i^x{1KbOsESlv2AHWyz1N?yipga%=Q~-j2U;riG ztO$exVL&(#ft+6kx_Bb`X(`CBn$W)nQgSAGn3Rsik&^S#Q(lNRPbsu^mP3zFTKXb2 z*E4_7zNvuJ+`@uIiiS{{qE%0+8m{N*AG1jPpni8JD`Py?uFS4A49jDH#58K;f! 
zjkCrN#yR6hoHu?lel{)`myFBCFUA$)s;QZ}X_zk4H19Es8fT1) z#!2H_Qx*9!(y2}U6M0dEjxt@ZyK-FL8Ue0<8Jg>~fzko30!_#Rc?BNIBX~9RV-%G3 zGAtIltU6*=4>`CoG*NT3Alnww3K(~Rv}*yifjU3}P#35N+y~SL8UPIeF$M$kexNb% z0MG<@5Kk-v&GRJcq#me&o|iSwaI^-;qWw1oEj`iBgS%oP#EOgbq~dxQM;Ek{!$tp0 z!H7wK=*iP~O;@D4_jpaD7_X@<#%r1)XW)nN5e0_Lp<|Gq*2oWCyY2aN&ki2I0>m+- z|1*Q1W0C!z>;D|u2+&sKFOFiQ0KO3?Of~p#gnT!v2Q)+LqXiqto&fKVp9j62z$U{@7BC%{0n7wu z0keVmaJL0+-UW8xJP+6j>;jGeU&8(<%wsT*!~6>7&upMlEZ?J)07?O6fSUPkrB;5P zQX3{(KnlX6WWqiNSc3D_xMv;sdho3<;lHv6I18MI+n->6i4D|V%=c(hfeex`B{ZZQr z=dfSY>Ud6)IuloQ^*J0Ts3|zQ)N!~gMIA3V-Rn{Zc-R}8fiLko+c@6FJ{U~C((1K0)Z0}cX50S|B*I0sw=u90d1sPTl+Ql%^ay{Lo$ zkpT3RQXNPD8Ujs#=0F?BX^*lE@oOlKIvv>+ar}jjO41Gc{0V})=y?6QIHugN$rtto zj+(eC{LB}}>%S`dC}orn@Wf(tynY042#_-3=(_8p?&b3N8@$HaYI#hraT)26;Dh)w z=9A!q_zKp<2Qe0`jZb^YI{S3<=`HLA_zabHBYhGjOZG{ZEX!xM&jR8;i+z?$yVX7$ zB-`whD_Ne;9?ACm9G2|3&q<#%#C^{DT$Xm%eM#fv=Uc>=Mzo3f_y$TA>KiRtoNrCv zdg5v$-=@;8rEgowI{0>xtcP!3$p-ljmu$4}1m6_j4Br{P*}elg1=8?3%xW4~s8t^L~h zb@J=#*VC^b><0Ud@Ec3d@=Nkd^_%853v8a>BEO}6$iaT={5JV*^F!YC+vT^<@1WmN zKabyOzjJ;U{g7AvRe#gJggBmEQoll{~EvtT#de}VsEf24x{YX1%XoBbhs|2+Ra{`>tQasT7~C;iX(p9j0_ ze?348C}K$?pj3d5WPt&pl0^r^NmesJNSU#KMgdaJ0WAZhoMGERy6Y0qL$baBgCrXs zFj}$+0V$GY1k8{uJ7A$?IRPsrTN{8>pkKh&fbEj)3_yC&FW^AH5y?&joRaKpzy-;! 
zl(z^g?<>Mu*UB13#jmtNatabTzl65NIRkEJtk*f47Ke+q| zi-C(2hM818)v_x;tvphie&y$tUnJSm@pvmvU(L7N!GMNOUc?+=wLC%Ds*vJ z4|%n3g+Y=HuP|D&2^CT#%cw9zvg`^ACCjO>QnIxbHcGa&!gk4aR@f`qfeJ??J5k}3 zWM?Z}knBp3MOct4sFIXHJtXWWN$=U^VlB{b` zPs#cP4YnBW86oY)1|>;jN)`|tB3Wc`Wyz`sCrH*XxQS%VgWE{fKDe`F-GX~dHXwMY zWFvzUB})!YmnLX%qtO7}7GNZAga@C)b7akiPT{86?^8 zkkOJ&2uYDFBV>kT*&z!h%L!R2+1ikel5GvyF4@kIy^RScIbwqm?wwJX+_tZ~I=lC`ebPO?rFyGquxVn4|SR~#YP*osM# zrB<9K*{q85BwJK*sbs4vu9Iw2#ch)9sJKhAeH9N%cC?~LveOmMNp`W~HH(F+p{8Ud zLd!}P5E>#`WN2l{s)r^>)-be*WX(g{NY*~Ivt->udrLMTbf{z_LlY%S4o#OVD|EJG z3qlu5wmfvTWE(;^OO_j&C)u9R{gNFHJucbF&@+;q553G-==CrytVmcX#=?BU0>eVX zqQT7=Itp(c{ zwl!>f*iNv$VF$vFgq;986?Qi4Lf93CLE~^&c(HJ3pK!nMpz!c;XrJ)-@Y><^!yAJ& z3vV6XE*zRBylZ&R@P6UYFX1D?$A%|`r-DrjpA|kYd=c2v@KxdK!Z(3!3*QmGD|{c= z!SJKup77IP=fW?BUyD!~i!dWfM3jvP01JtTjHnz@9V{WDVT4GHh~^P(BHBkF6(YJt z^o|%10og~4j7W?~j)24?vLa?jEQo-dBbG<3j@S?Z=|<#6fJ`F}M;wnh839Q~ zoR7F1aXnIFEV4*ssYsv5K(Nrr=*YOpnqc)J8$~vaYzfvjvO{E-$R1#QBL_tekAw^( zCq$-1W<<^a%Z^+anG?AZY;EMm$gPpv!FEROjXV%}1nflQsmQaD7r?GWu_#wmF~*|G zMEOMpMTLXKM#V?fj;asVII3Ax>!@~Mouax%^^EEVHaKcT)Yzybu+*q&QM01vfh~$! 
z8nr5F9oVL*ZBaX-c7g4SIv8~{$^&*f>Ri;tsB4TxtI=k3iRiLm0ns7Rk$S4VG%-VBx-ofo|) zdOz6V=;P5RqtAeykG>pzJw{_JrbtYw7@wFxu+W(3n7EjlVD(}e#Wam+3D!2ILrj;L z9$ZTYjoA*iGiGnhftVv;Ct^;;oQ=5v zb|sd@x?+nl7F#CPFE%JP94t0AK33FyvGrpc$2N;aofq3Kwo`1^Sk!c}{bC2lj))x# zmK2*BJ1rJ1t=M_7i(;3?%Jx<4x>)2y`o(UG-C;5CT@FL;w0^M%9d^`V9)}_CTEEzH z4!h{EYc{KdoNfIoNj=0^r4o)^S%)FtTfa&n4vTbHWrv|uSiedM4r}PJCJt-vur>~B z@377e>*g@2*ATt|j@?j)jdWO|!;&4A?yxL}&34!Vhb?y4a)+&U*anAfc37^%q@G0h z_BeL?9d_7Z#~pUkVP_n6-eH#=cHL%`HHQ^(SSg43I4sa%p$>yCwtki499Gj|^&Hm7 zVND&@(qU~K*1=(29M;2OeH}K)VZ$9Z+F=tMmg2Ashs|(Uw!;=WEXQFh9k$kC8y&XQ zVcQ+H(_woZcEDjr9CpHCryO?HVHX^B#b#A(!m7AzTBVr7$~er=VpW2wgi97%CEmGO z+hO$`*4Saq9M;-l?JQQMQ6H zS~Xg-xT-ZJt5>y=WKFBKv>0Pm+d8a+!<@2TwTHZ;Z`DDP4X--daW}zXDGtkU*bIwR z&91sovYe_bovUjdw$WiunXbCsv2)6F)xD0LQ>LpPaqLbw?3BaKI_!eQs$PkgR`IU* zV)oVeG7j^zSbR`?xMZ>M@|O78@%5!$iNAY*YL;$#%r=l5Ai6LCKEBdn|@$oOakbhh22oHJepa zEmqB}Rzk9})dD07sU}NawaV41J9i{Ftf9l2IIOwFsVUqT9##3ZFaQ?j>2W4;E96Q6uhS3l!Fyzr{G?` zDU=T|R&O0|x+^Ik?_}|Ig<|CGTO7H^rsZT$lEqPQZ{I{{rA!1D367Ty!sQF%E=yM4 z;qiV-U*YD02*U-uF;T=D3>@k0fLe*{)kmf<0L%o!s;3&cW%N4&u)C|eQc zy23ZFP_gr5&&~@eO%`q@i+G9L&Nc}9RAHYg>^~I#zbEYNd_`&csjyE~@g_|ny;Cq{ zy~12TCMLPWbRWb;-A7MR_lda6+$ik5LQC(r9QOt6MFPVd{9)UN6vFMAPwk|*-gMA#1( z?uT>oU#2fd7@S;z8AoiLNV|0$@8=!+=WlS4>tszz{xG43*&5Z#InY!Ev|@lr%>WS# zy98lIfRkHDGPXUHVmlX+WNiBzsU~uu8s$}o&K5q`6!EIbQ64cHn99xu%HeihOzGm- zkAS_bmqmW~P`H0jufnBQAEUK%|{~u9^%t-f3!=7jJvgKi={=AmV#KlrP)9FXCm}&+&3k za_tJ!Z$09FYtZ{ zgI)G1jn;{{tm9L`?REmCubp!+uf)kwlrDA}!M!6}xW`%x$}{hayd-NPQFBRst70CX zQ`Ur^vP_}ODv2W2i4+D!wm@j+i&HKrJhOzG>6ZSH_x6Fk6|dpEP+Hryp|H2ZFwk)? 
zYFI}Oq{nRe6xM<5#dAd+>gWw}Z`&s#oVGq8Jz&cd;c?`Nyyw^#(g}{73zbmEzHo`Q z^Beg;gThv*oRa^x{T<4Jla7VUjwOfTA}wtFhO}_x^Dz9k^AG7jJD&(WDZ+DwW<@#a zLS@!2Q)s2wX)5xGcdkAq?mb2JYGq1awX$f-ofr0^?dNSzvxMMtf^c6-xGyDIWfg>d z1;M>rS=)p^R-4ew74Aj6lcStZmRT$O z@s{v8;YP$u^^zp}f$XhTw_1z1T1&(|ocvcR3V*!ZV;&}jS+sAx{ZYg!W7&T8daiU| zs&IH{P8j?Npl22EN7jQP40Xh_>WFwP6z*FIn^wXmm26ll`HY#N6sj`9eu}W4BJ6X7 zeU7j{DC~u_l}W;WlCa+)>~|1Xe1yG^u%AX8EkFvP=(VY$*QWRiH@;+FpqG?Q{wUe> z+=BMPy=5OsoJWdh$*>ic47(!SmoIFuc(oAJFmj`Y34cn9=av@n^)72x3qj1CQ$H8( zKNs#t3-_ai|J{XscVQnd?5o{uFTxWq+@uJeLL4g&Nd8t`a0~mJ+@GO)WiPmZdttv$ zgx}kK&u#3zpX;3;mWuE!72)@zdsROX20sxmzdPA0-u=oC#B*~+c)a~zBJ7t4KW}G0 zklZK(g?zRP_aEHkk4SeW@??R0xL>&6FWmPL{`3+42<^c{f7V-5*(FHC%e%yt6Y(u4 z{K*&o+RkU?hW#@Kt3!Z?8}JfdTBgGSQ_$s3Xivjls$A;VSA^YQ^>u1?qzb%?;*MId&Ir6?h~yq%uYqD z6|$$9u5=Xf^$w5KPPj<+YBS-!nefNbsGW!xkehe;?IO~vi|~J`xYt|qTwLYii6097 zp>TOZghWWpJBL^!DzYx~4&fEy{w9f8l2I&)1q%NI#k0IPTW7Ue>Bx@IRrZ;16DxQ- z!A0%Bz7sZ=g-uIwe3ExV8i{dBYpl=mjja>**4W(>!hdU=(;8{a6*leZsJ0Wlia1(r zW@}_}hp@3mMJ#*oqvelyt{hh!A>6+rj$^!h6L+cA$;LV=YsJ+Q;%J4ffwc%@Sdr&SPM2 zk0(@tz1^;+@YwBYipy{*Pf9i04awGaLw#p2H?mw7YC6HtCGH*0N!s>vX%yE^DH{E-?FV7y%%dz6E9@CoBR+{WbctAFUlS{h zUc$@|l}T@7wOco=cH6^xVZF?k>;-llYuYAa zz05aQrT-mPYEQ;~6+dF_%>}G;OQS{wn~FUv%CQW^PpQD>C?QHk_KFg>J$bXr&5U zgmw4T*ixmY@(;Egt8bek|389tGjAzfm1o%7%0Oihdq;TzX}Lw2s?1~_i?5uKD`GK8N z&MQBmcDRHUa~IT7YB%Q_>h zY0qmZ$_gz_Td2IRy{2te4r%XdKPx}!#q{E8tX@hltyb29^onYno}kxNtJAt#wWi)u zZ>7>&+M;re6zvpNTI>;4PV5dAAoc}|6+42}5^JYwi(SC#iv7ImiQT&zihaA9U~dym z=?3%$1^`2Wkw7Al45R~CW2wvr766NZ<-lrS1F#v$Wn9Sv_5k~V!@zOiBya{e4_pSW zV}CLYC<2rMe6YSN5N0S44a5O8fqFnApefK2XbW@zx&YW?Kg$PIM%sx7f=i+ z1NZ?!KsXQ!!~?Yf>|VtiGtQdwEIo(poe+vh4tA8XWUG zKNrW2`9I*^e_?I8 za(xbISu6>M%bQofVf7*}44|Jn6(2tUQeJ{}YNxHx_#j;MKX z^-HGoPhuK!{yTc2vCr&RWKWSZMQ0iNie4^WqC`uxpV_a(iIQDQ$CXYjlU-(SnWOh= z_ZGSLI8dbQ>C%bi&iKUnwDD=2jD4jODa_^05GwAhxRZRYxHt4>4RM}tqsOp7@g+bH&ARC5A7t#aqe11cv~K1ryOUL`%cXLQfFE^(Wqn#Z@T zd@_DxwUgnysxOA0IMUx@bcD^;>KAI1s8O~?XpQc-J|-Jx}piPud}I8}F3T~FQf_2TPwsW(sH 
zz$z{}eT(kfTt7&-Bey{PcJ;f|?~yROP6&{iu*pXKvkgiK4tw&m zK~RJE2K5^BXfQJ26oIbQifgdHwpus6UYCZE1cY`t`AK$U+ULX@)~_FAgZv@?8w62^ z8s;==*=R8R@2`1(bNf$ebN|K%QiD%Eu-f`J32KtiB-h4+E3LnM)iNJE-1H3Lp*_~W z*~W)kdxKIpy640FT4b~YT4%M$XtTLRM%&XZG9C|Xk?};^78xDWJEnIk^5nM8sZX_O zk@4iVr;@vj{ypfDCGsBSzq>@2gs@ znil#O^3S=-x~Fu{?!K=3R{Yo5&#=zmtBQ_t-^ zkM=}P>&|L;dT#1f1plO7J9_u%y{Gpz{K?$Yrzy;9y>a#u zcB-IVDzHO^*qNd@Oxm5IBtSb^2bwGaP0F%vhi@5C_BqHS;sElZD&b z$pWUmlSLM;VFwMdqXq3`LAzMI3w!_|++s%ygj4Klfv_t3fdc^TaPb2mcDcxRtKIWG z>a##^>~cZ7U5o@q17m?iU;>Z?Bm*e`?S}Co_G^G#w2xpyF4{j~LN3~Fn0sL2c^YJ+ zK{j&Nj7+&}MkaR6$i%K0ZtR+oi9H|O*fk>)yJontYept^&2VGaj7;pB;kI_ofO!sn z{0I}_)gd=)_Y9;@BnyRv>i`KrU7#L-5VP?BLd_-u2sxVsAoR!$9<~k01-3&1H38&6 zNYtYsR>~q^GXOUV+$bl3^LYC6A{HLRz=P0x5ONPf?Lmk=2(3r^8u$kIR)h}wm>^`C zH|6)n2y+wQK>#~|ux3DO;1Qq=@F?&Y&<ojv4`wdR?EvLH2)nivSPrZNRspMlwZJ-H0|41-n}E&0R$v>D z3v35=0DFObz8{9Nj~DXE#842D(MalVo-yjvmC(gE)E+M-SpiW!l617;;B}8el$BDik1XNSd!U zOwxGgAqCPpq;W{wkfx#icxW#k(kP@&NRyBjAq|2QCryF9MvLQE5`eF)F`n5JAekVY zQJ&l=Pi~YaH_DS6<;jimwRuouLF-~Su0UwxK(IjJdmET-0sAQ(<&&1cZYi(~SPrZJ z)&g$=>wxvZ24Eu~Lxp!Zf$SsTpTKV5Q{Xe;bKns01!~9QKuMr9a4%rTBn$jq-~#~u zD7atw82AA|EYNuv)Vjh0e~77d-tJDg&L4{qjO{5NEJIE76C$lhO9M4Xj{8l z04;$>fHtsKAooD*35+|%{(KL>AJXyd0Xr2)JG~8T0^R{OBRrLWDnM1B8c-9t*_Ix{ zhnAKHEiI8Jgzg=RFb)Ss0Hk}VhX9=+dIo!8?gOB+Md(Rth=IsQ)_!`_zNQutwXvyH zMD1*98BtrCT1S=?p^FRTuf|TSsE%&}GzHoKcdE;m;QCVFZt8QY%Td>gI{ZVJ9|6=K zM4fwAbvjASuG3$GwCuW^>T&q%^xNz@{9D*jTWT;c5=aJ80GZ}u_d${rNr@x`>4>_$ z(Vt$@3YReN()E8)qDtLSYaNCR3%5nR%Oznn`onwlN=W; z^ef?U3DUE+uDw+VNblOZ*VeyM2j8TJZCyv+4K_l`By_XLtXf$Dv`?ziiIyA5Es1MIed-5#*p1a`Z?ZX4L`1G|mj zm9C<#P`JHtt2TpdH{8^Au(jJ?sSWYqpRV)o)Vlh!T2>F-)FZl8FPGX14+6G^YYR># z_Rk!1Mr*0S81o^V)3`EPL!$Rw8fF>bUZ8L(3t5r}3>!NE`9KX^ewvB8(bgr1P#u^u1I3FE^TR)ghvl zD#lS%r{_}&qX)l#EI$|bY=>MDU@D^4wR$)u3)H|=mhD>CuJ7N5on6CH4@cIf)O&$X zu_ytmGs3E(FBOP>R3N^;Vb|urce9paCwx1d0I7faU;3sf<&=zkt)gcfgr^kBNIs4xd9Y!!%Gw zTTsVHn~svJqm`_e1AGC0pgd3!2m>O3C?E!?1XKa40@Z*TKrNsS@Gzd-0%(P&eaiyz 
zO`SmFGVluv#J6+;@hzP|d`BnHGysIwEQ*k{|Btjr{+@3j-oAOaDlj%;_oD6ERQ9gy zekjH!sRlg_;O!*Fl0%Umb$|q*E>I6h1jYjsfQdj7FbPNo<^$gKP)*d(wXJ%d-qSRj z9e&a{yBr$5?t_9u%)&c8*4Zuc#TBmuyPJsFVM*y_>#XOSYkU&X*+Wyq;r#1z( z7;|vFveV*ejI+O>{ug7$UgJ2doDwhPQ5FP6)oP_h;w1JM^%f z{P&cT>(B!f?*RW-dc2hD|0?}4ydM#B4(`w=E1(fIZ+%CzG4N$SFAZqV&%kVl|F9;s zbEiFH=`WB=3u#90H)~EVt$A0u^l{{f-#Z#`C;eFrCAkDp3Mc~<9yf`FU1h)?Kk=S@ zLOB5=61RE_F7%G?e_ET_@6?3v&)${1NbCRA#@oEL;3@W)>s#vQ-SxUL(1Wa){{s&N zqTpW)&Y>%L6#)8@R|D!|4Uxflz8g<;^AbSG{9(Kl%rg1Oyj=b;@q|%oKEmY6Pj-z# zc=qOxavjAuj*5GZk$d!QujRX8@5a6g@W;6_539^@g%{7y6IaPyN3f3K4vL*e_&*H( z4}<^1;Quh#4uk)eugN$|W_KSKUBo&9F^kGSjaZ(hSRzz$tO2Xe(U<1s0g^-w0R0n= z{*B%mcm!w*JO-eDuA?T?Q-L%f19%BQyHbA*K6Vk{ir+r-=}jybm64ZH9n0x?V z0I}6Dc4wjwBVt`2W&@xh&#dl5^w zS1jE!B|S1FJu)RdNI?&!o=7p28ME@yfE=PV2AZHoDk|=L2QfPXmvk5I&=_2)(ly2{vRe3%AuT@wl@(fKsnvUNiKKTj-k-LLP7786=Y}cxGX_NLgsr@CDf#0KRNXce4_| zJC1hz?ve5H6pEh*@uPTo5HFIUol;v6vcjQ~sp_3t2%!fd^dN*D$Z$2Ju|`TCYZ)MM zONI(elH>iDsgDr3-iO3pa4A9qDO;gH9>SG8<|m|U)d`tM<4mE&sZJnGPW1w5bE+G1 zVN(6D9X{Lxzsmqt-H_?j4W#XC``3cbFn|DbLHCDprRqb))A+(sL6Ln}+!4SWpzfbhKt zOvSV6ITE4zo9gJM0EK8F@G7tfz?edT)rx4p{jaTXU`2n=s!(Xz;z$v+-{nZ@4Y?%3 z4n1==0hkD&hyMSE6`p9v{Eyc>zqQs1Wx+0$@yOrk|A`z^8)hB!BocsnNbAROPNkH3 zGSh(>0D8e}7BCx_1E4=!U_L3v)o%L+z8Fem37`~E2EfR?5(YCIW(3SzwMnQwVz)&Q zhFiTsDzGyHTBr2R$0_xH&9$b6(14vFE3}sjuM1=;f7jVabvZW=^ECedx&Og7^sjP( z?Z6HIWBOS6C-y#Ib^faJ6#vM3o5Cyf*p$EX|KIulxAy*Esek8^zjMiN^uKe<-?`;(a|`V}#ist9WB$%DzbD7gUS%xf?_BeDuKB&WhIY97 zlXt4>k9_+-xf1Qqm{Exx5^481_Z@df%)s?k0NxX8Yk_sZ24Eww3D^wWVKhKHfa_@I zQtO54qXIi2dT@?is#tYrl$S=o?`GtCDcs?m6;yJ_Y{_ z_#8L{e1Q?W;sCvoEDhWXWB?dZR4~&1yY}!&#;9Hj{H*4T<3E5i8l!9vJOR*K1sYSs z9E1X^lQE}8sRC35ssWfkf>EV^f6tH4Alm~7)#oq|!aM}?Fw8Gt9)Wp>{g*H!)eh|~ zMgWB`2UyLrDJSZhrIOaKn@0A{k73{U_SGxj*3f$M+^ zFaXS7#acwni|}5-=&VpIxP#Z=zqJx_HRjVOp8%f%$ejvurh?M1{I~Yc`!}_cb{Zvx zX2sClN6aljPIj~ZxAskHqF}BRN~;@dhTYoFz(wE^a2ddiQmh|#Yna80HN08HQ>0Hs7nDS^t(~=-6-{Llz2BvyBj6ljZ*GL33sD(yY;%jeWDb*jZ-lH1t7h!X4#E3 z%Wea2S`5tWFc2>T@iGuEly)~tyBnpQO0yfK*^ScdMrn4N2(O9ovMQvzAPEno;DH1@ 
zh`k3f_aN3D#Mpz_dJt0&V(CE)J&4_J>pIeNq~l1xk!~ZsMmml38R;_8W2D1Ke?fO4 z^d5xXgV1{rdJjThKvxykMZYmKzu??@S_yFLdGw_)7k)pF1rRRGiQmtw0?@O(PX2y< z7BCyg2Id0ufcd~?U`zgfSH=AO_|6Ob#>{?eO#;m)5l?e$XvY3N%qOEOG{>LT2xQ8+ z|3zSrw+O=Cn)^?_-B{sZ`_|FRw~mf)Hg4GT;&S>q!BR9QIwB94S#e2kNc%#|^ zuAacNJ77LyC!9}4_|OWt<$Fcimu3sxybJ7rn>=7AunRz~h#htk#H!w7Fi|^Vx19uh z+i9TKP22nZqE*l5S#z3Lg7Y3R2leL?oUg_`Xw6`Uo&>Q=HvAX+;hu$w+>LKCCE$xp z1MPQ-*u^J7qdk+Uy+!-nQk&}#a2WF&tvWwX*7{+nrEPm&^o~!TaVTV&%a;H(sYYinnA|n`Cl9DflfeI;Mx3TMo*v@c<=nL z&4Iu;U?PwPOa-RpFEcY?X62`u(}9^de+B2uffc|?zzwVdRs(B*wZNOeI$%To*Q}KB z7@p7$XJddYU^*}ZmU`hTGV=1r#6M%GJ7BClB2rL4Y084>4fp>vzzCDHN!Fu-lT75zev{-9WU)d%Ktz%bwt z@FRfwpHn-;2k=FI1N|EoEa#xxGp_&pYnK|JT_o2nVZFc~3^%RNK^tWXVl)+444{R> zF925n^t^C&B-m&m4VVcW2GH)IxMOY5aGZ@qyW$V7dtrvzDnHL`2cV8OJ0ezC`DJ#7 z`82gTaODTGW!!9)GmG0?At_3v!h%>lB3xxhSNKCl3|jUL96C$S2w-VA)Lz)&SePaGK6y(!nq9LT!vMr?_fS<1dGCK%gU?@W=K~< zZwdQRqy1ACbGjPiJDE+eKlnq~kMLoJovYCm>`^{e_A2|7&y>%VWAtTB z`mQE@Ra3dB`l!BYh+0Lhq28xHpf*w4s86b0)m~~J^+k1*I$Hfvy{cYQudDf-bDg`m z$&2t}yf`m`?{1djWq3L6k1uV;@G3lx*W|T%GyX7d$y@P9cpKi9x5KwJJMzx>wq_6B zllSI*cwhb;AH)aqA$%ww&R^g!^3nL_W)e^4lX(iC!c%z~pT@KJbUuU6!M8VG;S2d9 zzL@9mCG-tWzLLB7D*gsv&DZd?{7t@rzr{E5x9Q8Ad^6v|xAJ%SHvS&ZtE<)5 z8fcBRCfbAe7U*DYs5V?1p^el=X=Ai;+IVfEmaL`V`=IGshBi&h(q?G0w3oFxTDCS< zo2Sj!7HF?%uj0$0uWL)S<=RSZm9|=2tF6=4Ya6t;w2j)^+B@17d{gv8El>MM+o}Ci z+okQ+_Go*xecFEQGwp!(xpq)Hq#f42(2i(dYDcwW+HvhG?S%HV_Ko(fc1rs}JE#4q zo!2gCKWi7YOWI}a7ww96RlBDBs$JLeb*3x2s&ie_b={>Kx~boz7txFArS%ZKq8_S; z>EU{W9;rv^(Rz#?t5?!1>s9nPy{aCsSJSKOHT0T#Exop0hrVs9H`5>1o9iv;3#WQ} z{RzE;-cj$QKdE=tpVCL_qx7-*IDGYRyq>D3=^6S<`ZPUL&(f#sGxVAI0{si1C6k${1seGZKyQ#sp)sF~vwV(u|jkX~uM8hB3={ z*_dO@HRc-&jD^Ol#vp)K#R~=W-K>W7^{pojMc^(<4t3|vB7xD*l27r-Z3^C zTa9;(ZN_`X2gVNLLnF`l*w|})YV0>YGY%VH7)OjRjibgfv9&Nb(m^UVe3a&v{b(sY}v%(w7WQHFgJ@!Rphvk}64deIQee7%Krg{i%V z_80^^zdl3zb!RNB17qgL>aX}-DSql|2f6C^td4+f_dUL8Z-J=~p{3M^%vBDwbx*GQMTq%3yYq(u6!$@{? 
zjlzR8fg5rs(gAKLoVnx{zvcEdx*wd{p0;SOI^=3HOZLJN`-krGIkYgb~Y2eIsPp3p*VS6Ctg$yXCu_5uzwG-p!=4hg^4mx?s0xd__=}ki)(9fot|N1 z1@nddZjvS4&*$J8<;h%@hc7aJDDK&SYb)3q*!f@`pA{yu>kp7z`U9^cZgg)B@E-6s zp1lU<8iuq-TD$?iLWGCnAo4W6;4SQM?nAN_VIY5Rfbk{54TYDUOYzwaPJUW^KAxKf zH_1qQ%VaQpiB>2j>juOpAs;v@?Nq zUX)K|3Fb8?_H3SCDXYu^AHbPA1%cB4f#=8Ws7u0 z9+By5ncuJ`DxnFRfM+n`@OL-z$!^L&;FLG4JhRKXF5UV-&rtgBM)?x?0OtSK-q}Fe zRaJNV+y6j8Bfm%k1*yoTu^6>+#t_ zAM@`5pIn}Jh)!!JYY((JP2gklSYx8g`h7=hE^Du%&-m5yRKIFH3;G8>@t3^}mBqi% zXM*Xl-hEp9?RlBIf%(zAs?A0GqECcxN=HF_SLN~e9^jkl+3_53xw0cJ^W1IEPucT^ z%1rh3PCdi86B2h;hso8#J4lCTlA*1IoK@zh1+AYu8$_uw=-Sio{hgyDaCgUK9O#WPD(F{ zK9+8x{z*QM^1A3ubZ?6vAhxiVm@U0c_VRo)Jcs;;b!+zBfV~1}kGh+(V1t{Ot?{td ze|LBz{<;C#8=_{DyTQkO=5jAO?$w-{t~KSXPHm#o+#A+&R#z~ErtIA^>)-pjACc{@m_?{Y8n0gK~4=0jt9gpZ9zXZb^+EPkRf(7v&bIjLzM zscs}bBlPpS4e=}TEU?cq$5mqUFWoGe9NduD&hRz4v<8GUkYCMT3NU&yt& zkbLG2dLLU=7WnlR&S86@RXj5tEv|W91Bqhx8;yt8J>!+}6usx`@z5CQZ!$8S3iKoK z(VUOO$9zG$7y64X$Q?_b^{=%<4X?VqLqEfx9@9yAv2m6_ap30Nd{hYcd<9X>hw$IIF zkDJ4DXq%J1Exy6%wEwYB^ZfnvT4>%uAK{N`U$ptekA(x|{uumnt9yq0?NQ~7$)|~R z>)bkMHi1`zZ^0&8$$_`lYlm(tbX&m>gP-6$^Dz8`uLrM5{))CM^8PyO>J0d!jGt_5 z@{$XX{|TG>E%5n1_1BX-uF;$%AF}!?z4Csqb?cH7hV%`3*j$pkOAcpglS6#oR9l9A zHTAcsA2{UvA%_lU?4kb1Lw~c6`oYi6_QZ;Hj92{EA%?mGp)|D zUVrGIUk%?|Y;IqToSO}{G50zT-in{x8qUoJfN$k~;Xvq(?jZQv{RMhd$q^s+fLjlO zUlpHve58x20s+HV1%W!?L+_#F72ggrOuEcl`285NyFq?gZ=z1>1izL(tD zLzw@7t#85SKhIgB1qgPROrDc30+V?T{X)C&SJAU%x`sLNb5z^6c20-RufxVz@H_8M z(leviVdB}1sNHw_5k(i%j?z8^YOWIrjzRI9gFU>^{z#_gt<^$ z;%=NzVm}J69}7pPq;Ji2QQvQHZ@$y*GeAc4FN}$E5%u;C#QLb8%ICTxqkna6_8udQ zC-_St*ERXn%cgekuoJ8t{Tj<{tUme3{{PBtm5o#HG4kThS90u6bXwaeajSZr9Atx=6RjBmMlp3dmD)}q~h+_?^# z1N=PN!i(NaC-*C;0V`+sKhWE}2^+_MqR*c})8y$~ZCOOMbv5Pq&cQ)oe|#r*B7#hQ zkfk}1o&nUJ$sgd88aaf10S9`~W;}AAQ(XYH#cP1&`PgjCS^oiYE0ah0KLvq*4K^9Z zOZ>7i2xok1@fE5s(qSpt6o`1c5|y}>nGv0x^2Wu=GickTcJ7p36K~~S2}ou|^OCdp z{Kr11Y9QWjb+$hA900ljd&Vb=Sp9Rq7Cr6M-7Xr#MS9<6GUEu^Y8a>~JKYxztE=ua zQhSk4@wuAz+wczj^<3l|>Wg)v8S3Nt_SEzX^xwr+;aNEF7hlhN06avqCFi5MA{6xs 
z^u_j2C%R&Nb-y69n#R))=@fMHxVFAtS@pp#de(ej1*`$|UT3|3|EJt@EUVg@=>>6!P0cD~|0FIeDZviRcw(m~J4_dFhI3p81*AJ!iF2D&gFUw;qu z;&~6vH>+=Q?%u{(IHtBG5R zL(da#I1u{$GYuh*ZAX8>mvVWxSH(ZIiMBv5IeE-1)LY$r_o`ZYm522SJk_RLi2LAV z_2AUxg2BU*iwEQ6uYnr}XD2HLCy~oI1KC>7!sW^fMj)msomYB-FJ~-Cjv11WbwU4hVz0kr~ zRy*T`3V6By@6ZD08}GUH{}TT{#QB=xet6(Y$iMpd&+*B;>c5Hn%kLxqy&HDUVD)6>-TR zk2mlNSS0@WKP3{IL!v|KNUJwCzXKIJ6(eM^ezfP>{Ao1W(b|S#flLH|kG*zyh=fq#2d-PD>B`HQy~OH9bGszkm7g?el4`HS{e z9&4sXp9%GDb^T)qpH$UOpLmaJjOZ1$yR)KQ?t{rQT;1`cL4K&ZlI&^yfCZlwtmQghhUL#^svcU$osCD@fLR!ch?K!=Uf+` zttrRX@h?#}Q{L&y(Ixz=u#uaKPUZM=>ObR-Rz2l~&~_PqjD9_oAAk>SKx;$dTEJ6* z3H`nB5p8s{z_Y*|(Xq&7ER<)Wdzs%JdpMp%J&y7Zy06!E?W!g3V9&aP*DO-(zOQBx=EIpGWm>=O#&al}@ddgm>^4?hXZcCqwE=g1HPqOkp zkCA2V_~SXqKGn_B8j+te4`s&J5x4R^8`g_#R)ufM${)+{>FzaEvV#u7@8EY0ra*^Z zX?{)bTD;qxg1t-lZLUd?=V z#Q(r}m$ELeOMi+@nk-iR5}i}m8Ljnm6AvPz89!g;=7YyI%;bA4JJX-?Y_`<;pl>NT zlXbGx&Bo4cwCO#WV%Z{W^$U)&vh;y~vLc_nD8)?F5Fdi{DxEBU&}{5Z%67~A+U;p;`6^L`JZ_thpR z_>alU<#@X*c2S@1&h_!@w;`-o*4@9c?!Fe*T_(RJ*Teih{Za1kwf-6WU4AWpJ{jM5 z%pD$V#k`l9uW~%c$3UAq;zEr+9YJ0_488e#v@#upM`yGu-EDDTF>?lppL`nI&r%%V z`6%0~3bEDpU+FK}5XTfx7P9aCkoE8u`iAwr(>2Bfa=FYb5F;{`I?Qe4}JW^cD9y)=?XN zIx{}j&177Y4Q4~zoBqW5bRk1~!b9!^?4b4e%IM?B9try0^mr=k3@}^tr~eoArw>K% zVGRBJhS)0B<|V8->C($H>)Lcwe37nQ`JS%*$@?R@6Y>L`r1>HyA4T2VcnM>ae&;e* z&8)+FG8<*|$@#X{%Y4Ri7W=&9w#45<&Tpa99&-+TFLC4Hbs23F1lXok;OC-$bSK1B_oefxrq@@{`xoecf;%+A z4`_RO7?jbi>^NUnmIMC_tju_6zd4gKZ~s#=SZluxJO}t2;4c8t^DWRy2Ozq41M`6U zC>y^_=g(5dwRX~sdf4;foD(isX__m75rvsA7w;UzQ;5_hK zoE!WTFwbe?p>hhCGm`aFU&$I{%!0j{uvY?WO#)48O|_@EbCG_m$t4I|AFD zMLswQJdt%Pd{**i_UoJ7Bz$ZVdBLRQ4&Z1v$?Z!Yx3#?mpC<;ec3!1*u9!o8+4dsx z32>SB_?;og#ut@$8x8cfA8CIQJy`kORGt^{+^aZgeDHTZZ=ihiD7ViY51s?4d<4E+ zA`bX=VQAt-(hpwRD;56^rT$?zMRe57WpAEgeKxu|(9Y%GCY%A?9QG#V#odP2xH4nh zV)gVfOr3bgCs}+|Y*>JP%02CT0Y08}z96o++34S$>E-Q$ZLpu&%EqZ!u6zqnp4IRg zR|3l9fU8L$cIkV!6&zC!2riT+L&e#R&OMoqSF{&zLP&X3dt+xg;Fq1e+kk_RnGb!#N`xdyNf_L~gtM zvla(FcIJ`aXpP{f(3x!lhR%;q@r^trKR;;gc9DPVWDR_neSZP={#$o6W#tRwZ6EK& 
zw|n_Goc7>5IIH#Jd;M;5)Cg{L6M@OJAHn|COzv&_G-b85h?Z~9`u+P^*QEWBHkF&B zb679RdHRW4y~K-8VMFCKifQCm_@eeM?Je_x6B&!orFDP89frl6B9>3fUvy^WZevUw z=ByT9haSh`izA&?M>wOAJ3mA2{1khKokeT0%}stTC4VDM>WrJ!kGGh(m^@Fhv49z` z)`veMkAxLFHU5A@&RUJ**yrw#K^xAb)i~tr^>BSXTC=mK;owtaVa7F;eoY1J&#*P; zR@RI^$1?WlnsgU_tvjYxJI@AR_+8;zpLm{S+dp-eb_sDn_h^27molu}!QU^`!-2jg z9)!41i!)#8tnryp9t{uOAB1~^0%kobzo-lN>k{_IrsR%vKky7N2<#yaMFPu>lpobV zvYLLY>9?7Fo9VZiew))h$kG|2F2HjM(A3b7?gyR$27x{79g%?CY7TH1`O*E#kuv;4 z@OK2{PfLKNcqMtt%Jdmv5ZJ?B8yP4E8v&AUA@^Iu<)W+dy}H1@+te@ zZ4@8|ECHI59oS_DcG-blcF?zz@d3#4a}n&B*^06KTKQ+tJ?Io<<^4kc8a(Dajxs0k zK5DFa33}AS2R!Z?y)`a;-tKmj+59dwXM<#uo5?wD3D6us1{afV2;b3~%E z1i!67?#A6~dpf9o*!Ax+cpNM+KO*M;T7bP1fb$4@Ks){`S+)M4+~4Q_<|kUa26rG^ zXBO6h&NTwgILSyhBL7Y7jB^f{Gfwh#ep!?A5aVk?p2N>vj`enAAMjOQ2Vdvz1vniR z|Ik(ML>kpE@4L%=6Z&v|ejCrc10j#?=RV!Tc(bLqXJ2xXz1!&1Z;p?9%Hs9qv@ef8 zz+TL~JAGu2V*uSTyb(B!=Y>|Ue1f|N;(8zXp={DZo~s;DIbW>1W7eF*H;&VL0QNE6 z7jy#&?Y#!vO)9sAX9qZssqk8aPrpi-w#De8JM?i@Ud8XJuObgp?k|7hgbBVXd)LrM z{;hfHW&eS1`p>}0Dq9`7wT%|UiCn_1-?@mht5knE1V&~M>^?W ziXO4CiB}(VmjjDr17t5?TzWtCQQ9s9E@j>S2EJ!7kB#7^fZnI*Uf^_MRZFJtHvHjm zqw)OS3Xj9#I}^Fxb{^|Vf6KV-?3Ly4;T%*V2jMOnU;l=3`Sdw#eYDZ|b5cqD;AQui zwztUcT5l`)-Few=ZMvO*x6*%mW)=DicDYvr3pF0ifz6Z7x!D~HdYun^79ThU|2+@e zj5GV(XR&wEu)C2F?n~UG>C+>?&SG7(tFj#6ey76wy2^>jKOHz9xzkN%8{>F@XWgG2 zO3tf0V(#i_f2H(-*9O+~v*fFGwxX}@cFrJI)m@(ER{3fyjBl10Z!>MmWdn5T4vg_U zgdI9*FDGwh-X-r;Vh8u{ntOQYeWL86XUX#S)F0=((d#Y-N*=hg8=|>Pc{}%i<@hY_ z8W(Uzx`%vlp547E&TR)i1uWG#G#;KW176RV5(8gW-6HrO688*Nu+s|Qvkh;5ZUDHk zp&cKZ<+>Z3>mFAIQhfa))}i3d?l^;}MA83hauEyER!dC2oRcEL;U zrj7$o5B-uG;RWzcp^NtsUJmqDw}LyDFeaT>C7(9aeID91l!~W{Mz*kWC-!C%wgw{^-%e zCvRlaHf&Kn{>{{{;5X`;ynJxC=_7j&jlGp|3#K!d6O3=r_1oADkWI7tj;eOqG|cz^ zFdwo>FFL;&UfDb_me5zaNN*34FIxnVO|UaDMf-tb+%jUMf3KMB;TBuMGdVYNX28DT z-^Y&d4wdy%d~doh1LeLtW3;<-pt+@CjavdVH9SbZ z`5<}RgXEhJrhCRUx=7G)1MN4^elP9!(ta=P_mZ!rv-%A^S zoIgemamsxGxLClLJF$_S9fTV$usEapKfA}GPUm}jccFHTk#ajZ*zmhC-AT|7z8%CY zy{E6bzakdtOs)H^SF*3^d5X~zqrfHdh;g({G25(RE}QLML3d~+?i)orm433Ny`xqi 
z+SfV#?rA;yKK0tBlgMjvK;bp*5pqVl0J9yp6LTsd% zq1a%1vYSo&I|1#Zikn9Q;-|a7z6{Sbzg6szABGq;1^-oyVokD!U!gT@{;u`yeck)8 z)?5~|BpV)zA&O~Jl9L&q;s|TXVwUb$!y2i@S~HR#Vo8RP9h>|tmY9CxAJ$GS_Ogd! zs@8La?!u><&267%pD790!y5T564~bcfY#dgfeV1Y&&sm9>@RzLQ137Br8DW= 33.5: - return idx - - return -1 - #}}} - -def write_dimension_table(latex, registry, mode):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - dimension_table_header = '{\\bf Name} & {\\bf Units} & {\\bf Description}' - - latex.write('\section{Dimensions}\n') - latex.write('\label{sec:%s_dimensions}\n'%(mode)) - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{1.2in} || p{1.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' %s \\endfirsthead\n'%dimension_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n'%dimension_table_header) - latex.write(' \hline \n') - latex.write(' \hline \n') - for dims in registry.iter("dims"): - for dim in dims.iter("dim"): - dim_name = dim.attrib['name'] - try: - dim_description = dim.attrib['description'] - except: - dim_description = latex_missing_string - - try: - dim_units = dim.attrib['units'] - if dim_units == "": - dim_units = latex_missing_string - else: - dim_units = "$%s$"%dim_units.replace(' ', '$ $') - except: - dim_units = latex_missing_string - - if dim_description == "": - dim_description = latex_missing_string - else: - equations = dim_description.find('$') - if equations != -1: - dim_desc_split = dim_description.split("$") - - if dim_description.replace('_','')[0] == "$": - replace = False - dim_description = "$" - else: - replace = True - dim_description = "" - - for part in dim_desc_split: - if replace: - dim_description = "%s %s"%(dim_description, part.replace('_','\_')) - replace = False - else: - dim_description = "%s $%s$"%(dim_description, part) - replace = True - else: - dim_description = "%s"%dim_description.replace('_','\_') - - latex.write(' %s & %s & %s \\\\ 
\n'%(dim_name.replace('_','\_'), dim_units.replace('_','\_'), dim_description.replace('_','\_'))) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') -#}}} - -def write_namelist_table(latex, registry, mode):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - namelist_table_header = '{\\bf Name} & {\\bf Description}' - - latex.write('\section[Namelist options]{\hyperref[chap:namelist_sections]{Namelist options}}\n') - latex.write('\label{sec:%s_namelist_tables}\n'%(mode)) - latex.write('Embedded links point to more detailed namelist information in the appendix.\n') - for nml_rec in registry.iter("nml_record"): - rec_name = nml_rec.attrib['name'] - #latex.write('\subsection[%s]{\hyperref[sec:nm_sec_%s]{%s}}\n'%(rec_name.replace('_','\_'), rec_name, rec_name.replace('_','\_'))) - latex.write('\subsection[%s]{%s}\n'%(rec_name.replace('_','\_'), rec_name.replace('_','\_'))) - latex.write('\label{subsec:%s_nm_tab_%s}\n'%(mode, rec_name)) - - # Add input line if file exists. 
- try: - junk_file = open('%s/%s.tex'%(options.latex_dir,rec_name), 'r') - latex.write('\input{%s/%s.tex}\n'%(options.latex_path, rec_name)) - junk_file.close() - except: - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n'%namelist_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n'%namelist_table_header) - latex.write(' \hline\n') - latex.write(' \hline\n') - - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib['name'] - - try: - opt_description = nml_opt.attrib['description'] - except: - opt_description = latex_missing_string - - if opt_description == "": - opt_description = latex_missing_string - else: - equations = opt_description.find('$') - if equations != -1: - opt_desc_split = opt_description.split("$") - - if opt_description.replace(' ','')[0] == "$": - replace = False - opt_description = "$" - else: - replace = True - opt_description = "" - - for part in opt_desc_split: - if replace: - opt_description = "%s %s"%(opt_description, part.replace('_','\_')) - replace = False - else: - opt_description = "%s $%s$"%(opt_description, part) - replace = True - else: - opt_description = "%s"%opt_description.replace('_','\_') - - idx = break_string(opt_name) - if idx >= 29: - latex.write(' \hyperref[sec:nm_sec_%s]{%s-}\hyperref[sec:nm_sec_%s]{%s}& %s \\\\\n'%(opt_name, opt_name[0:idx].replace('_','\_'), opt_name, opt_name[idx:].replace('_','\_'), opt_description)) - else: - latex.write(' \hyperref[sec:nm_sec_%s]{%s} & %s \\\\\n'%(opt_name, opt_name.replace('_','\_'), opt_description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') -#}}} - -def write_variable_table(latex, registry, mode):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - 
variable_table_header = '{\\bf Name} & {\\bf Description}' - - latex.write('\section[Variable definitions]{\hyperref[chap:variable_sections]{Variable definitions}}\n') - latex.write('\label{sec:%s_variable_tables}\n'%mode) - latex.write('Embedded links point to more detailed variable information in the appendix.\n') - for var_struct in registry.iter("var_struct"): - struct_name = var_struct.attrib['name'] - latex.write('\subsection[%s]{\hyperref[sec:var_sec_%s]{%s}}\n'%(struct_name.replace('_','\_'),struct_name,struct_name.replace('_','\_'))) - latex.write('\label{subsec:%s_var_tab_%s}\n'%(mode, struct_name)) - - try: - junk_file = open('%s/%s_struct.tex'%(options.latex_dir,struct_name), 'r') - latex.write('\input{%s/%s_struct.tex}\n'%(options.latex_path, struct_name)) - junk_file.close() - except: - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n'%variable_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n'%variable_table_header) - latex.write(' \hline\n') - - for node in var_struct.getchildren(): - if node.tag == 'var_array': - for var in node.iter("var"): - var_name = var.attrib['name'] - var_description = var.attrib['description'] - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = 
"%s"%var_description.replace('_','\_') - - idx = break_string(var_name) - if idx > -1: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s-}\hyperref[subsec:var_sec_%s_%s]{%s} & %s \\\\\n'%(struct_name, var_name, var_name[0:idx].replace('_','\_'), struct_name, var_name, var_name[idx:].replace('_','\_'), var_description)) - else: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s} & %s \\\\\n'%(struct_name, var_name, var_name.replace('_','\_'), var_description)) - latex.write(' \hline\n') - elif node.tag == 'var': - var = node - var_name = var.attrib['name'] - var_description = var.attrib['description'] - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') - - idx = break_string(var_name) - if idx > -1: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s-}\hyperref[subsec:var_sec_%s_%s]{%s }& %s \\\\\n'%(struct_name, var_name, var_name[0:idx].replace('_','\_'), struct_name, var_name, var_name[idx:].replace('_','\_'), var_description)) - else: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s} & %s \\\\\n'%(struct_name, var_name, var_name.replace('_','\_'), var_description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') -#}}} - -def write_namelist_sections(latex, sorted_opts, forward_registry, analysis_registry):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - - #latex.write('\chapter[Namelist 
options]{\hyperref[chap:namelist_tables]{Namelist options}}\n') - latex.write('\chapter[Namelist options]{Namelist options}\n') - latex.write('\label{chap:namelist_sections}\n') -# latex.write('Embedded links point to information in chapter \\ref{chap:namelist_tables}\n') - - for opt in sorted_opts: - found = False - in_forward = False - in_analysis = False - forward_rec_name = "" - analysis_rec_name = "" - - # Search forward registry - for nml_rec in forward_registry.iter("nml_record"):#{{{ - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - - if(in_forward == False and opt_name == opt): - in_forward = True - forward_rec_name = nml_rec.attrib["name"] - if(not found): - found = True - opt_type = nml_opt.attrib["type"] - opt_value = nml_opt.attrib["default_value"] - - try: - opt_possible_values = nml_opt.attrib["possible_values"] - except: - opt_possible_values = latex_missing_string - - try: - opt_units = nml_opt.attrib["units"] - if opt_units == "": - opt_units = latex_missing_string - else: - opt_units = "$%s$"%opt_units.replace(' ', '$ $') - except: - opt_units = latex_missing_string - - try: - opt_description = nml_opt.attrib["description"] - except: - opt_description = latex_missing_string - - if opt_possible_values == "": - opt_possible_values = latex_missing_string - - - if opt_description == "": - opt_description = latex_missing_string.replace('_','\_') - else: - equations = opt_description.find('$') - if equations != -1: - opt_desc_split = opt_description.split("$") - - if opt_description.replace('_','')[0] == "$": - replace = False - opt_description = "$" - else: - replace = True - opt_description = "" - - for part in opt_desc_split: - if replace: - opt_description = "%s %s"%(opt_description, part.replace('_','\_')) - replace = False - else: - opt_description = "%s $%s$"%(opt_description, part) - replace = True - else: - opt_description = "%s"%opt_description.replace('_','\_') -#}}} - - # Search analysis registry if not 
found yet - for nml_rec in analysis_registry.iter("nml_record"):#{{{ - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - - if(in_analysis == False and opt_name == opt): - in_analysis = True - analysis_rec_name = nml_rec.attrib["name"] - if(not found): - found = True - opt_type = nml_opt.attrib["type"] - opt_value = nml_opt.attrib["default_value"] - - try: - opt_possible_values = nml_opt.attrib["possible_values"] - except: - opt_possible_values = latex_missing_string - - try: - opt_units = nml_opt.attrib["units"] - if opt_units == "": - opt_units = latex_missing_string - else: - opt_units = "$%s$"%opt_units.replace(' ', '$ $') - except: - opt_units = latex_missing_string - - try: - opt_description = nml_opt.attrib["description"] - except: - opt_description = latex_missing_string - - if opt_possible_values == "": - opt_possible_values = latex_missing_string - - - if opt_description == "": - opt_description = latex_missing_string.replace('_','\_') - else: - equations = opt_description.find('$') - if equations != -1: - opt_desc_split = opt_description.split("$") - - if opt_description.replace('_','')[0] == "$": - replace = False - opt_description = "$" - else: - replace = True - opt_description = "" - - for part in opt_desc_split: - if replace: - opt_description = "%s %s"%(opt_description, part.replace('_','\_')) - replace = False - else: - opt_description = "%s $%s$"%(opt_description, part) - replace = True - else: - opt_description = "%s"%opt_description.replace('_','\_') -#}}} - - # If option has been found, write it out as a section. 
- if(found): - opt_name = opt - #latex.write('\section[%s]{\hyperref[sec:nm_tab_%s]{%s}}\n'%(opt_name.replace('_','\_'),rec_name,opt_name.replace('_','\_'))) - latex.write('\section[%s]{%s}\n'%(opt_name.replace('_','\_'),opt_name.replace('_','\_'))) - latex.write('\label{sec:nm_sec_%s}\n'%opt_name) - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' In build modes: & ') - if(in_forward): - latex.write('\hyperref[subsec:forward_nm_tab_%s]{forward} '%(forward_rec_name)) - if(in_analysis): - latex.write('\hyperref[subsec:analysis_nm_tab_%s]{analysis} '%(analysis_rec_name)) - latex.write('\\\\\n') - - latex.write(' \hline\n') - latex.write(' Type: & %s \\\\\n'%opt_type.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' Units: & %s \\\\\n'%opt_units.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' Default Value: & %s \\\\\n'%opt_value.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' Possible Values: & %s \\\\\n'%opt_possible_values.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' \caption{%s: %s}\n'%(opt_name.replace('_','\_'), opt_description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') -#}}} - -def write_variable_sections(latex, sorted_structs, forward_registry, analysis_registry):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - - #latex.write('\chapter[Variable definitions]{\hyperref[chap:variable_tables]{Variable definitions}}\n') - latex.write('\chapter[Variable definitions]{Variable definitions}\n') - latex.write('\label{chap:variable_sections}\n') -# latex.write('Embedded links point to information in chapter \\ref{chap:variable_tables}\n') - - for struct in sorted_structs: - ##latex.write('\section[%s]{\hyperref[sec:var_tab_%s]{%s}}\n'%(struct.replace('_','\_'),struct, struct.replace('_','\_'))) - latex.write('\section[%s]{%s}\n'%(struct.replace('_','\_'), struct.replace('_','\_'))) - 
latex.write('\label{sec:var_sec_%s}\n'%struct) - - unique_vars = []; - # Determine all variables in the current var struct from the forward mode - for var_struct in forward_registry.iter("var_struct"):#{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) -#}}} - - # Determine all variables in the current var struct from the analysis mode - for var_struct in analysis_registry.iter("var_struct"):#{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) - #}}} - - sorted_vars = sorted(unique_vars) - del unique_vars - - for var_name in sorted_vars: - found = False - in_forward = False - in_analysis = False - in_var_array = False - - # Try to extract var from forward mode - for var_struct in forward_registry.iter("var_struct"): #{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_forward = True - - if(not found): - found = True - in_var_array = True - - struct_time_levs = var_struct.attrib['time_levs'] - var_arr_name = var_arr.attrib["name"] - var_type = var_arr.attrib["type"] - var_dims = var_arr.attrib["dimensions"] - try: - var_time_levels = var_arr.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - 
var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - var_arr_group = var.attrib["array_group"] - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - var_path = "%s"%(var_arr_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} - - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_forward = True - - if(not found): - found = True - in_var_array = False - struct_time_levs = var_struct.attrib['time_levs'] - var_type = var.attrib["type"] - var_dims = var.attrib["dimensions"] - try: - var_time_levels = var.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = 
latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - var_path = "%s"%(var_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} -#}}} - - # Try to extract var from analysis mode - for var_struct in analysis_registry.iter("var_struct"): #{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_analysis = True - - if(not found): - found = True - in_var_array = True - - struct_time_levs = var_struct.attrib['time_levs'] - var_arr_name = var_arr.attrib["name"] - var_type = var_arr.attrib["type"] - var_dims = var_arr.attrib["dimensions"] - try: - var_time_levels = var_arr.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if 
var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - var_arr_group = var.attrib["array_group"] - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - if int(struct_time_levs) > 1: - var_index = "domain %% blocklist %% %s %% index_%s"%(struct_name, var_name_in_code.replace('_','\_')) - var_path = "%s"%(var_arr_name) - else: - var_index = "domain %% blocklist %% %s %% index_%s"%(struct_name, var_name_in_code.replace('_','\_')) - var_path = "%s"%(var_arr_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} - - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_analysis = True - - if(not found): - found = True - in_var_array = False - struct_time_levs = var_struct.attrib['time_levs'] - var_type = var.attrib["type"] - var_dims = var.attrib["dimensions"] - try: - var_time_levels = var.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' 
-#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - if int(struct_time_levs) > 1: - var_path = "domain %% blocklist %% %s %% time_levs(:) %% %s %% %s"%(struct_name, struct_name, var_name) - else: - var_path = "domain %% blocklist %% %s %% %s"%(struct_name, var_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} -#}}} - - # Build stream list from forward mode - forward_streams = ""#{{{ - if(in_forward): - for streams in forward_registry.iter("streams"): - for stream in streams.iter("stream"): - for var in stream.iter("var"): - name = var.attrib["name"] - if(name == var_name): - forward_streams = "%s %s"%(forward_streams, stream.attrib["name"]) - #}}} - - # Build stream list from analysis mode - analysis_streams = ""#{{{ - if(in_analysis): - for streams in analysis_registry.iter("streams"): - for stream in streams.iter("stream"): - for var in stream.iter("var"): - name = var.attrib["name"] - if(name 
== var_name): - analysis_streams = "%s %s"%(analysis_streams, stream.attrib["name"]) - #}}} - - if(found): - struct_name = struct - #latex.write('\subsection[%s]{\hyperref[sec:var_tab_%s]{%s}}\n'%(var_name.replace('_','\_'),struct_name, var_name.replace('_','\_'))) - latex.write('\subsection[%s]{%s}\n'%(var_name.replace('_','\_'), var_name.replace('_','\_'))) - latex.write('\label{subsec:var_sec_%s_%s}\n'%(struct_name,var_name)) - # Tabular Format: - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' In build modes: & ') - if(in_forward): - latex.write('\hyperref[subsec:forward_var_tab_%s]{forward} '%(struct)) - if(in_analysis): - latex.write('\hyperref[subsec:analysis_var_tab_%s]{analysis} '%(struct)) - latex.write('\\\\\n') - latex.write(' \hline \n') - latex.write(' Type: & %s \\\\\n'%var_type) - latex.write(' \hline \n') - latex.write(' Units: & %s \\\\\n'%var_units) - latex.write(' \hline \n') - latex.write(' Dimension: & %s \\\\\n'%var_dims) - latex.write(' \hline \n') - latex.write(' Persistence: & %s \\\\\n'%var_persistence) - latex.write(' \hline \n') - latex.write(' Number of time levels: & %s \\\\\n'%var_time_levels) - latex.write(' \hline \n') - - if(in_var_array): - latex.write(" Index in `%s' Array: & `index\_%s' in `%s' pool \\\\\n"%(var_path.replace('_','\_'), var_name.replace('_', '\_'), struct_name.replace('_','\_'))) - latex.write(' \hline \n') - pool_path="`%s' in `%s' pool"%(var_path, struct_name) - latex.write(' Pool path: & %s \\\\\n'%pool_path.replace('_', '\_')) - latex.write(' \hline \n') - if(in_var_array): - latex.write(' Array Group: & %s \\\\\n'%var_arr_group.replace('_','\_')) - latex.write(' \hline \n') - latex.write(' \caption{%s: %s}\n'%(var_name.replace('_','\_'),var_description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - -#}}} - -if not options.forward_registry_path: - parser.error("The forward mode's Registry file 
is required") - -if not options.analysis_registry_path: - parser.error("The analysis mode's Registry file is required") - -if not options.latex_dir: - print 'Directory with group latex files is missing. Skipping addition of latex files.' - extra_latex = False -else: - if not options.latex_path: - parser.error('Need latex path with latex directory.') - extra_latex = True - -latex_missing_string = '{\\bf \color{red} MISSING}' -dimension_table_header = '{\\bf Name} & {\\bf Units} & {\\bf Description}' -variable_table_header = '{\\bf Name} & {\\bf Description}' -namelist_table_header = '{\\bf Name} & {\\bf Description}' - -forward_registry_path = options.forward_registry_path -analysis_registry_path = options.analysis_registry_path - -try: - forward_registry_tree = ET.parse(forward_registry_path) -except: - parser.error('%s does not exist or is not parsable. Exiting.'%forward_registry_path) - -try: - analysis_registry_tree = ET.parse(analysis_registry_path) -except: - parser.error('%s does not exist or is not parsable. 
Exiting.'%analysis_registry_path) - -forward_registry = forward_registry_tree.getroot() -analysis_registry = analysis_registry_tree.getroot() - -# Build dimension lists -forward_dims = []; -analysis_dims = []; -unique_dims = []; - -for dims in forward_registry.iter("dims"): - for dim in dims.iter("dim"): - dim_name = dim.attrib["name"] - forward_dims.append(dim_name) - if(unique_dims.count(dim_name) == 0): - unique_dims.append(dim_name) - -for dims in analysis_registry.iter("dims"): - for dim in dims.iter("dim"): - dim_name = dim.attrib["name"] - analysis_dims.append(dim_name) - if(unique_dims.count(dim_name) == 0): - unique_dims.append(dim_name) - -sorted_dims = sorted(unique_dims) -del unique_dims - -# Build structure lists -forward_structs = []; -analysis_structs = []; -unique_structs = []; - -for struct in forward_registry.iter("var_struct"): - struct_name = struct.attrib["name"] - forward_structs.append(struct_name) - if(unique_structs.count(struct_name) == 0): - unique_structs.append(struct_name) - -for struct in analysis_registry.iter("var_struct"): - struct_name = struct.attrib["name"] - analysis_structs.append(struct_name) - if(unique_structs.count(struct_name) == 0): - unique_structs.append(struct_name) - -sorted_structs = sorted(unique_structs) -del unique_structs - - -# Build Namelist lists -forward_opts = []; -analysis_opts = []; -unique_opts = []; - -for nml_rec in forward_registry.iter("nml_record"): - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - forward_opts.append(opt_name) - if(unique_opts.count(opt_name) == 0): - unique_opts.append(opt_name) - -for nml_rec in analysis_registry.iter("nml_record"): - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - analysis_opts.append(opt_name) - if(unique_opts.count(opt_name) == 0): - unique_opts.append(opt_name) - -sorted_opts = sorted(unique_opts) -del unique_opts - -# Write file that defines version string for model. 
-latex = open('define_version.tex', 'w+') -try: - version_string = forward_registry.attrib['version'] -except: - version_string = '{\\bf MISSING}' -latex.write('\\newcommand{\\version}{%s}\n'%version_string) -latex.close() - -# Write file to include for forward mode -# It should have sections for dimensions, namelist options/records, and -# variable and their structures. -latex = open('mode_forward_sections.tex', 'w+') -mode = 'forward' -write_dimension_table(latex, forward_registry, mode) -write_namelist_table(latex, forward_registry, mode) -write_variable_table(latex, forward_registry, mode) -latex.close() - -# Write file to include for analysis mode -# It should have sections for dimensions, namelist options/records, and -# variable and their structures. -latex = open('mode_analysis_sections.tex', 'w+') -mode = 'analysis' -write_dimension_table(latex, analysis_registry, mode) -write_namelist_table(latex, analysis_registry, mode) -write_variable_table(latex, analysis_registry, mode) -latex.close() - - -latex = open('namelist_sections.tex', 'w+') -write_namelist_sections(latex, sorted_opts, forward_registry, analysis_registry) -latex.close() - -latex = open('variable_sections.tex', 'w+') -write_variable_sections(latex, sorted_structs, forward_registry, analysis_registry) -latex.close() - - diff --git a/python_scripts/namelist_generation/parse_xml_registry.py b/python_scripts/namelist_generation/parse_xml_registry.py deleted file mode 100755 index 648abd4bc..000000000 --- a/python_scripts/namelist_generation/parse_xml_registry.py +++ /dev/null @@ -1,609 +0,0 @@ -#!/usr/bin/env python - -""" -This script parses a MPAS Registry.xml file to generates documentation for a -users or developers guide. - -Typical usage is as follows:: - - # set the core, one of ocean, landice, cice, etc. 
- export CORE= - # Set your repo directories: - export MPAS_REPO=~/repos/MPAS - export MPAS_TOOLS_REPO=~/repos/MPAS-Tools - export MPAS_DOCUMENTS_REPO=~/repos/MPAS-Documents - cd $MPAS_REPO - # Compile MPAS so you have a src/core_ocean/Registry_processed.xml file. - # Change the compiler as needed. - make CORE=$CORE gfortran - cd $MPAS_DOCUMENTS_REPO/users_guide/$CORE - # clean up blank lines at the top of the XML file - sed '/./,$!d' $MPAS_REPO/src/core_${CORE}/Registry_processed.xml > \ - Registry_cleaned.xml - $MPAS_TOOLS_REPO/python_scripts/namelist_generation/parse_xml_registry.py \ - -f Registry_cleaned.xml -d section_descriptions \ - -p ${CORE}/section_descriptions - cd .. - make clean CORE=$CORE - make CORE=$CORE - -The -f flag points to the processed registry file (typically with a full path). - -The -d flag points to the local or full path to .tex files that containing -section descriptions for providing additional information in the output latex -documentation. - -Section descriptions are required to be named whatever the section is. For -example, in a namelist, there might be a namelist record named -"&time_management". The script searches the directory listed with the -d -flag for a latex file named time_management.tex, and adds an input line to -the output latex documentation to include this file. - -The -p flag specifies the relative path inside the latex documentation where -the file should be input from. As an example, one might -run it as follows to generate the ocean core's documentation:: - - ./parse_xml_registry.xml -f mpas_root/src/core_ocean/Registry.xml \ - -d mpas_doc_root/users_guide/ocean/section_descriptions \ - -p ocean/section_descriptions - -On output, several files are created which are listed below. - namelist.input.generated - A default namelist.input file for the core that - owns the Registry.xml file. - dimensions.tex - A tabulated description of the dimensions for the core. 
- namelist_table_documentation.tex - A tabulated description of the namelist - options for the core. - namelist_section_documentation.tex - A more detailed section format - description of the namelist options - for the core. - variable_table_documentation.tex - A tabulated description of the variables - in the core. - variable_section_documentation.tex - A more detailed section formate - description of the variable in the - core. - define_version.tex - A simple file which can be included to define \version - inside the users guide. - -Authors: -======== -Doug Jacobsen, Xylar Asay-Davis -""" - - -import os -from optparse import OptionParser -import xml.etree.ElementTree as ET -from collections import OrderedDict -from PIL import ImageFont -import pkg_resources - - -def break_string(string, maxLength=150., font='cmunrm.otf', fontSize=10): - # {{{ - - # Note: max_length is in points, so 144. corresponds to 2 inches, the - # column width for namelist and variable names in tables in the user guide - - # font defaults to LaTex font (Computer Modern), and user guide font size - # in tables - - # if an absolute path to the font was not supplied, look relative to this - # script - if not os.path.isabs(font): - font = pkg_resources.resource_filename(__name__, font) - - font = ImageFont.truetype(font, fontSize) - size = font.getsize(string) - if size[0] <= maxLength: - # no need to split - return None - - bestBreakPoints = [] - - # first alpha-numeric character after a non-alpha-numeric character - for index in range(1, len(string)): - if not string[index-1].isalnum() and string[index].isalnum(): - bestBreakPoints.append(index) - - # find uppercase following lowercase or number - for index in range(1, len(string)): - if string[index-1].isalnum() and string[index-1].islower() \ - and string[index].isalpha() and string[index].isupper(): - bestBreakPoints.append(index) - - bestBreakPoints.append(len(string)) - - bestBreakPoints = sorted(bestBreakPoints) - - for index in range(1, 
len(bestBreakPoints)): - breakPoint = bestBreakPoints[index] - size = font.getsize(string[:breakPoint]) - if size[0] > maxLength: - breakPoint = bestBreakPoints[index-1] - return breakPoint - - # there is no good break point so we have to find an arbitrary one - print "Warning: no good breakpoint found for {}".format(string) - for breakPoint in range(1, len(string)+1): - breakPoint = bestBreakPoints[index] - size = font.getsize(string[:breakPoint]) - if size[0] > maxLength: - breakPoint = breakPoint-1 - return breakPoint - - raise ValueError("Could not find a breakpoint for {}".format(string)) - # }}} - - -def write_namelist_input_generated(): - # Write default namelist - namelist = open('namelist.input.generated', 'w') - for nml_rec in registry.iter("nml_record"): - namelist.write('&%s\n' % nml_rec.attrib['name']) - for nml_opt in nml_rec.iter("nml_option"): - if nml_opt.attrib['type'] == "character": - namelist.write('\t%s = "%s"\n' % ( - nml_opt.attrib['name'], - nml_opt.attrib['default_value'])) - else: - namelist.write('\t%s = %s\n' % ( - nml_opt.attrib['name'], - nml_opt.attrib['default_value'])) - - namelist.write('/\n') - - -def escape_underscore(string): - has_math_mode = (string.find('$') != -1) - if has_math_mode: - dim_desc_split = string.split("$") - replace = True - string = "" - for part in dim_desc_split: - if replace: - part = part.replace('_', '\_') - string = "{}{}".format(string, part) - replace = False - else: - string = "{}${}$".format(string, part) - replace = True - else: - string = string.replace('_', '\_') - return string - - -def get_attrib(element, attributeName, missingValue=None): - if missingValue is None: - missingValue = latex_missing_string - try: - attrib = element.attrib[attributeName] - except KeyError: - attrib = missingValue - if attrib == "": - attrib = missingValue - return attrib - - -def get_units(element): - units = get_attrib(element, 'units') - if units != latex_missing_string: - # units with the siunitx package - units 
= "\si{{{}}}".format(units.replace(' ', '.')) - units = escape_underscore(units) - return units - - -def get_description(element): - description = get_attrib(element, 'description') - description = escape_underscore(description) - return description - - -def get_linked_name(name, link): - indices = [] - index = 0 - while True: - newIndex = break_string(name[index:]) - if newIndex is None: - break - index += newIndex - indices.append(index) - - indices.append(len(name)) - newName = escape_underscore(name[0:indices[0]]) - for start, end in zip(indices[0:-1], indices[1:]): - namePiece = escape_underscore(name[start:end]) - newName = '{}\\-{}'.format(newName, namePiece) - - return '\hyperref[subsec:%s]{%s}' % (link, newName) - - -def write_var_struct_to_table(latex, var_struct, struct_name): - for node in var_struct: - if node.tag == 'var_struct': - write_var_struct_to_table(latex, node, struct_name) - elif node.tag == 'var_array': - write_var_array_to_table(latex, node, struct_name) - elif node.tag == 'var': - write_var_to_table(latex, node, struct_name) - - -def write_var_array_to_table(latex, var_array, struct_name): - for var in var_array.iter("var"): - write_var_to_table(latex, var, struct_name) - - -def write_var_to_table(latex, var, struct_name): - var_name = var.attrib['name'] - var_description = get_description(var) - - link = 'var_sec_{}_{}'.format(struct_name, var_name) - linkedName = get_linked_name(var_name, link) - - latex.write(' {} & {} \\\\\n'.format(linkedName, - var_description)) - latex.write(' \hline\n') - - -def get_var_structs(): - # use a dictionary to create lists of all top-level var_structs with the - # same name (e.g. 
state, tracers, mesh) - var_structs = OrderedDict() - for var_struct in registry: - if var_struct.tag != "var_struct": - continue - struct_name = var_struct.attrib['name'] - if struct_name in var_structs.keys(): - var_structs[struct_name].append(var_struct) - else: - var_structs[struct_name] = [var_struct] - return var_structs - - -def write_var_struct_section(latex, var_struct, struct_name, has_time): - for node in var_struct: - if node.tag == 'var_struct': - write_var_struct_section(latex, node, struct_name, has_time) - elif node.tag == 'var_array': - write_var_array_section(latex, node, struct_name, has_time) - elif node.tag == 'var': - write_var_section(latex, node, struct_name, has_time) - - -def write_var_array_section(latex, var_array, struct_name, has_time): - for var in var_array.iter("var"): - write_var_section(latex, var, struct_name, has_time, var_array) - - -def write_var_section(latex, var, struct_name, has_time, var_array=None): - var_name = var.attrib['name'] - var_name_escaped = escape_underscore(var_name) - if var_array is None: - var_type = var.attrib['type'] - dimensions = var.attrib['dimensions'] - else: - var_arr_name = escape_underscore(var_array.attrib['name']) - var_type = var_array.attrib['type'] - dimensions = var_array.attrib['dimensions'] - - persistence = get_attrib(var, "persistence", missingValue='persistent') - name_in_code = get_attrib(var, "name_in_code", missingValue=var_name) - units = get_units(var) - description = get_description(var) - - if has_time: - var_path = "domain % blocklist % {} % time_levs(:) % {} % {}".format( - struct_name, struct_name, var_name) - else: - var_path = "domain % blocklist % {} % {}".format(struct_name, var_name) - - var_path = escape_underscore(var_path).replace('%', '\%') - - latex.write('\subsection[%s]{\hyperref[sec:var_tab_%s]{%s}}\n' % ( - var_name_escaped, struct_name, var_name_escaped)) - latex.write('\label{subsec:var_sec_%s_%s}\n' % (struct_name, var_name)) - # Tabular Format: - 
latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' Type: & %s \\\\\n' % var_type) - latex.write(' \hline \n') - latex.write(' Units: & %s \\\\\n' % units) - latex.write(' \hline \n') - latex.write(' Dimension: & %s \\\\\n' % dimensions) - latex.write(' \hline \n') - latex.write(' Persistence: & %s \\\\\n' % (persistence)) - latex.write(' \hline \n') - - if var_array is not None: - array_group = escape_underscore(var.attrib['array_group']) - index = "domain % blocklist % {} % index_{}".format(struct_name, - name_in_code) - index = escape_underscore(index).replace('%', '\%') - - latex.write(' Index in %s Array: & %s \\\\\n' % (var_arr_name, - index)) - latex.write(' \hline \n') - - latex.write(' Location in code: & %s \\\\\n' % (var_path)) - latex.write(' \hline \n') - - if var_array is not None: - latex.write(' Array Group: & %s \\\\\n' % (array_group)) - latex.write(' \hline \n') - - latex.write(' \caption{%s: %s}\n' % (var_name_escaped, description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - - -def write_dimension_table_documentation(): - # Write dimension table documentation latex file. 
- latex = open('dimension_table_documentation.tex', 'w') - latex.write('\chapter{Dimensions}\n') - latex.write('\label{chap:dimensions}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{1.0in} || p{1.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' {} \\endfirsthead\n'.format(dimension_table_header)) - latex.write(' \hline \n') - latex.write(' {} (Continued) \\endhead\n'.format( - dimension_table_header)) - latex.write(' \hline \n') - latex.write(' \hline \n') - for dims in registry.iter("dims"): - for dim in dims.iter("dim"): - name = dim.attrib['name'] - name = escape_underscore(name) - units = get_units(dim) - description = get_description(dim) - - latex.write(' {} & {} & {} \\\\ \n'.format( - name, units, description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') - latex.close() - - -def write_namelist_table_documentation(): - # Write namelist table documentation latex file. - latex = open('namelist_table_documentation.tex', 'w') - latex.write('\chapter[Namelist options]{\hyperref[chap:namelist_sections]' - '{Namelist options}}\n') - latex.write('\label{chap:namelist_tables}\n') - latex.write('Embedded links point to more detailed namelist information ' - 'in the appendix.\n') - for nml_rec in registry.iter("nml_record"): - rec_name = nml_rec.attrib['name'] - rec_name_escaped = escape_underscore(rec_name) - latex.write('\section[%s]{\hyperref[sec:nm_sec_%s]{%s}}\n' % ( - rec_name_escaped, rec_name, rec_name_escaped)) - latex.write('\label{sec:nm_tab_%s}\n' % (rec_name)) - - # Add input line if file exists. 
- if os.path.exists('%s/%s.tex' % (options.latex_dir, rec_name)): - latex.write('\input{%s/%s.tex}\n' % (options.latex_path, rec_name)) - else: - print 'Warning, namelist description latex file not found: ' \ - '%s/%s.tex' % (options.latex_dir, rec_name) - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n' % namelist_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n' % namelist_table_header) - latex.write(' \hline\n') - latex.write(' \hline\n') - - for nml_opt in nml_rec.iter("nml_option"): - name = nml_opt.attrib['name'] - - description = get_description(nml_opt) - - link = 'nm_sec_{}'.format(name) - linkedName = get_linked_name(name, link) - - latex.write(' {} & {} \\\\\n'.format(linkedName, description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') - latex.close() - - -def write_namelist_section_documentation(): - # Write namelist section documentation latex file. 
- latex = open('namelist_section_documentation.tex', 'w') - latex.write('\chapter[Namelist options]{\hyperref[chap:namelist_tables]' - '{Namelist options}}\n') - latex.write('\label{chap:namelist_sections}\n') - latex.write('Embedded links point to information in chapter ' - '\\ref{chap:namelist_tables}\n') - for nml_rec in registry.iter("nml_record"): - rec_name = nml_rec.attrib["name"] - rec_name_escaped = escape_underscore(rec_name) - - latex.write('\section[%s]{\hyperref[sec:nm_tab_%s]{%s}}\n' % ( - rec_name_escaped, rec_name, rec_name_escaped)) - latex.write('\label{sec:nm_sec_%s}\n' % rec_name) - - for nml_opt in nml_rec.iter("nml_option"): - name = nml_opt.attrib["name"] - name_escaped = escape_underscore(name) - opt_type = escape_underscore(nml_opt.attrib["type"]) - default_value = escape_underscore(get_attrib(nml_opt, - "default_value")) - possible_values = escape_underscore(get_attrib(nml_opt, - "possible_values")) - units = get_units(nml_opt) - description = get_description(nml_opt) - - try: - opt_icepack_name = nml_opt.attrib["icepack_name"] - except KeyError: - opt_icepack_name = None - - latex.write('\subsection[%s]{\hyperref[sec:nm_tab_%s]{%s}}\n' % ( - name_escaped, rec_name, name_escaped)) - latex.write('\label{subsec:nm_sec_%s}\n' % name) - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' Type: & %s \\\\\n' % opt_type) - latex.write(' \hline\n') - latex.write(' Units: & %s \\\\\n' % units) - latex.write(' \hline\n') - latex.write(' Default Value: & %s \\\\\n' % default_value) - latex.write(' \hline\n') - latex.write(' Possible Values: & %s \\\\\n' % possible_values) - latex.write(' \hline\n') - if (opt_icepack_name is not None): - latex.write(' Icepack name: & \\verb+%s+ \\\\\n' % - opt_icepack_name) - latex.write(' \hline\n') - latex.write(' \caption{%s: %s}\n' % (name_escaped, description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') 
- latex.close() - - -def write_variable_table_documentation(): - - # Write variable table documentation latex file - latex = open('variable_table_documentation.tex', 'w') - latex.write('\chapter[Variable definitions]' - '{\hyperref[chap:variable_sections]' - '{Variable definitions}}\n') - latex.write('\label{chap:variable_tables}\n') - latex.write('Embedded links point to more detailed variable information ' - 'in the appendix.\n') - - var_structs = get_var_structs() - - for struct_name, var_struct_list in var_structs.items(): - struct_name_escaped = escape_underscore(struct_name) - latex.write('\section[%s]{\hyperref[sec:var_sec_%s]{%s}}\n' % ( - struct_name_escaped, struct_name, struct_name_escaped)) - latex.write('\label{sec:var_tab_%s}\n' % struct_name) - - if os.path.exists('%s/%s_struct.tex' % (options.latex_dir, - struct_name)): - latex.write('\input{%s/%s_struct.tex}\n' % (options.latex_path, - struct_name)) - else: - print 'Warning, variable section description latex file not ' \ - 'found: %s/%s_struct.tex' % (options.latex_dir, struct_name) - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n' % variable_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n' % variable_table_header) - latex.write(' \hline\n') - - for var_struct in var_struct_list: - write_var_struct_to_table(latex, var_struct, struct_name) - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') - latex.close() - - -def write_variable_section_documentation(): - - # Write variable section documentation latex file - latex = open('variable_section_documentation.tex', 'w') - latex.write('\chapter[Variable definitions]' - '{\hyperref[chap:variable_tables]' - '{Variable definitions}}\n') - latex.write('\label{chap:variable_sections}\n') - 
latex.write('Embedded links point to information in chapter ' - '\\ref{chap:variable_tables}\n') - - var_structs = get_var_structs() - - for struct_name, var_struct_list in var_structs.items(): - struct_name_escaped = escape_underscore(struct_name) - - latex.write('\section[%s]{\hyperref[sec:var_tab_%s]{%s}}\n' % ( - struct_name_escaped, struct_name, struct_name_escaped)) - latex.write('\label{sec:var_sec_%s}\n' % struct_name) - - for var_struct in var_struct_list: - try: - struct_time_levs = var_struct.attrib['time_levs'] - has_time = int(struct_time_levs) > 1 - except KeyError: - has_time = False - - write_var_struct_section(latex, var_struct, struct_name, has_time) - - latex.close() - - -parser = OptionParser() -parser.add_option("-f", "--file", dest="registry_path", - help="Path to Registry file", metavar="FILE") -parser.add_option("-d", "--tex_dir", dest="latex_dir", - help="Path to directory with latex addition files.", - metavar="DIR") -parser.add_option("-p", "--tex_path", dest="latex_path", - help="Path to latex input files that will be written to " - "generated latex.", metavar="PATH") - -options, args = parser.parse_args() - -if not options.registry_path: - parser.error("Registry file is required") - -if not options.latex_dir: - parser.error('Directory with group latex files is missing. Skipping ' - 'addition of latex files.') -if not options.latex_path: - parser.error('Need latex path with latex directory.') - -latex_missing_string = '{\\bf \color{red} MISSING}' -dimension_table_header = '{\\bf Name} & {\\bf Units} & {\\bf Description}' -variable_table_header = '{\\bf Name} & {\\bf Description}' -namelist_table_header = '{\\bf Name} & {\\bf Description}' - -registry_path = options.registry_path - -registry_tree = ET.parse(registry_path) - -registry = registry_tree.getroot() - -write_namelist_input_generated() - -# Write file that defines version string for model. 
-latex = open('define_version.tex', 'w') -try: - version_string = registry.attrib['version'] -except KeyError: - version_string = '{\\bf MISSING}' -latex.write('\\newcommand{\\version}{%s}\n' % version_string) -latex.close() - -write_dimension_table_documentation() - -write_namelist_table_documentation() - -write_namelist_section_documentation() - -write_variable_table_documentation() - -write_variable_section_documentation() From 93f28bf7b12e787cc40b84438b4fa40b97afea5b Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:09:20 -0600 Subject: [PATCH 014/180] Remove visualization/cross_section --- visualization/cross_section/ColdHot.m | 44 ---- visualization/cross_section/cross_section.m | 219 ------------------ visualization/cross_section/exampleSections.m | 143 ------------ .../cross_section/find_cell_weights.m | 165 ------------- .../cross_section/load_large_variables.m | 82 ------- .../cross_section/script_average_netcdf | 124 ---------- .../cross_section/sub_plot_cross_sections.m | 210 ----------------- .../sub_plot_section_locations.m | 97 -------- visualization/cross_section/triArea.m | 24 -- 9 files changed, 1108 deletions(-) delete mode 100644 visualization/cross_section/ColdHot.m delete mode 100644 visualization/cross_section/cross_section.m delete mode 100644 visualization/cross_section/exampleSections.m delete mode 100644 visualization/cross_section/find_cell_weights.m delete mode 100644 visualization/cross_section/load_large_variables.m delete mode 100755 visualization/cross_section/script_average_netcdf delete mode 100644 visualization/cross_section/sub_plot_cross_sections.m delete mode 100644 visualization/cross_section/sub_plot_section_locations.m delete mode 100644 visualization/cross_section/triArea.m diff --git a/visualization/cross_section/ColdHot.m b/visualization/cross_section/ColdHot.m deleted file mode 100644 index c93e0629f..000000000 --- a/visualization/cross_section/ColdHot.m +++ /dev/null @@ -1,44 +0,0 @@ -function B = 
ColdHot(m) -% A colormap for blue cold, white zero, Hot positives. - -if nargin < 1, m = 256; end - -n = fix(m/8); - -% Create cold part: -A = [ - 102 0 102; - 0 41 253; - 102 153 255; - 41 255 255; - 255 255 255]/255; -%A = ones(size(A)) - A; - -v = [n-1 n n n]; - -cold = linspacev(A,v); - -% Create hot part: -A = [ - 255 255 255; - 255 255 0; - 255 102 41; - 255 0 0; - 102 41 0]/255; - -v = [n n n n-1]; -myhot = linspacev(A,v); - - -B = [cold; myhot]; - -%B = [B; flipud(hot(fix(m/2)))]; - - -% Original cold part, 8/2/02: -A = [ - 102 0 102; - 41 0 153; - 0 0 204; - 42 102 255; - 255 255 255]/255; \ No newline at end of file diff --git a/visualization/cross_section/cross_section.m b/visualization/cross_section/cross_section.m deleted file mode 100644 index b6cf8bb10..000000000 --- a/visualization/cross_section/cross_section.m +++ /dev/null @@ -1,219 +0,0 @@ -%function cross_section - -% Plot cross-sections of MPAS fields. -% -% This is the main function, where the user can specify data files, -% coordinates and text, then call functions to find sections, load -% data, and plot cross-sections. -% -% The final product is a set of plots as jpg files, a latex file, -% and a compiled pdf file of the plots, if desired. -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify data files -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% all plots are placed in the f directory. Comment out if not needed. -unix('mkdir -p f docs'); - -% The text string [wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. - -wd = '/var/tmp/mpeterse/runs'; - -% These files only need to contain a small number of variables. -% You may need to reduce the file size before copying to a local -% machine using, and also add the variables you want to plot. 
-% ncks -v nAverage,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,refLayerThickness,dvEdge,latCell,lonCell,refBottomDepth,areaCell,xCell,yCell,zCell,xVertex,yVertex,zVertex,cellsOnVertex,maxLevelCell \ -% file_in.nc file_out.nc - -sim(1).dir = 'm91a'; -sim(1).netcdf_file = ['output.0010-02-01_00.00.00.nc_transport_vars.nc']; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example sections. - -% sectionText a cell array with text describing each section -sectionText = { -'ACC 0E lon',... -'ACC 30E lon',... -'ACC 60E lon',... -'ACC 90E lon',... -'Drake Pass 65W lon',... -'ACC Tasman 147E lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - -67 0 -35 0;... % S Oc Tas - -67 30 -35 30;... % S Oc Tas - -67 60 -35 60;... % S Oc Tas - -67 90 -35 90;... % S Oc Tas - -65 -65 -55 -65;... % Drake - -67 147 -43.5 147;... % S Oc Tas - ]; -nSections = size(coord,1); - -% number of points to plot for each figure -nPoints = 300; - -% plotDepth(nSections) depth to which to plot each section, in m -plotDepth = 5000*ones(1,size(coord,1)); - -% For plotting, only four plots are allowed per row. -% Choose sections above for each page. -% page.name name of this group of sections -% sectionID section numbers for each row of this page -page(1).name = 'ACC'; -page(1).sectionID = [1:nSections]; - -% coord range may need alteration to match lonVertex: -% If lonVertex is between 0 and 2*pi, ensure the coordinate range is 0 to 360. -%coord(:,2) = mod(coord(:,2),360); -%coord(:,4) = mod(coord(:,4),360); -% If lonVertex is between -pi and pi, ensure the coordinate range is -180 to 180. 
-coord(:,2) = mod(coord(:,2)+180,360)-180; -coord(:,4) = mod(coord(:,4)+180,360)-180; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example variables - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -var_name = {... -'avgVelocityZonal',... -'avgVelocityMeridional',... -'keAvgVelocity'}; -var_conv_factor = [100 100 1]; % convert m/s to cm/s for velocities -var_lims = [-20 20 2.0; -10 10 1.0; 0 20 2.5]; - -var_name = {... -'temperature',... -}; -var_conv_factor = [1]; -var_lims = [-1 14 1]; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify actions to be taken -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -find_cell_weights_flag = true ; -plot_section_locations_flag = true ; -load_large_variables_flag = true ; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Begin main code. Normally this does not need to change. -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -for iSim = 1:length(sim) - - fprintf(['**** simulation: ' sim(iSim).dir '\n']) - fid_latex = fopen('temp.tex','w'); - fprintf(fid_latex,['%% file created by plot_mpas_cross_sections, ' date '\n\n']); - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Find cells that connect beginning and end points of section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if find_cell_weights_flag - [sim(iSim).cellsOnVertexSection, sim(iSim).cellWeightsSection, ... - sim(iSim).latSection,sim(iSim).lonSection, ... - refMidDepth, refBottomDepth, sim(iSim).maxLevelCellSection] ... 
- = find_cell_weights(wd,sim(iSim).dir,sim(iSim).netcdf_file, ... - sectionText,coord,nPoints); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot cell section locations on world map - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if plot_section_locations_flag - sub_plot_section_locations(sim(iSim).dir,coord, ... - sim(iSim).latSection,sim(iSim).lonSection,fid_latex) - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Load large variables from netcdf file - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if load_large_variables_flag - [sim(iSim).sectionData] = load_large_variables ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, var_name, var_conv_factor, ... - sim(iSim).cellsOnVertexSection, sim(iSim).cellWeightsSection); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot data on cross-sections - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - for iPage = 1:length(page) - - sub_plot_cross_sections(sim(iSim).dir,sim(iSim).netcdf_file,sectionText, ... - page(iPage).name, page(iPage).sectionID,sim(iSim).sectionData,refMidDepth,refBottomDepth,... - sim(iSim).latSection,sim(iSim).lonSection,sim(iSim).maxLevelCellSection,coord,plotDepth,... - var_name,var_lims,fid_latex) - - end % iPage - fprintf(fid_latex,['\n\\end{document}\n\n']); - fclose(fid_latex); - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Latex Compilation -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% This matlab script will invoke a latex compiler in order to -% produce a pdf file. Specify the unix command-line latex -% executable, or 'none' to not compile the latex document. - -% latex_command = 'latex'; - -% doc_dir = ['docs/' regexprep(sim(iSim).dir,'/','_') '_' ... 
-% sim(iSim).netcdf_file '_dir' ]; -% unix(['mkdir -p ' doc_dir '/f']); -% unix(['mv f/*jpg ' doc_dir '/f']); - -% filename = [ regexprep(sim(iSim).dir,'/','_') '_' sim(iSim).netcdf_file '.tex']; -% unix(['cat mpas_sections.head.tex temp.tex > ' doc_dir '/' filename ]); - -% if not(strcmp(latex_command,'none')) -% fprintf('*** Compiling latex document \n') -% cd(doc_dir); -% unix([latex_command ' ' filename]); -% cd('../..'); -% end - -end % iSim - - diff --git a/visualization/cross_section/exampleSections.m b/visualization/cross_section/exampleSections.m deleted file mode 100644 index e6302c10f..000000000 --- a/visualization/cross_section/exampleSections.m +++ /dev/null @@ -1,143 +0,0 @@ -% example_sections.m - -% This file simply contains example cross sections with text names. -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% sectionText a cell array with text describing each section -sectionText = { -'N Atlantic 26N lat',... -'N Atlantic 36N lat',... -'N Atlantic 41N lat',... -'N Atlantic 46N lat',... -'N Atlantic 56N lat',... -'N Atlantic 70W lon',... -'N Atlantic 65W lon',... -'N Atlantic 60W lon',... -'N Atlantic 50W lon',... -'N Atlantic 40W lon',... -'N Atlantic 30W lon',... -'Eq Pacific 140W lon',... -'Eq Pacific 0 lat ',... -'Drake Pass 65W lon',... -'ACC Tasman 147E lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - 26 -80 26 -15;... % N Atl Zonal - 36 -76 36 -10;... % N Atl Zonal - 41 -72 41 -10;... % N Atl Zonal - 46 -60 46 -10;... % N Atl Zonal - 56 -60 56 -10;... % N Atl Zonal - 20 -70 44 -70;... % N Atl Meridional - 19 -65 44 -65;... 
% N Atl Meridional - 8.5 -60 46 -60;... % N Atl Meridional - 1.8 -50 62 -50;... % N Atl Meridional - -3 -40 65 -40;... % N Atl Meridional - -5 -30 68.2 -30;... % N Atl Meridional - -8 -140 8 -140;... % Eq Pac Meridional - 0 140 0 -95;... % Eq Pac Zonal - -65 -65 -55 -65;... % Drake - -67 147 -43.5 147;... % S Oc Tas - ]; - - -% These are the cross-sections for the current plots: -% sectionText a cell array with text describing each section -sectionText = { -'N Atlantic 70W lon',... -'N Atlantic 65W lon',... -'N Atlantic 60W lon',... -'N Atlantic 50W lon',... -'N Atlantic 40W lon',... -'N Atlantic 30W lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - 20 -70 44 -70;... % N Atl Meridional - 19 -65 44 -65;... % N Atl Meridional - 8.5 -60 46 -60;... % N Atl Meridional - 1.8 -50 62 -50;... % N Atl Meridional - -3 -40 65 -40;... % N Atl Meridional - -5 -30 68.2 -30;... % N Atl Meridional - ]; - -sectionText = { -'N Atlantic 77W lon',... -'N Atlantic 76.4W lon',... -'N Atlantic 76W lon',... -'N Atlantic 75W lon',... -'N Atlantic 26N lat',... -'N Atlantic 26N lat',... - }; - -coord = [... - 21 283. 32 283.;... % DWBC N Atl meridional section - 21 283.6 32 283.6;... % DWBC N Atl meridional section - 21 284 32 284;... % DWBC N Atl meridional section - 21 285 32 285;... % DWBC N Atl meridional section - 26.5 -77.1 26.5 -75;... % DWBC N Atl zonal section - 26.5 -80 26.5 -14;... % DWBC N Atl zonal section - ]; - -% sectionText a cell array with text describing each section -sectionText = { -'ACC 0E lon',... -'ACC 30E lon',... -'ACC 60E lon',... -'ACC 90E lon',... -'Drake Pass 65W lon',... -'ACC Tasman 147E lon',... 
- }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - -67 0 -43.5 0;... % S Oc Tas - -67 30 -43.5 30;... % S Oc Tas - -67 60 -43.5 60;... % S Oc Tas - -67 90 -43.5 90;... % S Oc Tas - -65 -65 -55 -65;... % Drake - -67 147 -43.5 147;... % S Oc Tas - ]; - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -var_name = {... -'avgVelocityZonal',... -'avgVelocityMeridional',... -'ke_fromAvgVelocity'}; -var_conv_factor = [100 100 1]; % convert m/s to cm/s for velocities -var_lims = [-20 20 2.0; -10 10 1.0; 0 20 2.5]; - -var_name = {... -'temperature',... -}; -var_conv_factor = [1]; -var_lims = [-2 12 .5]; diff --git a/visualization/cross_section/find_cell_weights.m b/visualization/cross_section/find_cell_weights.m deleted file mode 100644 index bf9779059..000000000 --- a/visualization/cross_section/find_cell_weights.m +++ /dev/null @@ -1,165 +0,0 @@ -function [cellsOnVertexSection, cellWeightsSection, latSection,lonSection, ... - refMidDepth, refBottomDepth, maxLevelCellSection] = find_cell_weights ... - (wd,dir,netcdf_file,sectionText,coord,nPoints) - -% This function reads grid data from an MPAS-Ocean grid or restart -% netCDF file, and finds a path of cells that connect the endpoints -% specified in coord. The path is forced to travel through cells -% that are closest to the line connecting the beginning and end -% cells. 
-% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% sectionText a cell array with text describing each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% -%%%%%%%%%% output arguments %%%%%%%%% -% cellsOnVertexSection(vertexDegree,nPoints,nSections) cells neighboring nearest vertex -% cellWeightsSection(vertexDegree,nPoints,nSections) weights for each cell -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% depth(nVertLevels) depth of center of each layer, for plotting -% latCellDeg(nCells) lat arrays for all cells -% lonCellDeg(nCells) lon arrays for all cells - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read data from file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_cell_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ] -ncid = netcdf.open(filename,'nc_nowrite'); - -xCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xCell')); -yCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yCell')); -zCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zCell')); -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -xVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xVertex')); -yVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yVertex')); -zVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zVertex')); -cellsOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'cellsOnVertex')); -refLayerThickness = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refLayerThickness')); -refBottomDepth = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refBottomDepth')); -maxLevelCell = 
netcdf.getVar(ncid,netcdf.inqVarID(ncid,'maxLevelCell')); -sphere_radius = netcdf.getAtt(ncid,netcdf.getConstant('NC_GLOBAL'),'sphere_radius'); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -netcdf.close(ncid) - -nSections = size(coord,1); - -% Compute depth of center of each layer, for plotting -refMidDepth(1) = refLayerThickness(1)/2; -for i=2:nVertLevels - refMidDepth(i) = refMidDepth(i-1) + 0.5*(refLayerThickness(i) + refLayerThickness(i-1)); -end - -latSection = zeros(nPoints,nSections); -lonSection = zeros(nPoints,nSections); -latVertexSection = zeros(nPoints,nSections); -lonVertexSection = zeros(nPoints,nSections); -maxLevelCellSection = zeros(nPoints,nSections); -nearestVertexSection = zeros(nPoints,nSections); -cellsOnVertexSection = zeros(vertexDegree,nPoints,nSections); -cellWeightsSection = zeros(vertexDegree,nPoints,nSections); -margin=.5; - -for iSection=1:nSections - fprintf('Finding nearest vertex for Section %g \n',iSection) - latSection(:,iSection) = linspace(coord(iSection,1),coord(iSection,3),nPoints); - lonSection(:,iSection) = linspace(coord(iSection,2),coord(iSection,4),nPoints); - - maxLat = (max(latSection(:,iSection))+margin)*pi/180; - minLat = (min(latSection(:,iSection))-margin)*pi/180; - maxLon = (max(lonSection(:,iSection))+margin)*pi/180; - minLon = (min(lonSection(:,iSection))-margin)*pi/180; - - vInd = find(latVertex>minLat&latVertexminLon&lonVertex=cvalue(jnew))),nc)-1,1); - cmapnew(jnew-1,:) = cmap_orig_short(jold,:); - end - cmapnew(nc_inc,:) = cmap_orig_short(nc-1,:); - - colormap(cmapnew) - %colorbarf_spec(cout,h,'vert',contour_lims); - %xlabel('Distance (km) along 26.5N east of 77W') % for DWBC only - - axis tight - %set(gca,'YLim',[0 plotDepth(iSection)],'XLim',[0 175]) % for DWBC only - 
%set(gca,'YTick',[0:1000:5000],'XTick',[0:25:175]) - set(gca,'YLim',[0 plotDepth(iRow)]) - %set(gca,'YTick',[0:100:400]) - xlabel(xtext) - % set(gca,'XTick',-1*[80:.5:70]) - %%%%%% special commands for DWBC mrp end - - %%%%%% special commands for EUC mrp end - %if iRow==2 - % set(gca,'XTick',[143 156 165 180 190 205 220 235 250 265]) - % set(gca,'XTickLabel',{'143' '156' '165E' '180' '170W' '155' '140' '125' '110' '95'}) - %end - - %%%%%% special commands for EUC mrp end - - set(gca,'YDir','reverse') - title([temptext ', ' char(var_name(iVar))]) - ylabel('depth, m') - grid on - set(gca,'layer','top'); - h=colorbar; - - % mrp draw bottom based on zero contour - hold on - n = nPoints; - % old way: maxLevelCell=zeros(1,n); - x(2:n) = (xaxis(1:n-1)+xaxis(2:n))/2; - x(1) = xaxis(1) - (xaxis(2)-xaxis(1))/2; - x(n+1) = xaxis(n) + (xaxis(n)-xaxis(n-1))/2; - b = max(refBottomDepth); - for j=1:n - % old way: maxLevelCell(j)=max(min(find(sectionData(:,j,iSection,iVar)==0.0))-1,1); - depthline(j) = refBottomDepth(maxLevelCellSection(j,iSection)); - % do not draw land at bottom right now. - %h=patch([x(j) x(j+1) x(j+1) x(j) x(j)],... - % [b b depthline(j) depthline(j) b], [.5 .5 .5]); - %set(h,'LineStyle','none') - end - - % mrp draw bottom based on zero contour end - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 5.5 3.2]) - subplot('position',[0 .95 1 .05]); axis off - title_txt = [regexprep(char(var_name(iVar)),'_','\\_') ', ' regexprep(dir,'_','\\_')]; -% h=text(.55,.4,title_txt); -% set(h,'HorizontalAlignment','center','FontWeight','bold','FontSize',14) -% text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - temp=['f/' dir '/' netcdfFile '_' pageName num2str(iRow) '_var' num2str(iVar)]; - filename = regexprep(temp,'\.','_'); - print('-djpeg',[filename '.jpg']); - print('-depsc2',[filename '.eps']); - unix(['epstopdf ' filename '.eps --outfile=' filename '.pdf']); - fprintf(fid_latex,['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); - - % print('-depsc2',[filename '.eps']) - - end - - -end - diff --git a/visualization/cross_section/sub_plot_section_locations.m b/visualization/cross_section/sub_plot_section_locations.m deleted file mode 100644 index f69137f4e..000000000 --- a/visualization/cross_section/sub_plot_section_locations.m +++ /dev/null @@ -1,97 +0,0 @@ -function sub_plot_section_locations(dir,coord, ... - latSection,lonSection,fid_latex) - -% Plot section locations on world map - -% Mark Petersen, MPAS-Ocean Team, LANL, Sep 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cell_sections, on figure 1.\n']) - -nSections = size(coord,1); - -figure(1); clf - - minLon = -180.0; % may be 0 or -180 - latTrans = 360.0; - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. 
- load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - set(gca,'YDir','normal') - - hold on - - % world - axis tight - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-360+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - hold on - grid on - - for iSection=1:nSections - h=plot(lonSection(:,iSection),latSection(:,iSection),'y-'); - h=text(lonSection(1,iSection),latSection(1,iSection), ... - num2str(iSection)); - - set(h,'Color',[1 1 1],'FontWeight','bold') - %h=plot(lonSection(:,iSection),latSection(:,iSection),'y.'); - %set(h,'Color','y','LineWidth',1) - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Cells in cross sections. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 8 8]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - dir_name1 = regexprep(dir,'\.','_'); - dir_name2 = regexprep(dir_name1,'/','_'); - filename=['f/' dir_name2 '_cell_map' ]; - print('-djpeg',[filename '.jpg']) - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... 
- filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/cross_section/triArea.m b/visualization/cross_section/triArea.m deleted file mode 100644 index 8734c8dc1..000000000 --- a/visualization/cross_section/triArea.m +++ /dev/null @@ -1,24 +0,0 @@ -function area=triArea(A,B,C,R) -% - This function calculates the area of the triangle A,B,C on the -% surface of a sphere. -% -% Input: A, B, C -% A: vertex 1 of triangle -% B: vertex 2 of triangle -% C: vertex 3 of triangle -% R: radius of sphere -% Output: (returned value area) -% area: surface area of triangle on sphere. - -R2inv = 1/R/R; - -a = acos(dot(B,C)*R2inv); -b = acos(dot(C,A)*R2inv); -c = acos(dot(A,B)*R2inv); - -s = 0.5*(a+b+c); - -tanqe = sqrt(tan(0.5*s)*tan(0.5*(s-a))*tan(0.5*(s-b))*tan(0.5*(s-c))); - -area = abs(4.0*atan(tanqe)); - From b9c7d9bb34f206780e12647e52385d0644af7827 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:10:00 -0600 Subject: [PATCH 015/180] Remove visualization/dx --- visualization/dx/edge.dx | 18 - visualization/dx/triangle.dx | 172 ---- visualization/dx/visual_vector.cfg | 266 ------ visualization/dx/visual_vector.net | 1238 ---------------------------- visualization/dx/voronoi.dx | 236 ------ 5 files changed, 1930 deletions(-) delete mode 100644 visualization/dx/edge.dx delete mode 100644 visualization/dx/triangle.dx delete mode 100644 visualization/dx/visual_vector.cfg delete mode 100644 visualization/dx/visual_vector.net delete mode 100644 visualization/dx/voronoi.dx diff --git a/visualization/dx/edge.dx b/visualization/dx/edge.dx deleted file mode 100644 index 9fe861ba3..000000000 --- a/visualization/dx/edge.dx +++ /dev/null @@ -1,18 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 120000 -ascii data file edge.position.data - -object 0 class array type float rank 1 shape 3 items 120000 -ascii data file normal.data -attribute "dep" string "positions" - -object 1 class array type float rank 1 shape 3 items 120000 -ascii 
data file tangent.data -attribute "dep" string "positions" - -object "normal" class field -component "positions" "positions list" -component "data" 0 - -object "tangent" class field -component "positions" "positions list" -component "data" 1 diff --git a/visualization/dx/triangle.dx b/visualization/dx/triangle.dx deleted file mode 100644 index c1b454a2d..000000000 --- a/visualization/dx/triangle.dx +++ /dev/null @@ -1,172 +0,0 @@ - -object "positions list" class array type float rank 1 shape 3 items 240000 -ascii data file tri.position.data - -object "edge list" class array type int rank 0 items 240000 -ascii data file tri.edge.data -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 80000 -ascii data file tri.loop.data -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 80000 -ascii data file tri.face.data -attribute "ref" string "loops" - -object 0 class array type float rank 0 items 80000 -data file tri.area.data -attribute "dep" string "faces" - -object 1 class array type float rank 0 items 80000 -data file ./output/divDT.data -attribute "dep" string "faces" - -object 2 class array type float rank 0 items 80000 -data file ./output/ke.data -attribute "dep" string "faces" - -object 3 class array type float rank 0 items 80000 -data file ./output/relDT.data -attribute "dep" string "faces" - -object 4 class array type float rank 0 items 80000 -data file ./output/thicknessc.data -attribute "dep" string "faces" - -object 5 class array type float rank 0 items 80000 -data file ./output/thicknesscdiff.data -attribute "dep" string "faces" - -object 6 class array type float rank 0 items 80000 -data file ./output/kediff.data -attribute "dep" string "faces" - -object 7 class array type float rank 0 items 80000 -data file ./output/relDTdiff.data -attribute "dep" string "faces" - -object 8 class array type float rank 0 items 80000 -data file ./output/residDT.data -attribute "dep" string "faces" - 
-object 9 class array type float rank 0 items 80000 -data file ./output/chi.data -attribute "dep" string "faces" - -object 10 class array type float rank 0 items 80000 -data file ./output/divAVG.data -attribute "dep" string "faces" - -object 11 class array type float rank 0 items 80000 -data file ./output/beta.data -attribute "dep" string "faces" - -object 12 class array type float rank 0 items 80000 -data file ./output/jtw.data -attribute "dep" string "faces" - -object 13 class array type float rank 0 items 80000 -data file ./output/pv.data -attribute "dep" string "faces" - - - -object "areac" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 - -object "divDT" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 1 - -object "ke" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 2 - -object "relDT" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 3 - -object "thicknessc" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 4 - -object "thicknesscdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 5 - -object "kediff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 6 - -object "relDTdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component 
"loops" "loops list" -component "faces" "face list" -component "data" 7 - -object "residDT" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 8 - -object "chi" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 9 - -object "divAVG" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 10 - -object "beta" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 11 - -object "jtw" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 12 - -object "pv" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 13 - diff --git a/visualization/dx/visual_vector.cfg b/visualization/dx/visual_vector.cfg deleted file mode 100644 index 8b8de65b5..000000000 --- a/visualization/dx/visual_vector.cfg +++ /dev/null @@ -1,266 +0,0 @@ -// -// time: Fri Aug 7 15:29:44 2009 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// inaccessible panels: -// inaccessible groups: -// -// panel[0]: position = (0.0156,0.7275), size = 0.2375x0.1546, startup = 0, devstyle = 1, screen = 0 -// title: value = VD String -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[1]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = 
NN -// -// panel[2]: position = (0.0437,0.0539), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[3]: position = (0.1344,0.2155), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[4]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[5]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[6]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[7]: position = (0.0051,0.0349), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[8]: position = (0.0051,0.0349), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[9]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[11]: position = (0.0051,0.0070), size = 
0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[12]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[13]: position = (0.1562,0.5894), size = 0.1738x0.1629, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[14]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[15]: position = (0.0281,0.1356), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[16]: position = (0.2605,0.4563), size = 0.3328x0.4880, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[17]: position = (0.0059,0.4930), size = 0.2746x0.4392, startup = 1, devstyle = 1, screen = 0 -// title: value = Collected -// -// workspace: width = 430, height = 471 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[18]: position = (0.2535,0.2668), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// interactor FileSelector[2]: num_components = 1, value = 
"/Users/todd/Desktop/cgrid_model/run/dx/normals.dx" -// instance: panel = 2, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// instance: panel = 17, x = 0, y = 416, style = FileSelector, vertical = 1, size = 180x55 -// filter = /Users/todd/Desktop/cgrid_model/run/dx/*.dx -// -// interactor String[2]: num_components = 1, value = "velocity" -// instance: panel = 3, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// instance: panel = 17, x = 0, y = 134, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[5]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 9, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 300, y = 0, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[1]: num_components = 1, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/voronoi.dx" -// instance: panel = 1, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// instance: panel = 17, x = 4, y = 289, style = FileSelector, vertical = 1, size = 180x55 -// filter = /Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Cgrid/dx/*.dx -// -// interactor String[1]: num_components = 1, value = "ke.1.0" -// instance: panel = 0, x = 4, y = 14, style = Text, vertical = 1, size = 119x51 -// instance: panel = 16, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// instance: panel = 17, x = 0, y = 0, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[1]: num_components = 1, value = 1 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 8, x = 5, y = 5, style = 
Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 150, y = 0, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// node Colormap[1]: -// input[1]: defaulting = 0, value = { [0.0 0.74683544] [1.0 0.0] } -// input[2]: defaulting = 0, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } -// input[3]: defaulting = 0, value = { [0.84699454 1.0] } -// input[4]: defaulting = 0, value = { [0.84972678 1.0] } -// input[5]: defaulting = 0, value = "Colormap_1" -// input[7]: defaulting = 1, value = 0.0 -// input[8]: defaulting = 1, value = 80737.0 -// input[9]: defaulting = 1, value = 20 -// input[12]: defaulting = 0, value = { 0.0 80737.0 } -// input[17]: defaulting = 0, value = 0.0 -// input[18]: defaulting = 0, value = 80737.0 -// window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 -// -// interactor String[7]: num_components = 1, value = "white" -// instance: panel = 18, x = 5, y = 5, style = Text, vertical = 1, size = 126x51 -// instance: panel = 17, x = 304, y = 136, style = Text, vertical = 1, size = 126x51 -// -// interactor Integer[7]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 7, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[4]: num_components = 1, value = "/Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Dgrid/dx/topography.dx" -// instance: panel = 5, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// -// interactor String[4]: num_components = 1, value = "tpg" -// instance: panel = 6, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// -// 
interactor Integer[6]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 4, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 147, y = 134, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor Integer[10]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 14, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 300, y = 65, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[6]: num_components = 1, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/triangle.dx" -// instance: panel = 12, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// instance: panel = 17, x = 0, y = 350, style = FileSelector, vertical = 1, size = 180x55 -// filter = /Users/todd/Desktop/cgrid_model/run/dx/*.dx -// -// interactor String[6]: num_components = 1, value = "vorticity.1.12" -// instance: panel = 13, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// instance: panel = 17, x = 1, y = 66, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[9]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 15, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// 
local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 150, y = 69, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// node Colormap[2]: -// input[1]: defaulting = 0, value = { [0.0 0.74683544] [1.0 0.0] } -// input[2]: defaulting = 0, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } -// input[3]: defaulting = 0, value = { [0.84699454 1.0] } -// input[4]: defaulting = 0, value = { [0.84972678 1.0] } -// input[5]: defaulting = 0, value = "Colormap_2" -// input[7]: defaulting = 1, value = -0.22230303 -// input[8]: defaulting = 1, value = 72107.0 -// input[9]: defaulting = 1, value = 20 -// input[12]: defaulting = 0, value = { -0.22230303 72107.0 } -// input[17]: defaulting = 0, value = -0.22230303 -// input[18]: defaulting = 0, value = 72107.0 -// window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 -// -// node Image[2]: -// depth: value = 24 -// window: position = (0.2855,0.0399), size = 0.5680x0.8980, screen = 0 -// input[1]: defaulting = 0, value = "Image_2" -// input[4]: defaulting = 0, value = 1 -// input[5]: defaulting = 0, value = [0 0 0] -// input[6]: defaulting = 0, value = [0 -6.8363 0] -// input[7]: defaulting = 0, value = 2.50853 -// input[8]: defaulting = 0, value = 1440 -// input[9]: defaulting = 0, value = 0.953 -// input[10]: defaulting = 0, value = [0 0 1] -// input[11]: defaulting = 1, value = 20.793 -// input[12]: defaulting = 0, value = 0 -// input[14]: defaulting = 0, value = 1 -// input[15]: defaulting = 1, value = "none" -// input[16]: defaulting = 1, value = "none" -// input[17]: defaulting = 1, value = 1 -// input[18]: defaulting = 1, value = 1 -// input[19]: defaulting = 0, value = 0 -// input[22]: defaulting = 0, value = "black" -// input[25]: defaulting = 0, value = "/Users/todd/Desktop/ke.tiff" -// input[26]: defaulting = 0, value = "tiff gamma=1" -// input[27]: defaulting = 0, value = 750 
-// input[28]: defaulting = 1, value = 1.0 -// input[29]: defaulting = 0, value = 0 -// input[30]: defaulting = 0, value = {"x axis", "y axis", ""} -// input[31]: defaulting = 0, value = { -15 -15 15 } -// input[34]: defaulting = 0, value = 1 -// input[37]: defaulting = 0, value = {"grey30", "grey5", "yellow", "white"} -// input[38]: defaulting = 0, value = {"background", "grid", "ticks", "labels"} -// input[39]: defaulting = 0, value = 0.5 -// input[41]: defaulting = 0, value = "none" -// internal caching: 1 diff --git a/visualization/dx/visual_vector.net b/visualization/dx/visual_vector.net deleted file mode 100644 index efddd0d38..000000000 --- a/visualization/dx/visual_vector.net +++ /dev/null @@ -1,1238 +0,0 @@ -// -// time: Wed Sep 9 16:22:00 2009 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// -// MODULE main -// workspace: width = 999, height = 1204 -// layout: snap = 0, width = 50, height = 50, align = NN -// -macro main( -) -> ( -) { - // - // node FileSelector[2]: x = 414, y = 91, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/edge.dx" - // output[2]: visible = 1, type = 32, value = "edge.dx" - // - // - // node String[2]: x = 554, y = 91, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "normal" - // - // - // node Import[2]: x = 501, y = 182, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "geodesic.c40962.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "isurf" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_2_out_1 = - Import( - main_FileSelector_2_out_1, - main_String_2_out_1, - main_Import_2_in_3, - main_Import_2_in_4, - main_Import_2_in_5, - main_Import_2_in_6 - ) [instance: 2, cache: 1]; - // - // node AutoGlyph[2]: x = 483, y = 261, inputs = 7, label = AutoGlyph - // input[2]: defaulting = 0, visible = 1, type = 32, 
value = "standard" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // input[4]: defaulting = 0, visible = 1, type = 5, value = 0.1 - // input[5]: defaulting = 1, visible = 1, type = 5, value = 0.1 - // -main_AutoGlyph_2_out_1 = - AutoGlyph( - main_Import_2_out_1, - main_AutoGlyph_2_in_2, - main_AutoGlyph_2_in_3, - main_AutoGlyph_2_in_4, - main_AutoGlyph_2_in_5, - main_AutoGlyph_2_in_6, - main_AutoGlyph_2_in_7 - ) [instance: 2, cache: 1]; - // - // node Integer[5]: x = 61, y = 420, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_5" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node FileSelector[1]: x = 66, y = 17, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/voronoi.dx" - // output[2]: visible = 1, type = 32, value = "voronoi.dx" - // - // - // node String[1]: x = 187, y = 20, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "area" - // - // - // node Import[1]: x = 120, y = 93, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_1_out_1 = - Import( - main_FileSelector_1_out_1, - main_String_1_out_1, - main_Import_1_in_3, - main_Import_1_in_4, - main_Import_1_in_5, - main_Import_1_in_6 - ) [instance: 1, cache: 1]; - // - // node ShowConnections[2]: x = 65, y = 261, inputs = 1, 
label = ShowConnections - // -main_ShowConnections_2_out_1 = - ShowConnections( - main_Import_1_out_1 - ) [instance: 2, cache: 1]; - // - // node Color[3]: x = 102, y = 345, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "yellow" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_3_out_1 = - Color( - main_ShowConnections_2_out_1, - main_Color_3_in_2, - main_Color_3_in_3, - main_Color_3_in_4, - main_Color_3_in_5 - ) [instance: 3, cache: 1]; - // - // node Switch[7]: x = 138, y = 463, inputs = 2, label = Switch - // -main_Switch_7_out_1 = - Switch( - main_Integer_5_out_1, - main_Color_3_out_1 - ) [instance: 7, cache: 1]; - // - // node Integer[1]: x = 229, y = 448, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_1" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 0 - // - // - // node Colormap[1]: x = 254, y = 167, inputs = 19, label = Colormap - // input[1]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 0.74683544] [1.0 0.0] } - // input[2]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } - // input[3]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84699454 1.0] } - // input[4]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84972678 1.0] } - // input[5]: defaulting = 0, visible = 0, type = 32, value = "Colormap_1" - // input[7]: defaulting = 1, visible = 0, type = 5, value = 866025.38 - // input[8]: defaulting = 1, visible = 0, type = 5, value = 866025.38 - // input[9]: defaulting = 1, visible = 0, type = 1, 
value = 20 - // input[12]: defaulting = 0, visible = 0, type = 16777221, value = { 866025.38 866025.38 } - // input[17]: defaulting = 0, visible = 0, type = 5, value = 866025.38 - // input[18]: defaulting = 0, visible = 0, type = 5, value = 866025.38 - // window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 - // -main_Colormap_1_out_1[cache: 2], -main_Colormap_1_out_2[cache: 2] = - Colormap( - main_Colormap_1_in_1, - main_Colormap_1_in_2, - main_Colormap_1_in_3, - main_Colormap_1_in_4, - main_Colormap_1_in_5, - main_Import_1_out_1, - main_Colormap_1_in_7, - main_Colormap_1_in_8, - main_Colormap_1_in_9, - main_Colormap_1_in_10, - main_Colormap_1_in_11, - main_Colormap_1_in_12, - main_Colormap_1_in_13, - main_Colormap_1_in_14, - main_Colormap_1_in_15, - main_Colormap_1_in_16, - main_Colormap_1_in_17, - main_Colormap_1_in_18, - main_Colormap_1_in_19 - ) [instance: 1, cache: 1]; - // - // node String[7]: x = 9, y = 180, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "white" - // - // - // node ColorBar[1]: x = 65, y = 176, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.15] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 1, visible = 1, type = 16777248, value = {"white"} - // -main_ColorBar_1_out_1 = - ColorBar( - main_Colormap_1_out_1, - main_ColorBar_1_in_2, - main_ColorBar_1_in_3, - main_ColorBar_1_in_4, - main_ColorBar_1_in_5, - main_ColorBar_1_in_6, - main_ColorBar_1_in_7, - main_ColorBar_1_in_8, - main_String_7_out_1, - main_ColorBar_1_in_10, - main_ColorBar_1_in_11, - main_ColorBar_1_in_12, - main_ColorBar_1_in_13, - main_ColorBar_1_in_14, - main_ColorBar_1_in_15, - main_ColorBar_1_in_16 - ) [instance: 1, cache: 1]; - // - // node Color[5]: x = 275, y = 280, inputs = 5, label = Color - // input[2]: defaulting = 1, visible = 1, type = 32, value = "black" - 
// input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_5_out_1 = - Color( - main_Import_1_out_1, - main_Colormap_1_out_1, - main_Color_5_in_3, - main_Color_5_in_4, - main_Color_5_in_5 - ) [instance: 5, cache: 1]; - // - // node Collect[3]: x = 245, y = 353, inputs = 2, label = Collect - // -main_Collect_3_out_1 = - Collect( - main_ColorBar_1_out_1, - main_Color_5_out_1 - ) [instance: 3, cache: 1]; - // - // node Switch[1]: x = 309, y = 436, inputs = 2, label = Switch - // -main_Switch_1_out_1 = - Switch( - main_Integer_1_out_1, - main_Collect_3_out_1 - ) [instance: 1, cache: 1]; - // - // node Integer[7]: x = 71, y = 1096, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_7" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 0 - // - // - // node FileSelector[4]: x = 61, y = 768, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Dgrid/dx/topography.dx" - // output[2]: visible = 1, type = 32, value = "topography.dx" - // - // - // node String[4]: x = 180, y = 771, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "tpg" - // - // - // node Import[4]: x = 114, y = 844, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_4_out_1 = - Import( - main_FileSelector_4_out_1, - main_String_4_out_1, - main_Import_4_in_3, - 
main_Import_4_in_4, - main_Import_4_in_5, - main_Import_4_in_6 - ) [instance: 4, cache: 1]; - // - // node ShowConnections[3]: x = 97, y = 940, inputs = 1, label = ShowConnections - // -main_ShowConnections_3_out_1 = - ShowConnections( - main_Import_4_out_1 - ) [instance: 3, cache: 1]; - // - // node Color[8]: x = 134, y = 1024, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_8_out_1 = - Color( - main_ShowConnections_3_out_1, - main_Color_8_in_2, - main_Color_8_in_3, - main_Color_8_in_4, - main_Color_8_in_5 - ) [instance: 8, cache: 1]; - // - // node Tube[3]: x = 248, y = 1080, inputs = 4, label = Tube - // input[2]: defaulting = 0, visible = 1, type = 5, value = 0.01 - // -main_Tube_3_out_1 = - Tube( - main_Color_8_out_1, - main_Tube_3_in_2, - main_Tube_3_in_3, - main_Tube_3_in_4 - ) [instance: 3, cache: 1]; - // - // node Switch[9]: x = 170, y = 1142, inputs = 2, label = Switch - // -main_Switch_9_out_1 = - Switch( - main_Integer_7_out_1, - main_Tube_3_out_1 - ) [instance: 9, cache: 1]; - // - // node Integer[6]: x = 416, y = 363, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_6" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node Color[7]: x = 511, y = 335, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "black" - // -main_Color_7_out_1 = - Color( - main_AutoGlyph_2_out_1, - main_Color_7_in_2, - main_Color_7_in_3, - main_Color_7_in_4, - main_Color_7_in_5 - ) [instance: 7, cache: 1]; - // - // 
node Switch[8]: x = 493, y = 406, inputs = 2, label = Switch - // -main_Switch_8_out_1 = - Switch( - main_Integer_6_out_1, - main_Color_7_out_1 - ) [instance: 8, cache: 1]; - // - // node Integer[10]: x = 699, y = 470, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_10" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node FileSelector[6]: x = 704, y = 67, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/triangle.dx" - // output[2]: visible = 1, type = 32, value = "triangle.dx" - // - // - // node String[6]: x = 824, y = 70, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "areac" - // - // - // node Import[6]: x = 758, y = 143, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_6_out_1 = - Import( - main_FileSelector_6_out_1, - main_String_6_out_1, - main_Import_6_in_3, - main_Import_6_in_4, - main_Import_6_in_5, - main_Import_6_in_6 - ) [instance: 6, cache: 1]; - // - // node ShowConnections[4]: x = 703, y = 311, inputs = 1, label = ShowConnections - // -main_ShowConnections_4_out_1 = - ShowConnections( - main_Import_6_out_1 - ) [instance: 4, cache: 1]; - // - // node Color[11]: x = 740, y = 395, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "yellow" - // 
input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_11_out_1 = - Color( - main_ShowConnections_4_out_1, - main_Color_11_in_2, - main_Color_11_in_3, - main_Color_11_in_4, - main_Color_11_in_5 - ) [instance: 11, cache: 1]; - // - // node Switch[12]: x = 776, y = 513, inputs = 2, label = Switch - // -main_Switch_12_out_1 = - Switch( - main_Integer_10_out_1, - main_Color_11_out_1 - ) [instance: 12, cache: 1]; - // - // node Integer[9]: x = 867, y = 498, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_9" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node Colormap[2]: x = 892, y = 217, inputs = 19, label = Colormap - // input[1]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 0.74683544] [1.0 0.0] } - // input[2]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } - // input[3]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84699454 1.0] } - // input[4]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84972678 1.0] } - // input[5]: defaulting = 0, visible = 0, type = 32, value = "Colormap_2" - // input[7]: defaulting = 1, visible = 0, type = 5, value = 433012.69 - // input[8]: defaulting = 1, visible = 0, type = 5, value = 433012.69 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 20 - // input[12]: defaulting = 0, visible = 0, type = 16777221, value = { 433012.69 433012.69 } - // input[17]: defaulting = 0, visible = 0, type = 5, value = 433012.69 - // input[18]: defaulting = 0, visible = 0, type = 5, value = 433012.69 - // window: 
position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 - // -main_Colormap_2_out_1[cache: 2], -main_Colormap_2_out_2[cache: 2] = - Colormap( - main_Colormap_2_in_1, - main_Colormap_2_in_2, - main_Colormap_2_in_3, - main_Colormap_2_in_4, - main_Colormap_2_in_5, - main_Import_6_out_1, - main_Colormap_2_in_7, - main_Colormap_2_in_8, - main_Colormap_2_in_9, - main_Colormap_2_in_10, - main_Colormap_2_in_11, - main_Colormap_2_in_12, - main_Colormap_2_in_13, - main_Colormap_2_in_14, - main_Colormap_2_in_15, - main_Colormap_2_in_16, - main_Colormap_2_in_17, - main_Colormap_2_in_18, - main_Colormap_2_in_19 - ) [instance: 2, cache: 1]; - // - // node ColorBar[3]: x = 701, y = 226, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.075] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 1, visible = 1, type = 16777248, value = {"white"} - // input[11]: defaulting = 0, visible = 0, type = 5, value = 1.5 - // -main_ColorBar_3_out_1 = - ColorBar( - main_Colormap_2_out_1, - main_ColorBar_3_in_2, - main_ColorBar_3_in_3, - main_ColorBar_3_in_4, - main_ColorBar_3_in_5, - main_ColorBar_3_in_6, - main_ColorBar_3_in_7, - main_ColorBar_3_in_8, - main_String_7_out_1, - main_ColorBar_3_in_10, - main_ColorBar_3_in_11, - main_ColorBar_3_in_12, - main_ColorBar_3_in_13, - main_ColorBar_3_in_14, - main_ColorBar_3_in_15, - main_ColorBar_3_in_16 - ) [instance: 3, cache: 1]; - // - // node Color[10]: x = 913, y = 330, inputs = 5, label = Color - // input[2]: defaulting = 1, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_10_out_1 = - Color( - main_Import_6_out_1, - main_Colormap_2_out_1, - main_Color_10_in_3, - main_Color_10_in_4, - main_Color_10_in_5 - ) [instance: 10, cache: 1]; - // - // node Collect[4]: x = 883, y = 403, inputs = 2, label = Collect - // 
-main_Collect_4_out_1 = - Collect( - main_ColorBar_3_out_1, - main_Color_10_out_1 - ) [instance: 4, cache: 1]; - // - // node Switch[11]: x = 947, y = 486, inputs = 2, label = Switch - // -main_Switch_11_out_1 = - Switch( - main_Integer_9_out_1, - main_Collect_4_out_1 - ) [instance: 11, cache: 1]; - // - // node Collect[2]: x = 406, y = 576, inputs = 7, label = Collect - // -main_Collect_2_out_1 = - Collect( - main_Switch_7_out_1, - main_Switch_1_out_1, - main_Switch_9_out_1, - main_Switch_8_out_1, - main_Collect_2_in_5, - main_Switch_12_out_1, - main_Switch_11_out_1 - ) [instance: 2, cache: 1]; - // - // node ColorBar[2]: x = 354, y = 928, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.225] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 0, visible = 0, type = 16777248, value = {"white"} - // -main_ColorBar_2_out_1 = - ColorBar( - main_ColorBar_2_in_1, - main_ColorBar_2_in_2, - main_ColorBar_2_in_3, - main_ColorBar_2_in_4, - main_ColorBar_2_in_5, - main_ColorBar_2_in_6, - main_ColorBar_2_in_7, - main_ColorBar_2_in_8, - main_ColorBar_2_in_9, - main_ColorBar_2_in_10, - main_ColorBar_2_in_11, - main_ColorBar_2_in_12, - main_ColorBar_2_in_13, - main_ColorBar_2_in_14, - main_ColorBar_2_in_15, - main_ColorBar_2_in_16 - ) [instance: 2, cache: 1]; - // - // node Image[2]: x = 453, y = 651, inputs = 49, label = Image - // input[1]: defaulting = 0, visible = 0, type = 67108863, value = "Image_2" - // input[4]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 0, visible = 0, type = 8, value = [1855.82 170889 0] - // input[6]: defaulting = 0, visible = 0, type = 8, value = [1855.82 170889 524001] - // input[7]: defaulting = 0, visible = 0, type = 5, value = 8190.35 - // input[8]: defaulting = 0, visible = 0, type = 1, value = 1440 - // input[9]: defaulting = 0, visible = 0, type = 5, 
value = 0.955 - // input[10]: defaulting = 0, visible = 0, type = 8, value = [0 1 0] - // input[11]: defaulting = 1, visible = 0, type = 5, value = 0.895538 - // input[12]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[14]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[15]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[16]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[17]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[18]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[19]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[22]: defaulting = 0, visible = 0, type = 32, value = "black" - // input[25]: defaulting = 0, visible = 0, type = 32, value = "/Users/todd/Desktop/ke.tiff" - // input[26]: defaulting = 0, visible = 0, type = 32, value = "tiff gamma=1" - // input[27]: defaulting = 0, visible = 0, type = 1, value = 750 - // input[28]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[29]: defaulting = 0, visible = 0, type = 3, value = 0 - // input[30]: defaulting = 0, visible = 0, type = 16777248, value = {"x axis", "y axis", ""} - // input[31]: defaulting = 0, visible = 0, type = 16777217, value = { -15 -15 15 } - // input[34]: defaulting = 0, visible = 0, type = 3, value = 1 - // input[37]: defaulting = 0, visible = 0, type = 16777248, value = {"grey30", "grey5", "yellow", "white"} - // input[38]: defaulting = 0, visible = 0, type = 16777248, value = {"background", "grid", "ticks", "labels"} - // input[39]: defaulting = 0, visible = 0, type = 5, value = 0.5 - // input[41]: defaulting = 0, visible = 0, type = 32, value = "none" - // depth: value = 24 - // window: position = (0.2492,0.0760), size = 0.5680x0.8980, screen = 0 - // internal caching: 1 - // -main_Image_2_out_1, -main_Image_2_out_2, -main_Image_2_out_3 = - Image( - main_Image_2_in_1, - main_Collect_2_out_1, - main_Image_2_in_3, - main_Image_2_in_4, - main_Image_2_in_5, - 
main_Image_2_in_6, - main_Image_2_in_7, - main_Image_2_in_8, - main_Image_2_in_9, - main_Image_2_in_10, - main_Image_2_in_11, - main_Image_2_in_12, - main_Image_2_in_13, - main_Image_2_in_14, - main_Image_2_in_15, - main_Image_2_in_16, - main_Image_2_in_17, - main_Image_2_in_18, - main_Image_2_in_19, - main_Image_2_in_20, - main_Image_2_in_21, - main_Image_2_in_22, - main_Image_2_in_23, - main_Image_2_in_24, - main_Image_2_in_25, - main_Image_2_in_26, - main_Image_2_in_27, - main_Image_2_in_28, - main_Image_2_in_29, - main_Image_2_in_30, - main_Image_2_in_31, - main_Image_2_in_32, - main_Image_2_in_33, - main_Image_2_in_34, - main_Image_2_in_35, - main_Image_2_in_36, - main_Image_2_in_37, - main_Image_2_in_38, - main_Image_2_in_39, - main_Image_2_in_40, - main_Image_2_in_41, - main_Image_2_in_42, - main_Image_2_in_43, - main_Image_2_in_44, - main_Image_2_in_45, - main_Image_2_in_46, - main_Image_2_in_47, - main_Image_2_in_48, - main_Image_2_in_49 - ) [instance: 2, cache: 1]; - // - // node Tube[2]: x = 11, y = 345, inputs = 4, label = Tube - // input[2]: defaulting = 1, visible = 1, type = 5, value = 0.0025 - // input[3]: defaulting = 0, visible = 0, type = 1, value = 16 - // -main_Tube_2_out_1 = - Tube( - main_Tube_2_in_1, - main_Tube_2_in_2, - main_Tube_2_in_3, - main_Tube_2_in_4 - ) [instance: 2, cache: 1]; -// network: end of macro body -CacheScene(main_Image_2_in_1, main_Image_2_out_1, main_Image_2_out_2); -} -main_FileSelector_2_out_1 = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/edge.dx"; -main_String_2_out_1 = "normal"; -main_Import_2_in_3 = "dx"; -main_Import_2_in_4 = NULL; -main_Import_2_in_5 = NULL; -main_Import_2_in_6 = NULL; -main_Import_2_out_1 = NULL; -main_AutoGlyph_2_in_2 = "standard"; -main_AutoGlyph_2_in_3 = 1.0; -main_AutoGlyph_2_in_4 = 0.1; -main_AutoGlyph_2_in_5 = NULL; -main_AutoGlyph_2_in_6 = NULL; -main_AutoGlyph_2_in_7 = NULL; -main_AutoGlyph_2_out_1 = NULL; -main_Integer_5_in_1 = "Integer_5"; 
-main_Integer_5_in_2 = NULL; -main_Integer_5_in_3 = 1 ; -main_Integer_5_in_4 = NULL; -main_Integer_5_in_5 = NULL; -main_Integer_5_in_6 = NULL; -main_Integer_5_in_7 = NULL; -main_Integer_5_in_8 = NULL; -main_Integer_5_in_9 = NULL; -main_Integer_5_in_10 = NULL; -main_Integer_5_in_11 = NULL; -main_Integer_5_out_1 = 1 ; -main_FileSelector_1_out_1 = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/voronoi.dx"; -main_String_1_out_1 = "area"; -main_Import_1_in_3 = "dx"; -main_Import_1_in_4 = NULL; -main_Import_1_in_5 = NULL; -main_Import_1_in_6 = NULL; -main_Import_1_out_1 = NULL; -main_ShowConnections_2_out_1 = NULL; -main_Color_3_in_2 = "yellow"; -main_Color_3_in_3 = 1.0; -main_Color_3_in_4 = NULL; -main_Color_3_in_5 = NULL; -main_Color_3_out_1 = NULL; -main_Switch_7_out_1 = NULL; -main_Integer_1_in_1 = "Integer_1"; -main_Integer_1_in_2 = NULL; -main_Integer_1_in_3 = 0 ; -main_Integer_1_in_4 = NULL; -main_Integer_1_in_5 = NULL; -main_Integer_1_in_6 = NULL; -main_Integer_1_in_7 = NULL; -main_Integer_1_in_8 = NULL; -main_Integer_1_in_9 = NULL; -main_Integer_1_in_10 = NULL; -main_Integer_1_in_11 = NULL; -main_Integer_1_out_1 = 0 ; -main_Colormap_1_in_1 = { [0.0 0.74683544] [1.0 0.0] }; -main_Colormap_1_in_2 = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] }; -main_Colormap_1_in_3 = { [0.84699454 1.0] }; -main_Colormap_1_in_4 = { [0.84972678 1.0] }; -main_Colormap_1_in_5 = "Colormap_1"; -main_Colormap_1_in_7 = NULL; -main_Colormap_1_in_8 = NULL; -main_Colormap_1_in_9 = NULL; -main_Colormap_1_in_10 = NULL; -main_Colormap_1_in_11 = NULL; -main_Colormap_1_in_12 = { 866025.38 866025.38 }; -main_Colormap_1_in_13 = NULL; -main_Colormap_1_in_14 = NULL; -main_Colormap_1_in_15 = NULL; -main_Colormap_1_in_16 = NULL; -main_Colormap_1_in_17 = 866025.38; -main_Colormap_1_in_18 = 866025.38; -main_Colormap_1_in_19 = NULL; -main_Colormap_1_out_1 = NULL; -main_String_7_out_1 = "white"; -main_ColorBar_1_in_2 = [0.05 0.15]; -main_ColorBar_1_in_3 = [200 15]; 
-main_ColorBar_1_in_4 = 0; -main_ColorBar_1_in_5 = NULL; -main_ColorBar_1_in_6 = NULL; -main_ColorBar_1_in_7 = NULL; -main_ColorBar_1_in_8 = NULL; -main_ColorBar_1_in_10 = NULL; -main_ColorBar_1_in_11 = NULL; -main_ColorBar_1_in_12 = NULL; -main_ColorBar_1_in_13 = NULL; -main_ColorBar_1_in_14 = NULL; -main_ColorBar_1_in_15 = NULL; -main_ColorBar_1_in_16 = NULL; -main_ColorBar_1_out_1 = NULL; -main_Color_5_in_3 = 1.0; -main_Color_5_in_4 = NULL; -main_Color_5_in_5 = NULL; -main_Color_5_out_1 = NULL; -main_Collect_3_out_1 = NULL; -main_Switch_1_out_1 = NULL; -main_Integer_7_in_1 = "Integer_7"; -main_Integer_7_in_2 = NULL; -main_Integer_7_in_3 = 0 ; -main_Integer_7_in_4 = NULL; -main_Integer_7_in_5 = NULL; -main_Integer_7_in_6 = NULL; -main_Integer_7_in_7 = NULL; -main_Integer_7_in_8 = NULL; -main_Integer_7_in_9 = NULL; -main_Integer_7_in_10 = NULL; -main_Integer_7_in_11 = NULL; -main_Integer_7_out_1 = 0 ; -main_FileSelector_4_out_1 = "/Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Dgrid/dx/topography.dx"; -main_String_4_out_1 = "tpg"; -main_Import_4_in_3 = "dx"; -main_Import_4_in_4 = NULL; -main_Import_4_in_5 = NULL; -main_Import_4_in_6 = NULL; -main_Import_4_out_1 = NULL; -main_ShowConnections_3_out_1 = NULL; -main_Color_8_in_2 = "black"; -main_Color_8_in_3 = 1.0; -main_Color_8_in_4 = NULL; -main_Color_8_in_5 = NULL; -main_Color_8_out_1 = NULL; -main_Tube_3_in_2 = 0.01; -main_Tube_3_in_3 = NULL; -main_Tube_3_in_4 = NULL; -main_Tube_3_out_1 = NULL; -main_Switch_9_out_1 = NULL; -main_Integer_6_in_1 = "Integer_6"; -main_Integer_6_in_2 = NULL; -main_Integer_6_in_3 = 1 ; -main_Integer_6_in_4 = NULL; -main_Integer_6_in_5 = NULL; -main_Integer_6_in_6 = NULL; -main_Integer_6_in_7 = NULL; -main_Integer_6_in_8 = NULL; -main_Integer_6_in_9 = NULL; -main_Integer_6_in_10 = NULL; -main_Integer_6_in_11 = NULL; -main_Integer_6_out_1 = 1 ; -main_Color_7_in_2 = "black"; -main_Color_7_in_3 = NULL; -main_Color_7_in_4 = NULL; -main_Color_7_in_5 = NULL; -main_Color_7_out_1 = NULL; 
-main_Switch_8_out_1 = NULL; -main_Integer_10_in_1 = "Integer_10"; -main_Integer_10_in_2 = NULL; -main_Integer_10_in_3 = 1 ; -main_Integer_10_in_4 = NULL; -main_Integer_10_in_5 = NULL; -main_Integer_10_in_6 = NULL; -main_Integer_10_in_7 = NULL; -main_Integer_10_in_8 = NULL; -main_Integer_10_in_9 = NULL; -main_Integer_10_in_10 = NULL; -main_Integer_10_in_11 = NULL; -main_Integer_10_out_1 = 1 ; -main_FileSelector_6_out_1 = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/triangle.dx"; -main_String_6_out_1 = "areac"; -main_Import_6_in_3 = "dx"; -main_Import_6_in_4 = NULL; -main_Import_6_in_5 = NULL; -main_Import_6_in_6 = NULL; -main_Import_6_out_1 = NULL; -main_ShowConnections_4_out_1 = NULL; -main_Color_11_in_2 = "yellow"; -main_Color_11_in_3 = 1.0; -main_Color_11_in_4 = NULL; -main_Color_11_in_5 = NULL; -main_Color_11_out_1 = NULL; -main_Switch_12_out_1 = NULL; -main_Integer_9_in_1 = "Integer_9"; -main_Integer_9_in_2 = NULL; -main_Integer_9_in_3 = 1 ; -main_Integer_9_in_4 = NULL; -main_Integer_9_in_5 = NULL; -main_Integer_9_in_6 = NULL; -main_Integer_9_in_7 = NULL; -main_Integer_9_in_8 = NULL; -main_Integer_9_in_9 = NULL; -main_Integer_9_in_10 = NULL; -main_Integer_9_in_11 = NULL; -main_Integer_9_out_1 = 1 ; -main_Colormap_2_in_1 = { [0.0 0.74683544] [1.0 0.0] }; -main_Colormap_2_in_2 = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] }; -main_Colormap_2_in_3 = { [0.84699454 1.0] }; -main_Colormap_2_in_4 = { [0.84972678 1.0] }; -main_Colormap_2_in_5 = "Colormap_2"; -main_Colormap_2_in_7 = NULL; -main_Colormap_2_in_8 = NULL; -main_Colormap_2_in_9 = NULL; -main_Colormap_2_in_10 = NULL; -main_Colormap_2_in_11 = NULL; -main_Colormap_2_in_12 = { 433012.69 433012.69 }; -main_Colormap_2_in_13 = NULL; -main_Colormap_2_in_14 = NULL; -main_Colormap_2_in_15 = NULL; -main_Colormap_2_in_16 = NULL; -main_Colormap_2_in_17 = 433012.69; -main_Colormap_2_in_18 = 433012.69; -main_Colormap_2_in_19 = NULL; -main_Colormap_2_out_1 = NULL; 
-main_ColorBar_3_in_2 = [0.05 0.075]; -main_ColorBar_3_in_3 = [200 15]; -main_ColorBar_3_in_4 = 0; -main_ColorBar_3_in_5 = NULL; -main_ColorBar_3_in_6 = NULL; -main_ColorBar_3_in_7 = NULL; -main_ColorBar_3_in_8 = NULL; -main_ColorBar_3_in_10 = NULL; -main_ColorBar_3_in_11 = 1.5; -main_ColorBar_3_in_12 = NULL; -main_ColorBar_3_in_13 = NULL; -main_ColorBar_3_in_14 = NULL; -main_ColorBar_3_in_15 = NULL; -main_ColorBar_3_in_16 = NULL; -main_ColorBar_3_out_1 = NULL; -main_Color_10_in_3 = 1.0; -main_Color_10_in_4 = NULL; -main_Color_10_in_5 = NULL; -main_Color_10_out_1 = NULL; -main_Collect_4_out_1 = NULL; -main_Switch_11_out_1 = NULL; -main_Collect_2_in_5 = NULL; -main_Collect_2_out_1 = NULL; -main_ColorBar_2_in_1 = NULL; -main_ColorBar_2_in_2 = [0.05 0.225]; -main_ColorBar_2_in_3 = [200 15]; -main_ColorBar_2_in_4 = 0; -main_ColorBar_2_in_5 = NULL; -main_ColorBar_2_in_6 = NULL; -main_ColorBar_2_in_7 = NULL; -main_ColorBar_2_in_8 = NULL; -main_ColorBar_2_in_9 = {"white"}; -main_ColorBar_2_in_10 = NULL; -main_ColorBar_2_in_11 = NULL; -main_ColorBar_2_in_12 = NULL; -main_ColorBar_2_in_13 = NULL; -main_ColorBar_2_in_14 = NULL; -main_ColorBar_2_in_15 = NULL; -main_ColorBar_2_in_16 = NULL; -macro Image( - id, - object, - where, - useVector, - to, - from, - width, - resolution, - aspect, - up, - viewAngle, - perspective, - options, - buttonState = 1, - buttonUpApprox = "none", - buttonDownApprox = "none", - buttonUpDensity = 1, - buttonDownDensity = 1, - renderMode = 0, - defaultCamera, - reset, - backgroundColor, - throttle, - RECenable = 0, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable = 0, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - interactionMode, - title, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - webOptions) -> ( - object, - camera, - where) -{ - ImageMessage( - id, - backgroundColor, - throttle, - 
RECenable, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - interactionMode, - title, - renderMode, - buttonUpApprox, - buttonDownApprox, - buttonUpDensity, - buttonDownDensity) [instance: 1, cache: 1]; - autoCamera = - AutoCamera( - object, - "front", - object, - resolution, - aspect, - [0,1,0], - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - realCamera = - Camera( - to, - from, - width, - resolution, - aspect, - up, - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - coloredDefaultCamera = - UpdateCamera(defaultCamera, - background=backgroundColor) [instance: 1, cache: 1]; - nullDefaultCamera = - Inquire(defaultCamera, - "is null + 1") [instance: 1, cache: 1]; - resetCamera = - Switch( - nullDefaultCamera, - coloredDefaultCamera, - autoCamera) [instance: 1, cache: 1]; - resetNull = - Inquire( - reset, - "is null + 1") [instance: 2, cache: 1]; - reset = - Switch( - resetNull, - reset, - 0) [instance: 2, cache: 1]; - whichCamera = - Compute( - "($0 != 0 || $1 == 0) ? 
1 : 2", - reset, - useVector) [instance: 1, cache: 1]; - camera = Switch( - whichCamera, - resetCamera, - realCamera) [instance: 3, cache: 1]; - AAobject = - AutoAxes( - object, - camera, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels) [instance: 1, cache: 1]; - switchAAenable = Compute("$0+1", - AAenable) [instance: 2, cache: 1]; - object = Switch( - switchAAenable, - object, - AAobject) [instance:4, cache: 1]; - SWapproximation_options = - Switch( - buttonState, - buttonUpApprox, - buttonDownApprox) [instance: 5, cache: 1]; - SWdensity_options = - Switch( - buttonState, - buttonUpDensity, - buttonDownDensity) [instance: 6, cache: 1]; - HWapproximation_options = - Format( - "%s,%s", - buttonDownApprox, - buttonUpApprox) [instance: 1, cache: 1]; - HWdensity_options = - Format( - "%d,%d", - buttonDownDensity, - buttonUpDensity) [instance: 2, cache: 1]; - switchRenderMode = Compute( - "$0+1", - renderMode) [instance: 3, cache: 1]; - approximation_options = Switch( - switchRenderMode, - SWapproximation_options, - HWapproximation_options) [instance: 7, cache: 1]; - density_options = Switch( - switchRenderMode, - SWdensity_options, - HWdensity_options) [instance: 8, cache: 1]; - renderModeString = Switch( - switchRenderMode, - "software", - "hardware")[instance: 9, cache: 1]; - object_tag = Inquire( - object, - "object tag")[instance: 3, cache: 1]; - annoted_object = - Options( - object, - "send boxes", - 0, - "cache", - 1, - "object tag", - object_tag, - "ddcamera", - whichCamera, - "rendering approximation", - approximation_options, - "render every", - density_options, - "button state", - buttonState, - "rendering mode", - renderModeString) [instance: 1, cache: 1]; - RECresNull = - Inquire( - RECresolution, - "is null + 1") [instance: 4, cache: 1]; - ImageResolution = - Inquire( - camera, 
- "camera resolution") [instance: 5, cache: 1]; - RECresolution = - Switch( - RECresNull, - RECresolution, - ImageResolution) [instance: 10, cache: 1]; - RECaspectNull = - Inquire( - RECaspect, - "is null + 1") [instance: 6, cache: 1]; - ImageAspect = - Inquire( - camera, - "camera aspect") [instance: 7, cache: 1]; - RECaspect = - Switch( - RECaspectNull, - RECaspect, - ImageAspect) [instance: 11, cache: 1]; - switchRECenable = Compute( - "$0 == 0 ? 1 : (($2 == $3) && ($4 == $5)) ? ($1 == 1 ? 2 : 3) : 4", - RECenable, - switchRenderMode, - RECresolution, - ImageResolution, - RECaspect, - ImageAspect) [instance: 4, cache: 1]; - NoRECobject, RECNoRerenderObject, RECNoRerHW, RECRerenderObject = Route(switchRECenable, annoted_object); - Display( - NoRECobject, - camera, - where, - throttle) [instance: 1, cache: 1]; - image = - Render( - RECNoRerenderObject, - camera) [instance: 1, cache: 1]; - Display( - image, - NULL, - where, - throttle) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 1, cache: 1]; - rec_where = Display( - RECNoRerHW, - camera, - where, - throttle) [instance: 1, cache: 0]; - rec_image = ReadImageWindow( - rec_where) [instance: 1, cache: 1]; - WriteImage( - rec_image, - RECfile, - RECformat) [instance: 1, cache: 1]; - RECupdateCamera = - UpdateCamera( - camera, - resolution=RECresolution, - aspect=RECaspect) [instance: 2, cache: 1]; - Display( - RECRerenderObject, - camera, - where, - throttle) [instance: 1, cache: 1]; - RECRerenderObject = - ScaleScreen( - RECRerenderObject, - NULL, - RECresolution, - camera) [instance: 1, cache: 1]; - image = - Render( - RECRerenderObject, - RECupdateCamera) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 2, cache: 1]; -} -main_Image_2_in_1 = "Image_2"; -main_Image_2_in_3 = "X24,,"; -main_Image_2_in_4 = 1; -main_Image_2_in_5 = [1855.82 170889 0]; -main_Image_2_in_6 = [1855.82 170889 524001]; -main_Image_2_in_7 = 8190.35; -main_Image_2_in_8 = 1440; 
-main_Image_2_in_9 = 0.955; -main_Image_2_in_10 = [0 1 0]; -main_Image_2_in_11 = NULL; -main_Image_2_in_12 = 0; -main_Image_2_in_13 = NULL; -main_Image_2_in_14 = 1; -main_Image_2_in_15 = NULL; -main_Image_2_in_16 = NULL; -main_Image_2_in_17 = NULL; -main_Image_2_in_18 = NULL; -main_Image_2_in_19 = 0; -main_Image_2_in_20 = NULL; -main_Image_2_in_21 = NULL; -main_Image_2_in_22 = "black"; -main_Image_2_in_23 = NULL; -main_Image_2_in_25 = "/Users/todd/Desktop/ke.tiff"; -main_Image_2_in_26 = "tiff gamma=1"; -main_Image_2_in_27 = 750; -main_Image_2_in_28 = NULL; -main_Image_2_in_29 = 0; -main_Image_2_in_30 = {"x axis", "y axis", ""}; -main_Image_2_in_31 = { -15 -15 15 }; -main_Image_2_in_32 = NULL; -main_Image_2_in_33 = NULL; -main_Image_2_in_34 = 1; -main_Image_2_in_35 = NULL; -main_Image_2_in_36 = NULL; -main_Image_2_in_37 = {"grey30", "grey5", "yellow", "white"}; -main_Image_2_in_38 = {"background", "grid", "ticks", "labels"}; -main_Image_2_in_39 = 0.5; -main_Image_2_in_40 = NULL; -main_Image_2_in_41 = "none"; -main_Image_2_in_42 = NULL; -main_Image_2_in_43 = NULL; -main_Image_2_in_44 = NULL; -main_Image_2_in_45 = NULL; -main_Image_2_in_46 = NULL; -main_Image_2_in_47 = NULL; -main_Image_2_in_48 = NULL; -main_Image_2_in_49 = NULL; -main_Tube_2_in_1 = NULL; -main_Tube_2_in_2 = NULL; -main_Tube_2_in_3 = 16; -main_Tube_2_in_4 = NULL; -Executive("product version 4 4 4"); -$sync -main(); diff --git a/visualization/dx/voronoi.dx b/visualization/dx/voronoi.dx deleted file mode 100644 index 38b492df9..000000000 --- a/visualization/dx/voronoi.dx +++ /dev/null @@ -1,236 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 240000 -ascii data file vor.position.data - -object "edge list" class array type int rank 0 items 240000 -ascii data file vor.edge.data -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 40000 -ascii data file vor.loop.data -attribute "ref" string "edges" - -object "face list" class array type 
int rank 0 items 40000 -ascii data file vor.face.data -attribute "ref" string "loops" - -object 0 class array type float rank 0 items 40000 -data file vor.index.data -attribute "dep" string "faces" - -object 1 class array type float rank 0 items 40000 -data file vor.block.data -attribute "dep" string "faces" - -object 2 class array type float rank 0 items 40000 -data file vor.area.data -attribute "dep" string "faces" - -object 3 class array type float rank 0 items 40000 -data file ./output/div.data -attribute "dep" string "faces" - -object 4 class array type float rank 0 items 40000 -data file ./output/vor.data -attribute "dep" string "faces" - -object 5 class array type float rank 0 items 40000 -data file scalar.data -attribute "dep" string "faces" - -object 6 class array type float rank 0 items 40000 -data file div_analy.data -attribute "dep" string "faces" - -object 7 class array type float rank 0 items 40000 -data file curl_analy.data -attribute "dep" string "faces" - -object 8 class array type float rank 0 items 40000 -data file gradmag_analy.data -attribute "dep" string "faces" - -object 9 class array type float rank 0 items 40000 -data file gradang_analy.data -attribute "dep" string "faces" - -object 10 class array type float rank 0 items 40000 -data file gradmag.data -attribute "dep" string "faces" - -object 11 class array type float rank 0 items 40000 -data file ./output/relative.data -attribute "dep" string "faces" - -object 12 class array type float rank 0 items 40000 -data file ./output/height.data -attribute "dep" string "faces" - -object 13 class array type float rank 0 items 40000 -data file ./output/thickness.data -attribute "dep" string "faces" - -object 14 class array type float rank 0 items 40000 -data file ./output/tracer1.data -attribute "dep" string "faces" - -object 15 class array type float rank 0 items 40000 -data file ./output/tracer2.data -attribute "dep" string "faces" - -object 16 class array type float rank 0 items 40000 -data file 
./output/vorTR1.data -attribute "dep" string "faces" - -object 17 class array type float rank 0 items 40000 -data file ./output/thicknessdiff.data -attribute "dep" string "faces" - -object 18 class array type float rank 0 items 40000 -data file ./output/keVD.data -attribute "dep" string "faces" - -object 19 class array type float rank 0 items 40000 -data file ./output/keVDdiff.data -attribute "dep" string "faces" - - - -object "index" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 - -object "block" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 1 - -object "area" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 2 - -object "div" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 3 - -object "vor" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 4 - -object "scalar" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 5 - -object "div_analy" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 6 - -object "curl_analy" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 7 - -object "gradmag_analy" class field -component "positions" "positions list" -component "edges" 
"edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 8 - -object "gradang_analy" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 9 - -object "gradmag" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 10 - -object "relative" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 11 - -object "height" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 12 - -object "thickness" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 13 - -object "tracer1" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 14 - -object "tracer2" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 15 - -object "vorTR1" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 16 - -object "thicknessdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 17 - -object "keVD" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 18 - -object "keVDdiff" 
class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 19 From 42f42ed1436c98ddd0ef5facbe94dde7f055f514 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:10:30 -0600 Subject: [PATCH 016/180] Remove visualization/matlab --- visualization/matlab/MapDataToDx.m | 145 ----------------- visualization/matlab/MapGridToDx.m | 246 ----------------------------- 2 files changed, 391 deletions(-) delete mode 100644 visualization/matlab/MapDataToDx.m delete mode 100644 visualization/matlab/MapGridToDx.m diff --git a/visualization/matlab/MapDataToDx.m b/visualization/matlab/MapDataToDx.m deleted file mode 100644 index 06e62fe45..000000000 --- a/visualization/matlab/MapDataToDx.m +++ /dev/null @@ -1,145 +0,0 @@ - -% This script open a output.nc file and writes out the output -% in ascii format to be read in by OpenDX - -clear all - -eps = 1.0e-12 - -ncid = netcdf.open('output.nc','nc_nowrite') - -doThickness = 0; -doKE = 0; -doVorticity = 0; -doVelocity = 1; - -%%%%% -% CHECK THAT THE DIMENSION ORDER (AND NUMBER) AGREES WITH OUR OUTPUT.NC -%%%%% - -[TimeName, TimeLength] = netcdf.inqDim(ncid,0); -[nCellsName, nCellsLength] = netcdf.inqDim(ncid,1); -[nEdgesName, nEdgesLength] = netcdf.inqDim(ncid,2); -[nVerticesName, nVerticesLength] = netcdf.inqDim(ncid,5); -[nVertLevelsName, nVertLevelsLength] = netcdf.inqDim(ncid,9); -[nTracersName, nTracersLength] = netcdf.inqDim(ncid,10); - -TimeLength -nCellsLength -nEdgesLength -nVerticesLength -nVertLevelsLength -nTracersLength - - -if (doThickness == 1) - -thicknessID = netcdf.inqVarID(ncid,'h'); -work = netcdf.getVar(ncid,thicknessID); -[thicknessName,xtype,dimids,natts] = netcdf.inqVar(ncid,thicknessID); -thickness=work; - -system('rm -f ./dx/h.*.*.data') -for iLevel=1:nVertLevelsLength -for iTime=0:TimeLength-1 - iTime - stringTime = int2str(iTime); - stringVert = int2str(iLevel); - FileName = strcat('./dx/', 
thicknessName, '.', ... - stringVert, '.', stringTime, '.', 'data') - for iCell=1:nCellsLength - data = thickness(iLevel,iCell,iTime+1); - if abs(data) < eps, data=0, end - dlmwrite(FileName, data, ... - 'precision', '%18.10e', '-append') - end -end -end -end - -if (doKE == 1) - -keID = netcdf.inqVarID(ncid,'ke'); -work = netcdf.getVar(ncid,keID); -[keName,xtype,dimids,natts] = netcdf.inqVar(ncid,keID); -ke=work; - -system('rm -f ./dx/ke.*.*.data') -for iLevel=1:nVertLevelsLength -for iTime=0:TimeLength-1 - stringTime = int2str(iTime); - stringVert = int2str(iLevel); - FileName = strcat('./dx/', keName, '.', ... - stringVert, '.', stringTime, '.', 'data') - for iCell=1:nCellsLength - data = ke(iLevel,iCell,iTime+1); - if abs(data) < eps, data=0;, end - dlmwrite(FileName, data, ... - 'precision', '%18.10e', '-append') - end -end -end -end - -if (doVorticity == 1) - -vorticityID = netcdf.inqVarID(ncid,'vorticity'); -work = netcdf.getVar(ncid,vorticityID); -[vorticityName,xtype,dimids,natts] = netcdf.inqVar(ncid,vorticityID); -vorticity=work; - -system('rm -f ./dx/vorticity.*.*.data') -for iLevel=1:nVertLevelsLength -for iTime=0:TimeLength-1 - stringTime = int2str(iTime); - stringVert = int2str(iLevel); - FileName = strcat('./dx/', vorticityName, '.', ... - stringVert, '.', stringTime, '.', 'data') - for iVertex=1:nVerticesLength - data = vorticity(iLevel,iVertex,iTime+1); - if abs(data) < eps, data=0;, end - dlmwrite(FileName, data, ... 
- 'precision', '%18.10e', '-append') - end -end -end -end - -if (doVelocity == 1) - -uID = netcdf.inqVarID(ncid,'uReconstructX'); -work = netcdf.getVar(ncid,uID); -[uName,xtype,dimids,natts] = netcdf.inqVar(ncid,uID); -u=work; - -vID = netcdf.inqVarID(ncid,'uReconstructY'); -work = netcdf.getVar(ncid,vID); -[vName,xtype,dimids,natts] = netcdf.inqVar(ncid,vID); -v=work; - -wID = netcdf.inqVarID(ncid,'uReconstructZ'); -work = netcdf.getVar(ncid,wID); -[wName,xtype,dimids,natts] = netcdf.inqVar(ncid,wID); -w=work; - -system('rm -f ./dx/velocity.*.*.data') -for iLevel=1:nVertLevelsLength -for iTime=0:TimeLength-1 - stringTime = int2str(iTime); - stringVert = int2str(iLevel); - FileName = strcat('./dx/', 'velocity', '.', ... - stringVert, '.', stringTime, '.', 'data') - for iCell=1:nCellsLength - r(1) = u(iLevel,iCell,iTime+1); - r(2) = v(iLevel,iCell,iTime+1); - r(3) = w(iLevel,iCell,iTime+1); - dlmwrite(FileName, r(1), ... - 'precision', '%18.10e', '-append') - dlmwrite(FileName, r(2), ... - 'precision', '%18.10e', '-append') - dlmwrite(FileName, r(3), ... 
- 'precision', '%18.10e', '-append') - end -end -end -end \ No newline at end of file diff --git a/visualization/matlab/MapGridToDx.m b/visualization/matlab/MapGridToDx.m deleted file mode 100644 index 39e4dbf07..000000000 --- a/visualization/matlab/MapGridToDx.m +++ /dev/null @@ -1,246 +0,0 @@ - - -% This script open a grid.nc file and writes out the grid description -% in ascii format to be read in by OpenDX - -clear all - -% begin periodic parameters -doPeriodic = 0; -dc = 1000.0; -nx = 200; -ny = 200; -% end periodic parameters - -doWrite = 1 -doVor = 1 -doTri = 1 -doVector = 1 - -ncid = netcdf.open('grid.nc','nc_nowrite'); - -if (doVor == 1) - - xV_id = netcdf.inqVarID(ncid,'xVertex'); - yV_id = netcdf.inqVarID(ncid,'yVertex'); - zV_id = netcdf.inqVarID(ncid,'zVertex'); - nEdgesOnCell_id = netcdf.inqVarID(ncid,'nEdgesOnCell'); - verticesOnCell_id = netcdf.inqVarID(ncid,'verticesOnCell'); - areaCell_id = netcdf.inqVarID(ncid,'areaCell'); - - xV=netcdf.getVar(ncid, xV_id); - yV=netcdf.getVar(ncid, yV_id); - zV=netcdf.getVar(ncid, zV_id); - nEdgesOnCell=netcdf.getVar(ncid, nEdgesOnCell_id); - verticesOnCell=netcdf.getVar(ncid, verticesOnCell_id); - areaCell = netcdf.getVar(ncid, areaCell_id); - - xC_id = netcdf.inqVarID(ncid,'xCell'); - yC_id = netcdf.inqVarID(ncid,'yCell'); - zC_id = netcdf.inqVarID(ncid,'zCell'); - - xC=netcdf.getVar(ncid, xC_id); - yC=netcdf.getVar(ncid, yC_id); - zC=netcdf.getVar(ncid, zC_id); - - work=size(nEdgesOnCell(:,1)); - nCells=work(1) - - if (doWrite == 1) - system('rm -f ./dx/vor.position.data'); - system('rm -f ./dx/vor.edge.data'); - system('rm -f ./dx/vor.loop.data'); - system('rm -f ./dx/vor.face.data'); - system('rm -f ./dx/vor.area.data'); - - iloop=0; - iedge=0; - for i=1:nCells - dlmwrite('./dx/vor.face.data', i-1, '-append'); - dlmwrite('./dx/vor.area.data', areaCell(i), ... - 'precision', '%18.10e', '-append'); - dlmwrite('./dx/vor.loop.data', iloop, ... 
- 'precision', '%10i', '-append'); - edge(1:nEdgesOnCell(i)) = iedge; - - for j=1:nEdgesOnCell(i) - x(1,j) = xV(verticesOnCell(j,i)); - x(2,j) = yV(verticesOnCell(j,i)); - x(3,j) = zV(verticesOnCell(j,i)); - end; - - if (doPeriodic == 1); - for j=1:nEdgesOnCell(i); - dx = x(1,j)-xC(i); - dy = x(2,j)-yC(i); - if(abs(dx) > 0.1*nx*dc); - if(dx > 0);, x(1,j) = x(1,j) - nx*dc;, end; - if(dx < 0);, x(1,j) = x(1,j) + nx*dc;, end; - end; - if(abs(dy) > 0.1*ny*dc*sqrt(3)/2); - if(dy > 0);, x(2,j) = x(2,j) - sqrt(3)*nx*dc/2;, end; - if(dy < 0);, x(2,j) = x(2,j) + sqrt(3)*nx*dc/2;, end; - end; - end; - end; - - for j=1:nEdgesOnCell(i) - dlmwrite('./dx/vor.position.data', x(:,j), 'delimiter', '\t', ... - 'precision', '%18.10e', '-append'); - edge(j) = iedge + j - 1; - end; - dlmwrite('./dx/vor.edge.data', edge(1:nEdgesOnCell(i)), ... - 'delimiter', '\t', 'precision', '%10i', '-append') - iloop = iloop + nEdgesOnCell(i); - iedge = iedge + nEdgesOnCell(i); - end; - - end; - -end; - -if (doTri == 1) - - xC_id = netcdf.inqVarID(ncid,'xCell'); - yC_id = netcdf.inqVarID(ncid,'yCell'); - zC_id = netcdf.inqVarID(ncid,'zCell'); - nCellsOnVertex = 3; - cellsOnVertex_id = netcdf.inqVarID(ncid, 'cellsOnVertex'); - areaTriangle_id = netcdf.inqVarID(ncid,'areaTriangle'); - - xC=netcdf.getVar(ncid, xC_id); - yC=netcdf.getVar(ncid, yC_id); - zC=netcdf.getVar(ncid, zC_id); - cellsOnVertex=netcdf.getVar(ncid, cellsOnVertex_id); - areaTriangle = netcdf.getVar(ncid, areaTriangle_id); - - xV_id = netcdf.inqVarID(ncid,'xVertex'); - yV_id = netcdf.inqVarID(ncid,'yVertex'); - zV_id = netcdf.inqVarID(ncid,'zVertex'); - - xV=netcdf.getVar(ncid, xV_id); - yV=netcdf.getVar(ncid, yV_id); - zV=netcdf.getVar(ncid, zV_id); - - work=size(cellsOnVertex); - nVertices = work(:,2) - - if (doWrite == 1) - system('rm -f ./dx/tri.position.data'); - system('rm -f ./dx/tri.edge.data'); - system('rm -f ./dx/tri.loop.data'); - system('rm -f ./dx/tri.face.data'); - system('rm -f ./dx/tri.area.data'); - - iloop=0; - 
iedge=0; - for i=1:nVertices - dlmwrite('./dx/tri.face.data', i-1, '-append'); - dlmwrite('./dx/tri.area.data', areaTriangle(i), ... - 'precision', '%18.10e', '-append'); - dlmwrite('./dx/tri.loop.data', iloop, ... - 'precision', '%10i', '-append'); - edge(1:3) = iedge; - for j=1:nCellsOnVertex - x(1,j) = xC(cellsOnVertex(j,i)); - x(2,j) = yC(cellsOnVertex(j,i)); - x(3,j) = zC(cellsOnVertex(j,i)); - end; - - if (doPeriodic == 1); - for j=1:nCellsOnVertex; - dx = x(1,j)-xV(i); - dy = x(2,j)-yV(i); - if(abs(dx) > 0.1*nx*dc); - if(dx > 0);, x(1,j) = x(1,j) - nx*dc;, end; - if(dx < 0);, x(1,j) = x(1,j) + nx*dc;, end; - end; - if(abs(dy) > 0.1*ny*dc*sqrt(3)/2); - if(dy > 0);, x(2,j) = x(2,j) - sqrt(3)*nx*dc/2;, end; - if(dy < 0);, x(2,j) = x(2,j) + sqrt(3)*nx*dc/2;, end; - end; - end; - end; - - for j=1:nCellsOnVertex; - dlmwrite('./dx/tri.position.data', x(:,j), 'delimiter', '\t', ... - 'precision', '%18.10e', '-append') - edge(j) = iedge + j - 1; - end; - dlmwrite('./dx/tri.edge.data', edge(1:3), ... - 'delimiter', '\t', 'precision', '%10i', '-append') - iloop = iloop + 3; - iedge = iedge + 3; - end; - - end; - -end; - -if (doVector == 1) - - if (doWrite == 1) - system('rm -f ./dx/vector.position.data'); - system('rm -f ./dx/vector.data'); - end; - - nEdgesOnCell_id = netcdf.inqVarID(ncid,'nEdgesOnCell'); - nEdgesOnCell=netcdf.getVar(ncid, nEdgesOnCell_id); - work=size(nEdgesOnCell(:,1)); - nCells=work(1) - - xC_id = netcdf.inqVarID(ncid,'xCell'); - yC_id = netcdf.inqVarID(ncid,'yCell'); - zC_id = netcdf.inqVarID(ncid,'zCell'); - - xC=netcdf.getVar(ncid, xC_id); - yC=netcdf.getVar(ncid, yC_id); - zC=netcdf.getVar(ncid, zC_id); - - xP = 0.0; - yP = 0.0; - zP = 1.0; - - for i=1:nCells - - a(1) = xC(i); - a(2) = yC(i); - a(3) = zC(i); - - b(1) = xP; - b(2) = yP; - b(3) = zP; - - c(1) = a(2)*b(3) - a(3)*b(2); - c(2) = a(3)*b(1) - a(1)*b(3); - c(3) = a(1)*b(2) - a(2)*b(1); - - - if (doWrite == 1) - - dlmwrite('./dx/vector.position.data', xC(i), ... 
- 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.position.data', yC(i), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.position.data', zC(i), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.data', c(1), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.data', c(2), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.data', c(3), ... - 'precision', '%18.10e', '-append') - - - end; - -end; - -end; - -netcdf.close(ncid) \ No newline at end of file From 81e5d7f9aae7948afd2b1589701854c05d21c252 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:10:48 -0600 Subject: [PATCH 017/180] Remove visualization/mean_section --- visualization/mean_section/ColdHot.m | 44 ---- visualization/mean_section/example_sections.m | 109 -------- .../mean_section/find_cell_weights.m | 174 ------------- .../mean_section/load_large_variables.m | 142 ----------- visualization/mean_section/mean_section.m | 240 ------------------ .../mean_section/sub_plot_cross_sections.m | 210 --------------- .../mean_section/sub_plot_section_locations.m | 97 ------- visualization/mean_section/triArea.m | 1 - 8 files changed, 1017 deletions(-) delete mode 100644 visualization/mean_section/ColdHot.m delete mode 100644 visualization/mean_section/example_sections.m delete mode 100644 visualization/mean_section/find_cell_weights.m delete mode 100644 visualization/mean_section/load_large_variables.m delete mode 100644 visualization/mean_section/mean_section.m delete mode 100644 visualization/mean_section/sub_plot_cross_sections.m delete mode 100644 visualization/mean_section/sub_plot_section_locations.m delete mode 120000 visualization/mean_section/triArea.m diff --git a/visualization/mean_section/ColdHot.m b/visualization/mean_section/ColdHot.m deleted file mode 100644 index c93e0629f..000000000 --- a/visualization/mean_section/ColdHot.m +++ /dev/null @@ -1,44 +0,0 @@ -function B = ColdHot(m) -% A colormap for blue 
cold, white zero, Hot positives. - -if nargin < 1, m = 256; end - -n = fix(m/8); - -% Create cold part: -A = [ - 102 0 102; - 0 41 253; - 102 153 255; - 41 255 255; - 255 255 255]/255; -%A = ones(size(A)) - A; - -v = [n-1 n n n]; - -cold = linspacev(A,v); - -% Create hot part: -A = [ - 255 255 255; - 255 255 0; - 255 102 41; - 255 0 0; - 102 41 0]/255; - -v = [n n n n-1]; -myhot = linspacev(A,v); - - -B = [cold; myhot]; - -%B = [B; flipud(hot(fix(m/2)))]; - - -% Original cold part, 8/2/02: -A = [ - 102 0 102; - 41 0 153; - 0 0 204; - 42 102 255; - 255 255 255]/255; \ No newline at end of file diff --git a/visualization/mean_section/example_sections.m b/visualization/mean_section/example_sections.m deleted file mode 100644 index cb7b578c8..000000000 --- a/visualization/mean_section/example_sections.m +++ /dev/null @@ -1,109 +0,0 @@ -% example_sections.m - -% This file simply contains example cross sections with text names. -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example sections. - -% sectionText a cell array with text describing each section -sectionText = { -'N Atlantic zonal mean',... -'N Atlantic EUC zonal mean',... -'Eq Pacific 140W lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startLat startLon endLat endLon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - 0 -80 60 0;... % N Atlantic zonal mean - ]; - -coord = [... - 0 -90 60 -60;... % N Atlantic zonal mean - ]; - -coord = [... - -8 -140 10 -140;... % Eq Pac Meridional - ]; - -coord = [... - -8 -170 10 -95;... % Eq Pac Meridional - ]; - -coord = [... - 0 -80 60 0;... % N Atlantic zonal mean - 21 -80 45 -60;... 
% DWBC N Atl meridional section - 21 283 32 285;... % DWBC N Atl meridional section - ]; - -coord = [... - -35 -80 70 -1;... % N Atlantic zonal mean - ]; - -coord = [... - -35 -97 70 -1;... % N Atlantic zonal mean - ]; -nSections = size(coord,1); - -% number of points to plot for each figure -nLat = 100; -nLon = 100; - -% Direction to take mean: zonal (z) or meridional (m) -meanDirection = 'z'; - -% plotDepth(nSections) depth to which to plot each section, in m -plotDepth = 5000*ones(1,size(coord,1)); - -% For plotting, only four plots are allowed per row. -% Choose sections above for each page. -% page.name name of this group of sections -% sectionID section numbers for each row of this page -page(1).name = 'NA'; -page(1).sectionID = [1:nSections]; - -% coord range may need alteration to match lonVertex: -% If lonVertex is between 0 and 2*pi, ensure the coordinate range is 0 to 360. -%coord(:,2) = mod(coord(:,2),360); -%coord(:,4) = mod(coord(:,4),360); -% If lonVertex is between -pi and pi, ensure the coordinate range is -180 to 180. -coord(:,2) = mod(coord(:,2)+180,360)-180; -coord(:,4) = mod(coord(:,4)+180,360)-180; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example variables - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -var_name = {... -'avgVelocityZonal',... -'avgVelocityMeridional',... -'ke_avgVelocity'}; - -var_name = {... -'avgVelocityMeridional',... 
-}; - -var_conv_factor = [100 100 1]; % convert m/s to cm/s for velocities - -%var_lims = [-20 20 2.0; -10 10 1.0; 0 20 2.5]; -var_lims = [-1 1 .1; -10 10 1.0; 0 20 2.5]; -%var_lims = [-5 5 .5; -10 10 1.0; 0 20 2.5]; - -%var_lims = [-110 110 10.0; -10 10 1.0; 0 20 2.5]; - diff --git a/visualization/mean_section/find_cell_weights.m b/visualization/mean_section/find_cell_weights.m deleted file mode 100644 index 595da92e3..000000000 --- a/visualization/mean_section/find_cell_weights.m +++ /dev/null @@ -1,174 +0,0 @@ -function [cellsOnVertexSection, cellWeightsSection, latSection,lonSection, ... - refLayerThickness, refMidDepth, refBottomDepth, maxLevelCellSection, sphere_radius] = find_cell_weights ... - (wd,dir,netcdf_file,sectionText,coord,nLat,nLon) - -% This function reads grid data from an MPAS-Ocean grid or restart -% netCDF file, and finds a path of cells that connect the endpoints -% specified in coord. The path is forced to travel through cells -% that are closest to the line connecting the beginning and end -% cells. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. 
-% sectionText a cell array with text describing each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% -%%%%%%%%%% output arguments %%%%%%%%% -% cellsOnVertexSection(vertexDegree,nLat,nLon,nSections) cells neighboring nearest vertex -% cellWeightsSection(vertexDegree,nLat,nLon,nSections) weights for each cell -% latSection(nLat,nSections) lat coordinates of each section -% lonSection(nLon,nSections) lon coordinates of each section -% maxLevelCellSection(nLat,nLon,nSections) min of maxLevelCell of cells surrounding vertex -% refMidDepth(nVertLevels) depth of center of each layer, for plotting -% latCellDeg(nCells) lat arrays for all cells -% lonCellDeg(nCells) lon arrays for all cells - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read data from file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_cell_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ] -ncid = netcdf.open(filename,'nc_nowrite'); - -xCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xCell')); -yCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yCell')); -zCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zCell')); -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -xVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xVertex')); -yVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yVertex')); -zVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zVertex')); -cellsOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'cellsOnVertex')); -refLayerThickness = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refLayerThickness')); -refBottomDepth = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refBottomDepth')); -maxLevelCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'maxLevelCell')); -sphere_radius = netcdf.getAtt(ncid,netcdf.getConstant('NC_GLOBAL'),'sphere_radius'); 
-[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -netcdf.close(ncid) - -nSections = size(coord,1); - -% Compute depth of center of each layer, for plotting -refMidDepth(1) = refLayerThickness(1)/2; -for i=2:nVertLevels - refMidDepth(i) = refMidDepth(i-1) + 0.5*(refLayerThickness(i) + refLayerThickness(i-1)); -end -% depth(1)=0; % make top layer plot to surface - -latSection = zeros(nLat,nSections); -lonSection = zeros(nLon,nSections); -maxLevelCellSection = zeros(nLat,nLon,nSections); -nearestVertexSection = zeros(nLat,nLon,nSections); -cellsOnVertexSection = zeros(vertexDegree,nLat,nLon,nSections); -cellWeightsSection = zeros(vertexDegree,nLat,nLon,nSections); -margin=.5; - -for iSection=1:nSections - fprintf('Finding nearest vertices for Section %g \n',iSection) - latSection(:,iSection) = linspace(coord(iSection,1),coord(iSection,3),nLat); - lonSection(:,iSection) = linspace(coord(iSection,2),coord(iSection,4),nLon); - - maxLon = (max(lonSection(:,iSection))+margin)*pi/180; - minLon = (min(lonSection(:,iSection))-margin)*pi/180; - -% maxLat = (max(latSection(:,iSection))+margin)*pi/180; -% minLat = (min(latSection(:,iSection))-margin)*pi/180; - -% vInd = find(latVertex>minLat&latVertexminLon&lonVertexminLat&latVertexminLon&lonVertex ' doc_dir '/' filename ]); - -% if not(strcmp(latex_command,'none')) -% fprintf('*** Compiling latex document \n') -% cd(doc_dir); -% unix([latex_command ' ' filename]); -% cd('../..'); -% end - -end % iSim - - diff --git a/visualization/mean_section/sub_plot_cross_sections.m b/visualization/mean_section/sub_plot_cross_sections.m deleted file mode 100644 index 3b5483d64..000000000 --- a/visualization/mean_section/sub_plot_cross_sections.m +++ /dev/null @@ -1,210 +0,0 @@ -function 
sub_plot_cross_sections(dir,netcdfFile,sectionText,pageName,sectionID,sectionData,refMidDepth,refBottomDepth,... - latSection,lonSection, maxLevelCellSection,coord, plotDepth,... - var_name,var_lims,meanDirection,fid_latex) - -% Plot cross-sections of MPAS fields. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% sectionText a cell array with text describing each section -% sectionID section numbers for each row of this page -% pageName name of this group of sections -% sectionData(nVertLevels,nPoints,nSections,nVars) -% data in each cross-section for each variable -% refMidDepth(nVertLevels) depth of center of each layer, for plotting -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% plotDepth(nSections) depth to which to plot each section -% var_lims(nVars,3) contour line definition: min, max, interval -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% meanDirection Direction to take mean: zonal (z) or meridional (m) -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cross_sections simulation: ' dir '/' netcdfFile ... 
- ' plotting page: ' pageName '\n']) - -px = [.28 .78]; -py=linspace(.84,.13,4); % Midpoint position of plots -pw = [.4]; ph=[.18]; % width and height of plots - -nPoints = size(sectionData,2); -nSections = size(sectionData,3); -nVars = size(sectionData,4); - -for iVar=1:nVars - %figure(iVar+1); clf - %set(gcf,'Position',[100+(iVar*100) 600-iVar*100 550 400]) - %temptext2=char(var_name(iVar)); - - for iRow = 1:length(sectionID) - figure(10*iRow+iVar); clf - %set(gcf,'Position',[100+(iRow*100) 1200-iRow*100 550 400]) - iSection = sectionID(iRow); - if meanDirection == 'z' % zonal mean - xtext = 'latitude'; - xaxis = latSection(:,iSection); - elseif meanDirection == 'm' % meridional mean - xtext = 'longitude'; - xaxis = lonSection(:,iSection); - end - - % left column - % ha=subplot('position',[px(1)-pw/2 py(iRow)-ph/2 pw ph]); - % temptext = char(sectionText(iSection)); - % if temptext2(1:6)=='ke_fromAvgVelocity' - % h=surf(xaxis, refMidDepth,log10(sectionData(:,1:nCellsInSection(iSection),iSection,iVar))); - % set(gca,'CLim',[-1 1.2]) - % else - % h=surf(xaxis, refMidDepth,sectionData(:,1:nCellsInSection(iSection),iSection,iVar)); - % end - - % set(h,'EdgeColor','none') - % view(0,-90) - % title([temptext ', cm/s']) - % ylabel('depth, m') - % xlabel(xtext) - % axis tight - % set(gca,'YLim',[0 plotDepth(iSection)]) - % h=colorbar ; - % if temptext2(1:6)=='ke_fromAvgVelocity' -% set(h,'YTick',[-1:1:1.2],'YTickLabel',[0.1 1 10]) - % end - - % right column - -px = [.53]; -py=.53; -pw = [.85]; ph=[.83]; % width and height of plots - ha=subplot('position',[px(1)-pw/2 py-ph/2 pw ph]); - temptext = char(sectionText(iSection)); - hold on -% contour(xaxis, refMidDepth,sectionData(:,1:nCellsInSection(iSection),iSection,iVar), ... 
-% [var_lims(iVar,1):var_lims(iVar,3):var_lims(iVar,2)]); -% set(gca,'CLim',var_lims(iVar,1:2)) - - %%%%%% special commands for DWBC mrp - % imitating colorbar at http://www.agu.org/journals/jc/jc1203/2011JC007586/figures.shtml#fig10 - - %xaxis = xaxis - 360*ones(size(xaxis)); - % xaxis is now in longitude. Convert to Distance (km) - % along 26.5N east of 77W - %xaxis = (xaxis+77)*99; % for DWBC only - %contour_lims = [-25 -20 -15 -10 -5 -2 -1 1 2 5 10 15 20 25]; - %contour_lims = [-20 -15 -10 -5 -2 0 2 5 10 15 20 25 30]; - contour_lims = [var_lims(iVar,1):var_lims(iVar,3):var_lims(iVar,2)]; - [cout,h]=contourf(xaxis, refMidDepth,sectionData(:,:,iSection,iVar), ... - contour_lims); - set(gca,'CLim',[min(contour_lims) max(contour_lims)]) - set(h,'LineColor',[.5 .5 .5]) - cbfrac=0; - - % Text labels on countours - [cout,h]=contour(xaxis, refMidDepth,sectionData(:,:,iSection,iVar),... - contour_lims); - ls=[200]; - clabel(cout,h,'fontsize',10,'color','k','rotation',0,'LabelSpacing',ls); - set(h,'LineColor',[.5 .5 .5]) - - % Black lines - %[cout,h]=contour(xaxis, refMidDepth,sectionData(:,:,iSection,iVar),[-100:50:100]); - %set(h,'LineColor',[0 0 0],'LineWidth',1) - - % stretched colorbar using contour_lims: - cmin=min(contour_lims); - cmax=max(contour_lims); -% cvalue = cmin-.5*dc:dc:cmax+.5*dc; - nc_orig = 256; - nc = length(contour_lims); - cmap_orig = ColdHot(nc_orig); - cmap_orig_short = zeros(nc-1,3); - ind=(.5:1:nc-.5); - for j=1:nc-1 - cmap_orig_short(j,:) = cmap_orig( floor((j-.5)/(nc-1)*nc_orig),:); - end - - cvalue = linspace(cmin,cmax,256); - nc_inc = length(cvalue); - - cmapnew = zeros(nc_inc,3); - for jnew=2:nc_inc - jold = max(min(min(find(contour_lims>=cvalue(jnew))),nc)-1,1); - cmapnew(jnew-1,:) = cmap_orig_short(jold,:); - end - cmapnew(nc_inc,:) = cmap_orig_short(nc-1,:); - - colormap(cmapnew) - %colorbarf_spec(cout,h,'vert',contour_lims); - %xlabel('Distance (km) along 26.5N east of 77W') % for DWBC only - - axis tight - %set(gca,'YLim',[0 
plotDepth(iSection)],'XLim',[0 175]) % for DWBC only - %set(gca,'YTick',[0:1000:5000],'XTick',[0:25:175]) - set(gca,'YLim',[0 plotDepth(iRow)]) - %set(gca,'YTick',[0:100:400]) - xlabel(xtext) - % set(gca,'XTick',-1*[80:.5:70]) - %%%%%% special commands for DWBC mrp end - - %%%%%% special commands for EUC mrp end - %if iRow==2 - % set(gca,'XTick',[143 156 165 180 190 205 220 235 250 265]) - % set(gca,'XTickLabel',{'143' '156' '165E' '180' '170W' '155' '140' '125' '110' '95'}) - %end - - %%%%%% special commands for EUC mrp end - - set(gca,'YDir','reverse') - title([temptext ', ' char(var_name(iVar))]) - ylabel('depth, m') - grid on - set(gca,'layer','top'); - h=colorbar; - - % mrp draw bottom based on zero contour - hold on - n = nPoints; - % old way: maxLevelCell=zeros(1,n); - x(2:n) = (xaxis(1:n-1)+xaxis(2:n))/2; - x(1) = xaxis(1) - (xaxis(2)-xaxis(1))/2; - x(n+1) = xaxis(n) + (xaxis(n)-xaxis(n-1))/2; - b = max(refBottomDepth); - for j=1:n - % old way: maxLevelCell(j)=max(min(find(sectionData(:,j,iSection,iVar)==0.0))-1,1); - %depthline(j) = refBottomDepth(maxLevelCellSection(j,iSection)); - %h=patch([x(j) x(j+1) x(j+1) x(j) x(j)],... - % [b b depthline(j) depthline(j) b], [.5 .5 .5]); - %set(h,'LineStyle','none') - end - - % mrp draw bottom based on zero contour end - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 5.5 3.2]) - subplot('position',[0 .95 1 .05]); axis off - title_txt = [regexprep(char(var_name(iVar)),'_','\\_') ', ' regexprep(dir,'_','\\_')]; -% h=text(.55,.4,title_txt); -% set(h,'HorizontalAlignment','center','FontWeight','bold','FontSize',14) -% text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - temp=['f/' dir '/' netcdfFile '_' pageName num2str(iRow) '_' ... 
- char(var_name(iVar))]; - filename = regexprep(temp,'\.','_'); - print('-djpeg',[filename '.jpg']); - print('-depsc2',[filename '.eps']); - unix(['epstopdf ' filename '.eps --outfile=' filename '.pdf']); - fprintf(fid_latex,['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); - - % print('-depsc2',[filename '.eps']) - - end - - -end - diff --git a/visualization/mean_section/sub_plot_section_locations.m b/visualization/mean_section/sub_plot_section_locations.m deleted file mode 100644 index 6e100eb21..000000000 --- a/visualization/mean_section/sub_plot_section_locations.m +++ /dev/null @@ -1,97 +0,0 @@ -function sub_plot_section_locations(dir,coord, ... - latSection,lonSection,fid_latex) - -% Plot section locations on world map - -% Mark Petersen, MPAS-Ocean Team, LANL, Sep 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cell_sections, on figure 1.\n']) - -nSections = size(coord,1); - -figure(1); clf - - minLon = -180.0; - latTrans = 360.0; - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. 
- load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - set(gca,'YDir','normal') - - hold on - - % world - axis tight - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-360+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - hold on - grid on - - for iSection=1:nSections - h=plot(coord(iSection,[2 2 4 4 2 4 2 4]),... - coord(iSection,[1 3 3 1 1 3 3 1]),'y-'); - set(h,'Color','y','LineWidth',1) - h=text(lonSection(1,iSection),latSection(1,iSection), ... - num2str(iSection)); - - set(h,'Color',[1 1 1],'FontWeight','bold') - %h=plot(lonSection(:,iSection),latSection(:,iSection),'y.'); - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Areas for means. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 6 4]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - filename=['f/' dir '/' 'mean_location_map' ]; - print('-djpeg',[filename '.jpg']) - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... 
- filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/mean_section/triArea.m b/visualization/mean_section/triArea.m deleted file mode 120000 index 6c9e81502..000000000 --- a/visualization/mean_section/triArea.m +++ /dev/null @@ -1 +0,0 @@ -../cross_section/triArea.m \ No newline at end of file From 30d85c996a6549b8da656c407261f46a68a0765e Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:11:15 -0600 Subject: [PATCH 018/180] Remove visualization/moc --- visualization/moc/ColdHot.m | 44 --- visualization/moc/compute_moc_from_w.m | 48 ---- visualization/moc/compute_transport.m | 98 ------- visualization/moc/find_edge_sections.m | 242 ---------------- visualization/moc/land_mask_Atlantic.m | 44 --- visualization/moc/land_mask_global.m | 23 -- visualization/moc/load_large_variables_edge.m | 78 ------ visualization/moc/load_vertical_velocity.m | 45 --- visualization/moc/moc.m | 259 ------------------ visualization/moc/plot_moc.m | 114 -------- visualization/moc/sub_plot_edge_sections.m | 117 -------- 11 files changed, 1112 deletions(-) delete mode 100644 visualization/moc/ColdHot.m delete mode 100644 visualization/moc/compute_moc_from_w.m delete mode 100644 visualization/moc/compute_transport.m delete mode 100644 visualization/moc/find_edge_sections.m delete mode 100644 visualization/moc/land_mask_Atlantic.m delete mode 100644 visualization/moc/land_mask_global.m delete mode 100644 visualization/moc/load_large_variables_edge.m delete mode 100644 visualization/moc/load_vertical_velocity.m delete mode 100644 visualization/moc/moc.m delete mode 100644 visualization/moc/plot_moc.m delete mode 100644 visualization/moc/sub_plot_edge_sections.m diff --git a/visualization/moc/ColdHot.m b/visualization/moc/ColdHot.m deleted file mode 100644 index c93e0629f..000000000 --- a/visualization/moc/ColdHot.m +++ /dev/null @@ -1,44 +0,0 @@ -function B = ColdHot(m) -% A colormap for blue cold, white zero, Hot positives. 
- -if nargin < 1, m = 256; end - -n = fix(m/8); - -% Create cold part: -A = [ - 102 0 102; - 0 41 253; - 102 153 255; - 41 255 255; - 255 255 255]/255; -%A = ones(size(A)) - A; - -v = [n-1 n n n]; - -cold = linspacev(A,v); - -% Create hot part: -A = [ - 255 255 255; - 255 255 0; - 255 102 41; - 255 0 0; - 102 41 0]/255; - -v = [n n n n-1]; -myhot = linspacev(A,v); - - -B = [cold; myhot]; - -%B = [B; flipud(hot(fix(m/2)))]; - - -% Original cold part, 8/2/02: -A = [ - 102 0 102; - 41 0 153; - 0 0 204; - 42 102 255; - 255 255 255]/255; \ No newline at end of file diff --git a/visualization/moc/compute_moc_from_w.m b/visualization/moc/compute_moc_from_w.m deleted file mode 100644 index 0e58ec640..000000000 --- a/visualization/moc/compute_moc_from_w.m +++ /dev/null @@ -1,48 +0,0 @@ -function [mocTop] = compute_moc_from_w ... - (vertVelocityTop, botDepth, ... - latCell,lonCell, areaCell,transport,mocLat,landMask) - -% Compute moc streamfunction from vertical velocity -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% -%%%%%%%%%% output arguments %%%%%%%%% -% mocTop(nVertLevels,nLat) -% data in each cross-section for each variable - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Load large variables -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** compute moc simulation: \n']) - -nVertLevels = length(botDepth); -nCells = length(areaCell); -nLat = length(mocLat); - -mocTop = zeros(nVertLevels+1,nLat); - -for k=2:nVertLevels+1 - mocTop(k,1) = mocTop(k-1,1) + transport(k-1)*1e6; -end - -for iLat = 2:nLat - ind = find(landMask==1 & latCell>=mocLat(iLat-1) & latCellabs(latChange) % zonal section - if lonChange>0 - fprintf(['Warning: Zonal sections should go from east to west. ' ... - 'For section %g start and end longitudes are %g, %g \n'], ... 
- j,sectionCoord(j,2),sectionCoord(j,4)) - end - else - if latChange<0 - fprintf(['Warning: Meridional sections should go from south to north. ' ... - 'For section %g start and end latitudes are %g, %g \n'], ... - j,sectionCoord(j,1),sectionCoord(j,3)) - end - end - -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read edge and edge data from grid file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_edge_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -verticesOnEdge = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'verticesOnEdge')); -edgesOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'edgesOnVertex')); -[dimname,nEdges]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nEdges')); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -netcdf.close(ncid) - -% Grid variables should be: -% lat varies from -pi/2:pi/2 -% lon varies from 0:2*pi -if (min(lonVertex)<-1e-8) - lonVertex = mod(lonVertex,2*pi); -end -% inserted for lon -180:180 -lonVertex = mod(lonVertex+pi,2*pi)-pi; - -% convert to degrees for plotting: -latVertexDeg = latVertex*180/pi; -lonVertexDeg = lonVertex*180/pi; - -sectionVertexIndex = zeros(maxEdges,nSections); -sectionEdgeIndex = zeros(maxEdges,nSections); -sectionEdgeSign = zeros(maxEdges,nSections); -nEdgesInSection = zeros(1,nSections); - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Find edges that connect beginning and ending points -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -for iSection=1:nSections - latCoord = [sectionCoord(iSection,1) sectionCoord(iSection,3)]/180*pi; - lonCoord = 
[sectionCoord(iSection,2) sectionCoord(iSection,4)]/180*pi; - - % Find vertex closest to start and end coordinates. - % The seed vertex array simply stores start and end index. - minDist = 1e10*ones(1,2); - seedVertexIndex = zeros(1,2); - for iVertex = 1:nVertices - for i=1:2 - dist = sqrt( ... - (lonCoord(i) - lonVertex(iVertex))^2 ... - + (latCoord(i) - latVertex(iVertex))^2); - if (dist0) - % Find the vertex on the other side of iEdge - if (verticesOnEdge(1,iEdge)==sectionVertexIndex(i,iSection)) - iVertex = verticesOnEdge(2,iEdge); - % Going from vertex 1 to vertex 2. Leave positive. - edgeSign = 1; - else - iVertex = verticesOnEdge(1,iEdge); - % Going from vertex 2 to vertex 1. Make negative. - edgeSign = -1; - end - - % I am using lat/lon Cartesian distance. - % This is distance to the final vertex location. - dist = sqrt( ... - (lonVertex(iVertex) - lonVertex(endVertexIndex))^2 ... - + (latVertex(iVertex) - latVertex(endVertexIndex))^2 ); - -%fprintf('%6i %6i %8.4f %8.4f h1=plot(%g,%g); h2=plot(%g,%g); \n',... -%i,j,dist,distLastVertex,... -% lonVertex(iVertex)*180/pi,latVertex(iVertex)*180/pi,... -% lonVertex(endVertexIndex)*180/pi,latVertex(endVertexIndex)*180/pi) - % check if this vertex is closer to the end vertex than the - % last vertex. If so, it is a candidate, and we can continue. 
- if (dist=70)) = 0.0; -land_mask(find( lat<-35)) = 0.0; - -% mask out eastern boundary -land_mask(find( lat>=-35 & lat<10 & lon180>22)) = 0.0; -land_mask(find( lat>= 10 & lat<49 & lon180> 0)) = 0.0; -land_mask(find( lat>= 49 & lat<66 & lon180>13)) = 0.0; -land_mask(find( lat>= 66 & lat<70 & lon180>30)) = 0.0; - -% mask out western boundary -land_mask(find( lat>=-35 & lat< 9 & lon180<-63)) = 0.0; -land_mask(find( lat>= 9 & lat<14 & lon180<-84)) = 0.0; -land_mask(find( lat>= 14 & lat<17 & lon180<-89)) = 0.0; -land_mask(find( lat>= 17 & lat<50 & lon180<-98)) = 0.0; -land_mask(find( lat>= 50 & lat<70 & lon180<-70)) = 0.0; - - diff --git a/visualization/moc/land_mask_global.m b/visualization/moc/land_mask_global.m deleted file mode 100644 index caf4dd6bf..000000000 --- a/visualization/moc/land_mask_global.m +++ /dev/null @@ -1,23 +0,0 @@ -function [land_mask] = land_mask_global(lat,lon) - -% Given latitude and longitude coordinates, produce a land mask -% land_mask = 1 in specified region -% land_mask = 0 elsewhere -% -% Mark Petersen, MPAS-Ocean Team, LANL, January 2013 -% -%%%%%%%%%% input arguments %%%%%%%%% -% lat(nPoints) latitude in degrees, ranging from 0 to 360 -% or -180 to 180 -% lon(nPoints) longitude in degrees, ranging from -90 to 90 -% -%%%%%%%%%% output arguments %%%%%%%%% -% land_mask(nPoints) - -if size(lat) ~= size(lon) - fprintf('Size of lat and lon must be the same.\n') - return -end - -% for global, include all points -land_mask = ones(size(lat)); diff --git a/visualization/moc/load_large_variables_edge.m b/visualization/moc/load_large_variables_edge.m deleted file mode 100644 index 249302aa8..000000000 --- a/visualization/moc/load_large_variables_edge.m +++ /dev/null @@ -1,78 +0,0 @@ -function [sectionData] = load_large_variables_edge ... - (wd,dir,netcdf_file, var_name, var_conv_factor, ... 
- sectionEdgeIndex, nEdgesInSection) - -% Load large variables from netcdf file -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% sectionEdgeIndex(maxEdges,nSections) cell index of each section -% nEdgesInSection(nSections) number of cells in each section -% -%%%%%%%%%% output arguments %%%%%%%%% -% sectionData(nVertLevels,max(nEdgesInSection),nSections,nVars) -% data in each cross-section for each variable - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Load large variables -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** load_large_variables simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -nAverage = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'nAverage')); - -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -[dimname,nEdges]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nEdges')); - -% see if nTimeSlices dimension exists. If not, set nTimeSlices to 1. 
-try - [dimname,nTimeSlices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'Time')); -catch - nTimeSlices = 1; -end - -nVars = length(var_name); -nSections = length(nEdgesInSection); - -maxNumberEdges = max(nEdgesInSection); -sectionData = zeros(nVertLevels,maxNumberEdges,nSections,nVars); - -for iVar=1:nVars - temptext = char(var_name(iVar)); - fprintf(['loading: ' temptext '\n']) - -% acc_var = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(var_name(iVar)))); -% mean_var = zeros(nVertLevels, nEdges); -% for i=1:nTimeSlices -% mean_var = mean_var + nAverage(i)*squeeze(acc_var(:,:,i)); -% end -% mean_var = mean_var/sum(nAverage)*var_conv_factor(iVar); - - avgNormalVelocity = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'avgNormalVelocity')); - - for iSection = 1:nSections - for i=1:nEdgesInSection(iSection) - iEdge = sectionEdgeIndex(i,iSection); - for k=1:nVertLevels -% sectionData(k,i,iSection,iVar) = mean_var(k,iEdge); - sectionData(k,i,iSection,iVar) = avgNormalVelocity(k,iEdge); - end - end - end - -end -netcdf.close(ncid) - -fprintf('\n') - diff --git a/visualization/moc/load_vertical_velocity.m b/visualization/moc/load_vertical_velocity.m deleted file mode 100644 index d8db32714..000000000 --- a/visualization/moc/load_vertical_velocity.m +++ /dev/null @@ -1,45 +0,0 @@ -function [avgVertVelocityTop, refBottomDepth, latCell,lonCell, areaCell, nVertLevels] ... - = load_vertical_velocity ... - (wd,dir,netcdf_file,vert_var_name) - -% load vertical velcoity -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. 
-% -%%%%%%%%%% output arguments %%%%%%%%% -% refBottomDepth(nVertLevels) depth of center of each layer, for plotting -% vertVelocityTop(nVertLevels,nCells) vertical velocity at cell center, top - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read data from file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** compute u_acc, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ] -ncid = netcdf.open(filename,'nc_nowrite'); - -temp=char(vert_var_name(1)); -if temp(1:7)=='avgEddy' - fprintf('Computing eddy-induced vertical velocity') - avgVertVelocityTopEulerian = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(vert_var_name(2)))); - avgVertVelocityTopTransport = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(vert_var_name(3)))); - avgVertVelocityTop = avgVertVelocityTopEulerian - avgVertVelocityTopTransport; -else - avgVertVelocityTop = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(vert_var_name(1)))); -end - -refBottomDepth = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refBottomDepth')); -latCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latCell'))*180/pi; -lonCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonCell'))*180/pi; -areaCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'areaCell')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); - -netcdf.close(ncid) - -fprintf(['\n']) diff --git a/visualization/moc/moc.m b/visualization/moc/moc.m deleted file mode 100644 index 76d3438d0..000000000 --- a/visualization/moc/moc.m +++ /dev/null @@ -1,259 +0,0 @@ -% function moc - -% Plot cross-sections of means of MPAS fields. -% -% This is the main function, where the user can specify data files, -% coordinates and text, then call functions to find sections, load -% data, and plot cross-sections. -% -% The final product is a set of plots as jpg files, a latex file, -% and a compiled pdf file of the plots, if desired. 
-% -% Mark Petersen, MPAS-Ocean Team, LANL, January 2013 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify data files -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% all plots are placed in the f directory. Comment out if not needed. -unix('mkdir -p f docs'); - -% The text string [wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. - -wd = '/var/tmp/mpeterse/runs/'; - -% These files only need to contain a small number of variables. -% You may need to reduce the file size before copying to a local -% machine using: -% ncks -v avgNormalVelocityReconstructMeridional,avgNormalVelocityReconstructZonal, \ -% nAverage,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,refLayerThickness,\ -% dvEdge,latCell,lonCell,cellsOnCell,nEdgesOnCell \ -% file_in.nc file_out.nc - -dir='m91'; -abc='klmnop'; - -for j=1:length(abc) - sim(j).dir = [dir abc(j)]; - sim(j).netcdf_file = ['output_total_avg.nc']; -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% Choose Atlantic or global MOC -%region='Atlant' - region='global' - -% Compute MOC using section -% [startlat startlon endlat endlon] -if region=='Atlant' - sectionText = {'Atlantic MOC'}; - sectionCoord = [-34.5 19.9 -34.5 -55] % '34.5S, South America to Africa -63 to 22 - mocLat = [-34.5:.5:70]; -elseif region=='global' - sectionText = {'Global MOC'}; - mocLat = [-80:.5:85]; -else - fprintf('Incorrect region name') - return -end - -% For plotting, only four plots are allowed per row. -% Choose sections above for each page. 
-% page.name name of this group of sections -% sectionID section numbers for each row of this page -page(1).name = 'NA'; -page(1).sectionID = [1:1]; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -% Typical variables used for plotting: -% Eulerian velocity from prognostic momentum equation -hor_var_name = {'avgNormalVelocity'};vert_var_name = {'avgVertVelocityTop'};fign=1; -% total transport velocity -%hor_var_name = {'avgNormalTransportVelocity'}; vert_var_name = {'avgVertTransportVelocityTop'};fign=2 -% remaining: eddy-induced transport -%hor_var_name = {'avgNormalGMBolusVelocity'}; vert_var_name = {'avgVertGMBolusVelocityTop'};fign=3 - -var_conv_factor = [1 1 1]; % No conversion here. -if region=='Atlant' - contour_lims = [-10:2:10]; -elseif region=='global' - contour_lims = [-40:4:40]; - %contour_lims = [-20:4:-4 -2 2 4:4:20]; % for Bolus velocity MOS -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify latex command -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% This matlab script will invoke a latex compiler in order to -% produce a pdf file. Specify the unix command-line latex -% executable, or 'none' to not compile the latex document. 
- -latex_command = 'latex'; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify actions to be taken -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -if region=='Atlant' - find_edge_sections_flag = true ; - plot_edge_sections_flag = false ; - compute_transport_flag = true ; -elseif region=='global' - find_edge_sections_flag = true ; - plot_edge_sections_flag = true ; - compute_transport_flag = false ; -end -load_vertical_velocity_flag = true ; -compute_moc_flag = true ; -plot_moc_flag = true ; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Begin main code. Normally this does not need to change. -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -nSections = 1; - -for iSim = 1:length(sim) - - fprintf(['**** simulation: ' sim(iSim).dir '\n']) - fid_latex = fopen('temp.tex','w'); - fprintf(fid_latex,['%% file created by plot_mpas_cross_sections, ' date '\n\n']); - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % load vertical velocity - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if load_vertical_velocity_flag - [sim(iSim).avgVertVelocityTop, sim(iSim).botDepth, ... - sim(iSim).latCell,sim(iSim).lonCell, sim(iSim).areaCell,nVertLevels] = ... - load_vertical_velocity(wd,sim(iSim).dir,sim(iSim).netcdf_file,vert_var_name); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Find edges that connect beginning and end points of section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if find_edge_sections_flag - [sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg] ... - = find_edge_sections(wd,sim(iSim).dir,sim(iSim).netcdf_file, ... 
- sectionText,sectionCoord); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot edge section locations on world map - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if plot_edge_sections_flag - sub_plot_edge_sections(sim(iSim).dir,sectionCoord, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection,... - fid_latex); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Load large variables from netcdf file for section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - [sim(iSim).sectionData] = load_large_variables_edge ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, hor_var_name, var_conv_factor, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Compute transport through each section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - transport = compute_transport ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, hor_var_name, ... - sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).sectionData,sectionText,sectionAbbreviation); - else - transport = zeros(nVertLevels,nSections); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Compute MOC - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_moc_flag - if region=='Atlant' - [sim(iSim).landMask] = land_mask_Atlantic(sim(iSim).latCell,sim(iSim).lonCell); - elseif region=='global' - [sim(iSim).landMask] = land_mask_global(sim(iSim).latCell,sim(iSim).lonCell); - end - - [sim(iSim).mocTop] = compute_moc_from_w ... 
- (sim(iSim).avgVertVelocityTop, sim(iSim).botDepth, ... - sim(iSim).latCell,sim(iSim).lonCell, sim(iSim).areaCell,transport,mocLat,sim(iSim).landMask); - - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot MOC - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - if plot_moc_flag - plot_moc(sim(iSim).dir,sectionText,sim(iSim).mocTop,mocLat, ... - sim(iSim).botDepth,contour_lims,vert_var_name(1),fign) - end - -% doc_dir = ['docs/' regexprep(sim(iSim).dir,'/','_') '_' ... -% sim(iSim).netcdf_file '_dir' ]; -% unix(['mkdir -p ' doc_dir '/f']); -% unix(['mv f/*jpg ' doc_dir '/f']); - -% filename = [ regexprep(sim(iSim).dir,'/','_') '_' sim(iSim).netcdf_file '.tex']; -% unix(['cat mpas_sections.head.tex temp.tex > ' doc_dir '/' filename ]); - -% if not(strcmp(latex_command,'none')) -% fprintf('*** Compiling latex document \n') -% cd(doc_dir); -% unix([latex_command ' ' filename]); -% cd('../..'); -% end - -end % iSim - - diff --git a/visualization/moc/plot_moc.m b/visualization/moc/plot_moc.m deleted file mode 100644 index b8b990356..000000000 --- a/visualization/moc/plot_moc.m +++ /dev/null @@ -1,114 +0,0 @@ -function plot_moc(dir,sectionText,mocTop,mocLat,botDepth, ... - contour_lims,var_name, fign) - -% Plot cross-sections of MPAS fields. 
-% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% sectionText a cell array with text describing each section -% sectionID section numbers for each row of this page -% pageName name of this group of sections -% mocData(nVertLevels,nPoints,nSections,nVars) -% data in each cross-section for each variable -% depth(nVertLevels) depth of center of each layer, for plotting -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% plotDepth(nSections) depth to which to plot each section -% contour_lims(nVars,3) contour line definition: min, max, interval -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cross_sections simulation: ' dir ... - '\n']) - -px = [.54]; -py=.53; -pw = [.87]; ph=[.83]; % width and height of plots - -% smooth once, like POP: -[nVertLevels nLat] = size(mocTop); -mocTopSmooth(:,1) = mocTop(:,1); -mocTopSmooth(:,nLat) = mocTop(:,nLat); -for j=2:nLat-1 - mocTopSmooth(:,j) = (mocTop(:,j-1) + mocTop(:,j-1) + mocTop(:,j-1))/3; -end - -figure(fign); clf - - [cout,h]=contourf(mocLat,[0; botDepth],mocTopSmooth,... - contour_lims); - set(gca,'CLim',[min(contour_lims) max(contour_lims)]) - set(h,'LineColor',[.5 .5 .5]) - cbfrac=0; - hold on - - % Text labels on countours - [cout,h]=contour(mocLat,[0; botDepth],mocTopSmooth,... 
- contour_lims); - ls=[200]; - clabel(cout,h,'fontsize',10,'color','k','rotation',0,'LabelSpacing',ls); - set(h,'LineColor',[.5 .5 .5]) - - % Black lines - %[cout,h]=contour(mocLat,[0; botDepth],mocTopSmooth,[-100:100:100]); - %set(h,'LineColor',[0 0 0],'LineWidth',1) - - -%contour(mocLat,[0 botDepth],mocTopSmooth,[-15:2:20]) -set(gca,'YDir','reverse') -%colorbar -grid on -xlabel('latitude') -ylabel('depth') -title([char(sectionText(1)) ', Sv, ' dir ', ' char(var_name(1))],'Interpreter','none'); - - - % stretched colorbar using contour_lims: - cmin=min(contour_lims); - cmax=max(contour_lims); -% cvalue = cmin-.5*dc:dc:cmax+.5*dc; - nc_orig = 256; - nc = length(contour_lims); - cmap_orig = ColdHot(nc_orig); - cmap_orig_short = zeros(nc-1,3); - ind=(.5:1:nc-.5); - for j=1:nc-1 - cmap_orig_short(j,:) = cmap_orig( floor((j-.5)/(nc-1)*nc_orig),:); - end - - cvalue = linspace(cmin,cmax,256); - nc_inc = length(cvalue); - - cmapnew = zeros(nc_inc,3); - for jnew=2:nc_inc - jold = max(min(min(find(contour_lims>=cvalue(jnew))),nc)-1,1); - cmapnew(jnew-1,:) = cmap_orig_short(jold,:); - end - cmapnew(nc_inc,:) = cmap_orig_short(nc-1,:); - - colormap(cmapnew) - -h=colorbar; -set(h,'YTick',contour_lims); - - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 7 3.2]) -% subplot('position',[0 .95 1 .05]); axis off -% title_txt = [regexprep(char(var_name(iVar)),'_','\\_') ', ' regexprep(dir,'_','\\_')]; -% h=text(.55,.4,title_txt); -% set(h,'HorizontalAlignment','center','FontWeight','bold','FontSize',14) -% text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - tempTxt = char(sectionText(1)); - temp=['f/' dir '/' tempTxt(1:6) 'Moc_' char(var_name(1))]; - filename = regexprep(temp,'\.','_'); - print('-djpeg',[filename '.jpg']); - print('-depsc2',[filename '.eps']); - unix(['epstopdf ' filename '.eps --outfile=' filename '.pdf']); diff --git a/visualization/moc/sub_plot_edge_sections.m b/visualization/moc/sub_plot_edge_sections.m deleted file mode 100644 index 7f63357fd..000000000 --- a/visualization/moc/sub_plot_edge_sections.m +++ /dev/null @@ -1,117 +0,0 @@ -function sub_plot_edge_sections(dir,sectionCoord, ... - latSectionVertex,lonSectionVertex, ... - latVertexDeg,lonVertexDeg, ... - sectionEdgeIndex, nEdgesInSection,... - fid_latex) - -% Plot edge section locations on world map - -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% latVertexDeg(nVertices) lat arrays for all vertices -% lonVertexDeg(nVertices) lon arrays for all vertices -% sectionEdgeIndex(maxEdges,nSections) edge index of each section -% nEdgesInSection(nSections) number of edges in each section -% fid_latex file ID of latex file - -fprintf(['** sub_plot_edge_sections, on figure 1.\n']) - -nSections = size(sectionCoord,1); - -figure(1); clf - -if (min(lonVertexDeg)>-1e-8) - minLon = 0.0; - latTrans = 360; -else - minLon = -180.0; - latTrans = 0.0; -end - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. 
- load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - set(gca,'YDir','normal') - - hold on - - % world - axis tight - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-240+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - - % plot vertexs. This is just done for debugging. - plot(lonVertexDeg,latVertexDeg,'.y') - - grid on - - for iSection=1:nSections - latCoordDeg = [sectionCoord(iSection,1) sectionCoord(iSection,3)]; - lonCoordDeg = [sectionCoord(iSection,2) sectionCoord(iSection,4)]; - - h=plot([mod(lonCoordDeg,360)],[latCoordDeg],'*-'); - set(h,'Color','y','LineWidth',1) - h=plot([mod(lonCoordDeg(1),360)],[latCoordDeg(1)],'*k'); - - for i=1:nEdgesInSection(iSection) - h = line([lonSectionVertex(i,iSection) lonSectionVertex(i+1,iSection)],... - [latSectionVertex(i,iSection) latSectionVertex(i+1,iSection)]); - set(h,'Color','r','LineWidth',2) - %plot([lonVertexDeg(sectionVertexIndex(i+1,iSection))], ... - % [latVertexDeg(sectionVertexIndex(i+1,iSection))],'sk') - end - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Edges of transport sections. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 8 8]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - dir_name1 = regexprep(dir,'\.','_'); - dir_name2 = regexprep(dir_name1,'/','_'); - filename=['f/' dir_name2 '_vertex_map' ]; - print('-djpeg',[filename '.jpg']); - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); From 460e7402896fc5e3362bcbe6235882b33ed41199 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:11:45 -0600 Subject: [PATCH 019/180] Remove visualization/ncl --- visualization/ncl/atm_cells.ncl | 145 ----------- visualization/ncl/atm_contours.ncl | 152 ------------ visualization/ncl/atm_mesh.ncl | 80 ------- visualization/ncl/atm_xsec.ncl | 373 ----------------------------- visualization/ncl/cells.ncl | 144 ----------- visualization/ncl/cells_hyd0.ncl | 159 ------------ visualization/ncl/contours.ncl | 100 -------- visualization/ncl/fill.ncl | 121 ---------- visualization/ncl/tc2_cells.ncl | 109 --------- visualization/ncl/tc2_contours.ncl | 100 -------- visualization/ncl/tracer1.ncl | 101 -------- visualization/ncl/tracer2.ncl | 101 -------- visualization/ncl/xsec.ncl | 235 ------------------ 13 files changed, 1920 deletions(-) delete mode 100644 visualization/ncl/atm_cells.ncl delete mode 100644 visualization/ncl/atm_contours.ncl delete mode 100644 visualization/ncl/atm_mesh.ncl delete mode 100644 visualization/ncl/atm_xsec.ncl delete mode 100644 visualization/ncl/cells.ncl delete mode 100644 visualization/ncl/cells_hyd0.ncl delete mode 100644 visualization/ncl/contours.ncl delete mode 100644 visualization/ncl/fill.ncl delete mode 100644 visualization/ncl/tc2_cells.ncl delete mode 100644 visualization/ncl/tc2_contours.ncl delete mode 100644 visualization/ncl/tracer1.ncl delete mode 100644 visualization/ncl/tracer2.ncl delete mode 100644 visualization/ncl/xsec.ncl diff --git 
a/visualization/ncl/atm_cells.ncl b/visualization/ncl/atm_cells.ncl deleted file mode 100644 index da8dd652f..000000000 --- a/visualization/ncl/atm_cells.ncl +++ /dev/null @@ -1,145 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - - r2d = 57.2957795 ; radians to degrees - - maxedges = 8 - - wks = gsn_open_wks("pdf","atm_cells") - gsn_define_colormap(wks,"BlAqGrYeOrReVi200") - - fname = getenv("FNAME") - f = addfile(fname,"r") - - nEdgesOnCell = f->nEdgesOnCell(:) - verticesOnCell = f->verticesOnCell(:,:) - verticesOnEdge = f->verticesOnEdge(:,:) - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - - res = True - res@gsnPaperOrientation = "portrait" - - res@sfXArray = x - res@sfYArray = y - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False - res@cnInfoLabelOn = False - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = "CylindricalEquidistant" -; res@mpProjection = "Orthographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - ; - ; The purpose of this section is simply to set up a graphic ('map') - ; that uses the projection specified above, and over which we - ; can draw polygons - ; - h = f->areaCell(:) - sizes = dimsizes(h) - nCells = sizes(0) - xpoly = new((/maxedges/), "double") - ypoly = new((/maxedges/), "double") - res@cnConstFLabelOn = False - res@lbLabelBarOn = False - map = gsn_csm_contour_map(wks,h,res) - - t = stringtointeger(getenv("T")) - - ; - ; Set the field to be plotted here - ; - pres = True - h = f->qv(t,:,0) - minfld = min(h) - maxfld = max(h) - fldrange = maxfld - minfld - do iCell=0,nCells-1 - do i=0,nEdgesOnCell(iCell)-1 - xpoly(i) = lonVertex(verticesOnCell(iCell,i)-1) - ypoly(i) = latVertex(verticesOnCell(iCell,i)-1) - if (i .gt. 0) then - if (abs(xpoly(i) - xpoly(0)) .gt. 180.0) then - if (xpoly(i) .gt. xpoly(0)) then - xpoly(i) = xpoly(i) - 360.0 - else - xpoly(i) = xpoly(i) + 360.0 - end if - end if - end if - end do - pres@gsFillColor = doubletointeger(198*(h(iCell) - minfld)/fldrange+2) - gsn_polygon(wks,map,xpoly(0:nEdgesOnCell(iCell)-1),ypoly(0:nEdgesOnCell(iCell)-1),pres); - end do - - - ; - ; Draw label bar - ; - - xcb = new((/4/), "float") - ycb = new((/4/), "float") - - tres = True - tres@txAngleF = 90.0 - tres@txFontHeightF = 0.015 - do i=2,200 - xcb(0) = 0.125 + i*0.75/198 - ycb(0) = 0.11 - - xcb(1) = 0.125 + (i+1)*0.75/198 - ycb(1) = 0.11 - - xcb(2) = 0.125 + (i+1)*0.75/198 - ycb(2) = 0.16 - - xcb(3) = 0.125 + i*0.75/198 - ycb(3) = 0.16 - - tres@gsFillColor = i - - gsn_polygon_ndc(wks,xcb,ycb,tres); - - j = (i-2) % 20 - if ((j .eq. 0) .or. (i .eq. 200)) then - ff = minfld + int2flt(i-2) * fldrange / 198.0 - label = sprintf("%5.3g", ff) - gsn_text_ndc(wks, label, xcb(0), 0.060, tres) - end if - - end do - - mres = True - mres@mpCenterLatF = 0. - mres@mpCenterLonF = 0. 
- mres@mpGridAndLimbOn = False - mres@mpOutlineOn = True - mres@mpFillOn = False - mres@mpPerimOn = False - mres@gsnFrame = False - mapo = gsn_csm_map(wks,mres) - - frame(wks) - -end - diff --git a/visualization/ncl/atm_contours.ncl b/visualization/ncl/atm_contours.ncl deleted file mode 100644 index 856837f68..000000000 --- a/visualization/ncl/atm_contours.ncl +++ /dev/null @@ -1,152 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - ; - ; Whether to plot wind vectors - ; -; winds = True - winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; -; filled = True - filled = False - - ; - ; The (lat,lon) the plot is to be centered over - ; - cenLat = 0.0 - cenLon = 0.0 - - ; - ; Projection to use for plot - ; -; projection = "Orthographic" - projection = "CylindricalEquidistant" - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","atm_contours") - gsn_define_colormap(wks,"gui_default") - - fname = getenv("FNAME") - f = addfile(fname,"r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - if (plotfield .eq. "h" .or. plotfield .eq. "ke") then - res@sfXArray = lonCell - res@sfYArray = latCell - end if - if (plotfield .eq. 
"vorticity") then - res@sfXArray = lonVertex - res@sfYArray = latVertex - end if - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - -; res@cnLevelSpacingF = 50.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = projection - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = cenLat - res@mpCenterLonF = cenLon - res@mpGridAndLimbOn = True - res@mpGridAndLimbDrawOrder = "PreDraw" - res@mpGridLineColor = "Background" - res@mpOutlineOn = True - res@mpDataBaseVersion = "Ncarg4_1" - res@mpDataSetName = "Earth..3" - res@mpOutlineBoundarySets = "Geophysical" - res@mpFillOn = False - res@mpPerimOn = True - res@gsnFrame = False - res@cnLineThicknessF = 2.0 - res@cnLineColor = "NavyBlue" - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then -; fld = f->xice(t,:) -; fld = f->sst(t,:) -; fld = f->surface_pressure(t,:) -; fld = f->pressure_base(t,:,25) + f->pressure_p(t,:,25) - fld = f->theta(t,:,25) - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,0) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,0) - end if - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,fld,res) - - if (winds) then - u = f->u(t,:,0) - v = f->v(t,:,0) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/visualization/ncl/atm_mesh.ncl b/visualization/ncl/atm_mesh.ncl deleted file mode 100644 index e68aaba8d..000000000 --- a/visualization/ncl/atm_mesh.ncl +++ /dev/null @@ -1,80 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - r2d = 57.2957795 ; radians to degrees - - wks = gsn_open_wks("pdf","atm_mesh") - - colors = (/"white","black","lightskyblue1","lightskyblue1","bisque"/) -; colors = (/"white","black","white","white","grey90"/) - gsn_define_colormap(wks,colors) - - fname = getenv("FNAME") - f = addfile(fname,"r") - - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - verticesOnCell = f->verticesOnCell(:,:) - verticesOnEdge = f->verticesOnEdge(:,:) - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - - res = True - res@gsnMaximize = True - - res@mpProjection = "Orthographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 50. - res@mpCenterLonF = -100. 
- res@mpCenterRotF = -100. - res@mpGridAndLimbOn = False - res@mpOutlineOn = True - res@mpFillOn = True - res@mpPerimOn = False - res@gsnFrame = False - res@mpOceanFillColor = 3 - res@mpInlandWaterFillColor = 3 - res@mpLandFillColor = 4 - - map = gsn_csm_map(wks,res) - - lres = True - lres@gsLineThicknessF = 0.10 - - esizes = dimsizes(latEdge) - ecx = new((/esizes(0),2/),double) - ecy = new((/esizes(0),2/),double) - do j=0,esizes(0)-1 - ecy(j,0) = latVertex(verticesOnEdge(j,0)-1) - ecx(j,0) = lonVertex(verticesOnEdge(j,0)-1) - ecy(j,1) = latVertex(verticesOnEdge(j,1)-1) - ecx(j,1) = lonVertex(verticesOnEdge(j,1)-1) - end do - - do j=0,esizes(0)-1 - if (abs(ecx(j,0) - ecx(j,1)) .gt. 180.0) then - if (ecx(j,0) .gt. ecx(j,1)) then - ecx(j,0) = ecx(j,0) - 360.0 - else - ecx(j,1) = ecx(j,1) - 360.0 - end if - end if - end do - - do j=0,esizes(0)-1 - gsn_polyline(wks,map,ecx(j,:),ecy(j,:),lres) - end do - - frame(wks) - -end - diff --git a/visualization/ncl/atm_xsec.ncl b/visualization/ncl/atm_xsec.ncl deleted file mode 100644 index e90fcc65e..000000000 --- a/visualization/ncl/atm_xsec.ncl +++ /dev/null @@ -1,373 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - pi = 3.14159265 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - ; - ; Which field to plot - ; -; plotfield = "w" - plotfield = "theta" -; plotfield = "ke" -; plotfield = "vorticity" - - - ; - ; Whether to plot horizontal wind vectors - ; -; horiz_winds = True - horiz_winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; - filled = True -; filled = False - - ; - ; Starting and ending locations (in degrees) - ; Exercise caution when setting these: setting start_lon=90.0 and end_lon=-90.0 - ; would create a cross-section including the 
prime meridian, whereas setting - ; start_lon=90.0 and end_lon=270.0 would create a cross-section containing - ; the date line, for example. - ; - ; - start_lat = 40.0 - start_lon = -140.0 - end_lat = 40.0 - end_lon = -80.0 - - ; - ; The number of points along the cross section - ; - nsec = 250 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - wks = gsn_open_wks("pdf","atm_xsec") - gsn_define_colormap(wks,"BlAqGrYeOrReVi200") - - fname = getenv("FNAME") - f = addfile(fname,"r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - xCell = f->xCell(:) - yCell = f->yCell(:) - zCell = f->zCell(:) - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - xEdge = f->xEdge(:) - yEdge = f->yEdge(:) - zEdge = f->zEdge(:) - zgrid = f->zgrid(:,:) / 1000.0 - verticesOnCell = f->verticesOnCell(:,:) - edgesOnCell = f->edgesOnCell(:,:) - nCellsOnCell = f->nEdgesOnCell(:) - cellsOnCell = f->cellsOnCell(:,:) - alpha = f->angleEdge(:) - - dims = dimsizes(latCell) - nCells = dims(0) - - start_lat = start_lat / r2d - start_lon = start_lon / r2d - end_lat = end_lat / r2d - end_lon = end_lon / r2d - - radius = 6371220.0 - xsec_latitude = start_lat - xsec_longitude = start_lon - xsec_lat_inc = (end_lat - start_lat) / (int2flt(nsec) - 1.0) - xsec_lon_inc = (end_lon - start_lon) / (int2flt(nsec) - 1.0) - - xsecx = new((/nsec/),float) - xsecy = new((/nsec/),float) - xsecz = new((/nsec/),float) - xsec_cell_id = new((/nsec/),integer) - xsec_edge_id = new((/nsec/),integer) - xsec_vtx_id = new((/nsec/),integer) - xsec_id = new((/nsec/),integer) - - ; Compute (x,y,z) coordinates for points on cross section - do i=0,nsec-1 - xsecx(i) = radius * cos(xsec_longitude) * cos(xsec_latitude) - xsecy(i) = radius * sin(xsec_longitude) * cos(xsec_latitude) - xsecz(i) = radius * sin(xsec_latitude) - xsec_latitude 
= xsec_latitude + xsec_lat_inc - xsec_longitude = xsec_longitude + xsec_lon_inc - end do - - ; Find cell containing first cross section point - dmin = 2.0 * radius - cellmin = -1 - do i=0,nCells-1 - d = sqrt((xCell(i) - xsecx(0))^2.0 + (yCell(i) - xsecy(0))^2.0 + (zCell(i) - xsecz(0))^2.0) - if (d .lt. dmin) then - cellmin = i - dmin = doubletofloat(d) - end if - end do - xsec_cell_id(0) = cellmin - - ; For the remaining cross section points, find the grid cell containing them - do j=1,nsec-1 - moved = 1 - do while (moved .ne. 0) - moved = 0 - d = sqrt((xCell(cellmin) - xsecx(j))^2.0 + (yCell(cellmin) - xsecy(j))^2.0 + (zCell(cellmin) - xsecz(j))^2.0) - do k=0,nCellsOnCell(cellmin)-1 - dn = sqrt((xCell(cellsOnCell(cellmin,k)-1) - xsecx(j))^2.0 + (yCell(cellsOnCell(cellmin,k)-1) - xsecy(j))^2.0 + (zCell(cellsOnCell(cellmin,k)-1) - xsecz(j))^2.0) - if (dn .lt. d) then - d = dn - nearest = (/cellsOnCell(cellmin,k)/)-1 - moved = 1 - end if - end do - if (moved .eq. 1) then - cellmin = nearest - end if - end do - xsec_cell_id(j) = cellmin - end do - - ; For all cross section points, find the nearest vertex and edge - do i=0,nsec-1 - iVtx = verticesOnCell(xsec_cell_id(i),0) - 1 - iEdge = edgesOnCell(xsec_cell_id(i),0) - 1 - xsec_edge_id(i) = iEdge - xsec_vtx_id(i) = iVtx - de = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - do j=1,nCellsOnCell(xsec_cell_id(i))-1 - iVtx = verticesOnCell(xsec_cell_id(i),j) - 1 - iEdge = edgesOnCell(xsec_cell_id(i),j) - 1 - de_test = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv_test = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - if (de_test .lt. de) then - de = de_test - xsec_edge_id(i) = iEdge - end if - if (dv_test .lt. 
dv) then - dv = dv_test - xsec_vtx_id(i) = iVtx - end if - end do - end do - - ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - ; At this point, xsec_cell_id(:), xsec_edge_id(:), and xsec_vtx_id(:) contains the cell, edge, - ; and vertex IDs of the nearest points to those along the cross section - ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - res = True - res@gsnMaximize = False - res@gsnSpreadColors = True - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - - res@cnLevelSpacingF = 0.01 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@gsnFrame = False - - - ; - ; Select field to be plotted, and set generic array xsec_id(:) to contain IDs of - ; locations (cell, edge, or vertex) in that field containg cross section points - ; - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "w") then - fld1 = f->w(t,:,:) - ldims = dimsizes(fld1) - fld = new((/ldims(0),ldims(1)-1/),"double") - ; Average w to center of layers - do i=0,ldims(0)-1 - do j=0,ldims(1)-2 - fld(i,j) = 0.5*(fld1(i,j)+fld1(i,j+1)) - end do - end do - nVertLevels = ldims(1) - nVertLevels = nVertLevels-1 - xsec_id(:) = xsec_cell_id(:) - end if - if (plotfield .eq. "theta") then - fld = f->theta(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_cell_id(:) - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_cell_id(:) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_vtx_id(:) - end if - res@cnLineDashPattern = 0 - - height1 = new((/nVertLevels+1,nsec/),float) - height = new((/nVertLevels+1,nsec+1/),float) - x = new((/nVertLevels+1,nsec+1/),float) - - ; Extract field from along cross section into plotting array - arr = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 -; arr(j,i) = 0.5*doubletofloat(fld(xsec_id(i),j)+fld(xsec_id(i),j+1)) - arr(j,i) = doubletofloat(fld(xsec_id(i),j)) - height1(j,i) = doubletofloat(zgrid(xsec_id(i),j)) - end do - j = nVertLevels - height1(j,i) = doubletofloat(zgrid(xsec_id(i),j)) - end do - - do j=0,nVertLevels - x(j,nsec) = int2flt(nsec) + 0.5 - x(j,0) = 0.5 - height(j,0) = height1(j,0) - height(j,nsec) = height1(j,nsec-1) - end do - - do i=1,nsec-1 - do j=0,nVertLevels - height(j,i) = 0.5*(height1(j,i) + height1(j,i-1)) - x(j,i) = int2flt(i) + 0.5 - end do - end do - - xpoly = new((/5/), "float") - ypoly = new((/5/), "float") - - minfld = min(arr) - maxfld = max(arr) - fldrange = maxfld - minfld - - res@trYMinF = min(zgrid) - res@trYMaxF = max(zgrid) - res@trXMinF = int2flt(0) - res@trXMaxF = int2flt(nsec+1) - - res@tiYAxisString = "z(km)" - res@tiYAxisFontHeightF = 0.017 - res@tiXAxisString = "cell" - res@tiXAxisFontHeightF = 0.017 - - map = gsn_csm_xy(wks,x,height,res) - - do i=0,nsec-1 - do j=0,nVertLevels-1 - xpoly(0) = x(j,i) - xpoly(1) = x(j,i+1) - xpoly(2) = x(j+1,i+1) - xpoly(3) = x(j+1,i) - xpoly(4) = x(j,i) - - ypoly(0) = height(j,i) - ypoly(1) = height(j,i+1) - ypoly(2) = height(j+1,i+1) - ypoly(3) = height(j+1,i) - ypoly(4) = height(j,i) - - res@gsFillColor = doubletointeger(195*(arr(j,i) - minfld)/fldrange+2) - gsn_polygon(wks,map,xpoly,ypoly,res); - end do - end do - - if (horiz_winds) then - u = f->u(t,:,:) - v = f->v(t,:,:) - esizes = dimsizes(u) - nVertLevels = esizes(1) - u_earth = new((/nVertLevels,nsec/),float) - v_earth = 
new((/nVertLevels,nsec/),float) - x_edge = new((/nVertLevels,nsec/),float) - y_edge = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 - u_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i))) - v(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i)))) - v_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i))) + v(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i)))) - x_edge(j,i) = i - y_edge(j,i) = j - end do - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",50.0) - wmsetp("VCW",0.10) - - wmvect(wks, x_edge, y_edge, u_earth, v_earth) - end if - - ; - ; Draw label bar - ; - - xcb = new((/4/), "float") - ycb = new((/4/), "float") - - tres = True - tres@txAngleF = 90.0 - tres@txFontHeightF = 0.013 - do i=2,200 - xcb(0) = 0.125 + i*0.75/198 - ycb(0) = 0.08 - - xcb(1) = 0.125 + (i+1)*0.75/198 - ycb(1) = 0.08 - - xcb(2) = 0.125 + (i+1)*0.75/198 - ycb(2) = 0.10 - - xcb(3) = 0.125 + i*0.75/198 - ycb(3) = 0.10 - - tres@gsFillColor = i - - gsn_polygon_ndc(wks,xcb,ycb,tres); - - j = (i-2) % 20 - if ((j .eq. 0) .or. (i .eq. 
200)) then - ff = minfld + int2flt(i-2) * fldrange / 198.0 - label = sprintf("%8.3g", ff) - gsn_text_ndc(wks, label, xcb(0), 0.050, tres) - end if - - end do - - frame(wks) - -end - diff --git a/visualization/ncl/cells.ncl b/visualization/ncl/cells.ncl deleted file mode 100644 index 94f55be35..000000000 --- a/visualization/ncl/cells.ncl +++ /dev/null @@ -1,144 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - ; - ; Whether to plot wind vectors - ; -; winds = True - winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; -; filled = True - filled = False - - ; - ; The (lat,lon) the plot is to be centered over - ; - cenLat = 0.0 - cenLon = 0.0 - - ; - ; Projection to use for plot - ; -; projection = "Orthographic" - projection = "CylindricalEquidistant" - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - if (plotfield .eq. "h" .or. plotfield .eq. "ke") then - res@sfXArray = lonCell - res@sfYArray = latCell - end if - if (plotfield .eq. 
"vorticity") then - res@sfXArray = lonVertex - res@sfYArray = latVertex - end if - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - - res@cnLevelSpacingF = 50.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = projection - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = cenLat - res@mpCenterLonF = cenLon - res@mpGridAndLimbOn = True - res@mpGridAndLimbDrawOrder = "PreDraw" - res@mpGridLineColor = "Background" - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then - h = f->h(t,:,0) - hs = f->h_s(:) - fld = h + hs - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,0) - end if - if (plotfield .eq. "vorticity") then - fld = f->vorticity(t,:,0) - end if - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,fld,res) - - if (winds) then - u = f->u(t,:,0) - v = f->v(t,:,0) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/visualization/ncl/cells_hyd0.ncl b/visualization/ncl/cells_hyd0.ncl deleted file mode 100644 index 090711cbf..000000000 --- a/visualization/ncl/cells_hyd0.ncl +++ /dev/null @@ -1,159 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" 
-load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - ; - ; Whether to plot wind vectors - ; -; winds = True - winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; - filled = True -; filled = False - - ; - ; The (lat,lon) the plot is to be centered over - ; - cenLat = 0.0 - cenLon = 180.0 - - ; - ; Projection to use for plot - ; -; projection = "Orthographic" - projection = "CylindricalEquidistant" - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - -; wks_type = "pdf" -; wks_type@wkOrientation = "landscape" -; wks = gsn_open_wks(wks_type,"cells") - - wks = gsn_open_wks("pdf","cells") -; wks = gsn_open_wks("x11","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - if (plotfield .eq. "h" .or. plotfield .eq. "ke") then - res@sfXArray = lonCell - res@sfYArray = latCell - end if - if (plotfield .eq. 
"vorticity") then - res@sfXArray = lonVertex - res@sfYArray = latVertex - end if - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True -; res@cnLinesOn = False -; res@cnRasterModeOn = True - res@cnLinesOn = True - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - -; res@cnLevelSpacingF = 10.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = projection - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = cenLat - res@mpCenterLonF = cenLon - res@mpGridAndLimbOn = True - res@mpGridAndLimbDrawOrder = "PreDraw" - res@mpGridLineColor = "Background" - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then -; h = f->h(t,:,0) -; hs = f->h_s(:) -; fld = h + hs -; h = f->ww(t,:,5) -; h = (f->surface_pressure(t,:) + 219.4)/100. -; h = f->geopotential(t,:,18) -; h = f->theta(t,:,25)-f->theta(0,:,25) -; h = f->theta(t,:,18) - h = f->surface_pressure(t,:)/100. -; h = (f->surface_pressure(t,:)-f->surface_pressure(0,:))/100. - fld = h - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,18) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,0) - end if - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,fld,res) - - if (winds) then - u = f->u(t,:,25) - f->u(0,:,25) - v = f->v(t,:,25) - f->v(0,:,25) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/visualization/ncl/contours.ncl b/visualization/ncl/contours.ncl deleted file mode 100644 index 3f6625509..000000000 --- a/visualization/ncl/contours.ncl +++ /dev/null @@ -1,100 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - cellArea = f->areaCell(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True -; res@gsnMaximize = True - res@gsnSpreadColors = True - - 
res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnLinesOn = True - res@cnLineLabelsOn = False - res@cnLevelSelectionMode = "AutomaticLevels" - res@cnLevelSpacingF = 100. - res@cnMonoLineDashPattern = True - res@cnLineDashPattern = 2 - res@cnInfoLabelOn = False - - res@lbBoxLinesOn = False - - res@mpProjection = "Orthographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 270. - res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpPerimOn = True - res@mpAreaMaskingOn = True - res@mpMaskAreaSpecifiers = "Land" - res@gsnFrame = False - - h0 = f->h(0,:,0) - print("Min/max of initial height field: "+min(h0)+", "+max(h0)) - map0 = gsn_csm_contour_map(wks,h0,res) - - t = stringtointeger(getenv("T")) - res@cnLineDashPattern = 0 - h = f->h(t,:,0) - print("Min/max of height field after one rotation: "+min(h)+", "+max(h)) - map1 = gsn_csm_contour_map(wks,h,res) - tres = False - - frame(wks) - - hdiff = h - h0 - print("Min/max error after one rotation: "+min(hdiff)+", "+max(hdiff)) - map1 = gsn_csm_contour_map(wks,hdiff,res) - - frame(wks) - - cellsizes = dimsizes(h) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h0(i)*cellArea(i) - end do - print("Original integrated volume: "+intgrl) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h(i)*cellArea(i) - end do - print("Integrated volume after one rotation: "+intgrl) - -end - diff --git a/visualization/ncl/fill.ncl b/visualization/ncl/fill.ncl deleted file mode 100644 index f7f07daa6..000000000 --- a/visualization/ncl/fill.ncl +++ /dev/null @@ -1,121 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 8 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = 
f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillMode = "RasterFill" - - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - -; res@cnFillOn = False -; res@cnLinesOn = True -; res@cnLineLabelsOn = True - -; res@cnMaxLevelCount = 22 -; res@cnLevelSelectionMode = "ManualLevels" -; res@cnMinLevelValF = -100. -; res@cnMaxLevelValF = 1000. - res@cnLevelSpacingF = 50. - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - -; res@mpProjection = "Satellite" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->h(t,:,0) - hs = f->h_s(:) -; hsizes = dimsizes(h) -; h_total = new((/hsizes(1)/),double) -; h_total = 0.0 -; do i=1,hsizes(1) -; h_total(:) = h_total(:) + h(:) -; end do -; h_total(:) = h_total(:) + hs(:) - h(:) = h(:) + hs(:) - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,h,res) - -; res@cnLineDashPattern = 11 -; map = gsn_csm_contour_map(wks,hs,res) - frame(wks) - -; esizes = dimsizes(u) -; u_earth = new(dimsizes(u),float) -; v_earth = new(dimsizes(u),float) -; lat_edge = new(dimsizes(u),float) -; lon_edge = new(dimsizes(u),float) -; do i=0,esizes(0)-1 -; j = i % 20 -; if (j .eq. 0) then -; u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) -; v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) -; else -; u_earth(i) = -999. -; v_earth(i) = -999. 
-; end if -; lat_edge(i) = doubletofloat(latEdge(i)) -; lon_edge(i) = doubletofloat(lonEdge(i)) -; end do - -; wmsetp("VCH",0.003) -; wmsetp("VRN",0.010) -; wmsetp("VRS",20.0) -; wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - -; frame(wks) -end - diff --git a/visualization/ncl/tc2_cells.ncl b/visualization/ncl/tc2_cells.ncl deleted file mode 100644 index 74b7118fc..000000000 --- a/visualization/ncl/tc2_cells.ncl +++ /dev/null @@ -1,109 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("x11","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False -; res@cnMaxLevelCount = 22 - res@cnLevelSelectionMode = "ManualLevels" - res@cnMinLevelValF = 1000. - res@cnMaxLevelValF = 3000. - res@cnLevelSpacingF = 100. 
- res@cnInfoLabelOn = False - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = "Stereographic" - res@mpLimitMode = "LatLon" - res@mpMinLonF = 0.0 - res@mpMaxLonF = 360.0 - res@mpMinLatF = 0.0 - res@mpMaxLatF = 90.0 - - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 90. - res@mpCenterLonF = 270. - res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->h(t,:,0) - map = gsn_csm_contour_map(wks,h,res) - - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - j = i % 20 - if (j .eq. 0) then -; u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) -; v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - u_earth(i) = doubletofloat(u(i)) - v_earth(i) = doubletofloat(v(i)) - else - u_earth(i) = -999. - v_earth(i) = -999. 
- end if - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.003) - wmsetp("VRN",0.010) - wmsetp("VRS",20.0) - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - - frame(wks) -end - diff --git a/visualization/ncl/tc2_contours.ncl b/visualization/ncl/tc2_contours.ncl deleted file mode 100644 index 6b4eef125..000000000 --- a/visualization/ncl/tc2_contours.ncl +++ /dev/null @@ -1,100 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 8 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - cellArea = f->areaCell(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True -; res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnLinesOn = True - res@cnLineLabelsOn = False - res@cnLevelSelectionMode = "AutomaticLevels" - res@cnLevelSpacingF = 100. - res@cnMonoLineDashPattern = True - res@cnLineDashPattern = 2 - res@cnInfoLabelOn = False - - res@lbBoxLinesOn = False - - res@mpProjection = "Stereographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 90. - res@mpCenterLonF = 270. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpPerimOn = True - res@mpAreaMaskingOn = True - res@mpMaskAreaSpecifiers = "Land" - res@gsnFrame = False - - h0 = f->h(0,:,0) - print("Min/max of initial height field: "+min(h0)+", "+max(h0)) - map0 = gsn_csm_contour_map(wks,h0,res) - - t = stringtointeger(getenv("T")) - res@cnLineDashPattern = 0 - h = f->h(t,:,0) - print("Min/max of height field after one rotation: "+min(h)+", "+max(h)) - map1 = gsn_csm_contour_map(wks,h,res) - tres = False - - frame(wks) - - hdiff = h - h0 - print("Min/max error after one rotation: "+min(hdiff)+", "+max(hdiff)) - map1 = gsn_csm_contour_map(wks,hdiff,res) - - frame(wks) - - cellsizes = dimsizes(h) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h0(i)*cellArea(i) - end do - print("Original integrated volume: "+intgrl) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h(i)*cellArea(i) - end do - print("Integrated volume after one rotation: "+intgrl) - -end - diff --git a/visualization/ncl/tracer1.ncl b/visualization/ncl/tracer1.ncl deleted file mode 100644 index 63b018ed3..000000000 --- a/visualization/ncl/tracer1.ncl +++ /dev/null @@ -1,101 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - 
cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False -; res@cnMaxLevelCount = 22 -; res@cnLevelSelectionMode = "ManualLevels" -; res@cnMinLevelValF = -100. -; res@cnMaxLevelValF = 1000. - res@cnLevelSpacingF = 50. - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - -; res@mpProjection = "Satellite" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. - res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->tracers(t,:,0,0) - map = gsn_csm_contour_map(wks,h,res) - - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - j = i % 20 - if (j .eq. 0) then - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - else - u_earth(i) = -999. - v_earth(i) = -999. 
- end if - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.003) - wmsetp("VRN",0.010) - wmsetp("VRS",20.0) -; wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - - frame(wks) -end - diff --git a/visualization/ncl/tracer2.ncl b/visualization/ncl/tracer2.ncl deleted file mode 100644 index 4a98ccc3f..000000000 --- a/visualization/ncl/tracer2.ncl +++ /dev/null @@ -1,101 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False -; res@cnMaxLevelCount = 22 -; res@cnLevelSelectionMode = "ManualLevels" -; res@cnMinLevelValF = -100. -; res@cnMaxLevelValF = 1000. - res@cnLevelSpacingF = 50. - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - -; res@mpProjection = "Satellite" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->tracers(t,:,0,1) - map = gsn_csm_contour_map(wks,h,res) - - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - j = i % 20 - if (j .eq. 0) then - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - else - u_earth(i) = -999. - v_earth(i) = -999. - end if - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.003) - wmsetp("VRN",0.010) - wmsetp("VRS",20.0) -; wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - - frame(wks) -end - diff --git a/visualization/ncl/xsec.ncl b/visualization/ncl/xsec.ncl deleted file mode 100644 index 73e2ed7cb..000000000 --- a/visualization/ncl/xsec.ncl +++ /dev/null @@ -1,235 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - pi = 3.14159265 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - - ; - ; Whether to plot horizontal wind vectors - ; -; horiz_winds = True - horiz_winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; - filled = True -; filled = False - - ; - ; The longitude of the pole-to-pole cross section - ; - xsec_longitude = -1.0 * pi / 6.0 - - ; - ; The number of points along the cross section - ; - nsec = 200 - 
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - wks = gsn_open_wks("pdf","xsec") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - xCell = f->xCell(:) - yCell = f->yCell(:) - zCell = f->zCell(:) - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - xEdge = f->xEdge(:) - yEdge = f->yEdge(:) - zEdge = f->zEdge(:) - verticesOnCell = f->verticesOnCell(:,:) - edgesOnCell = f->edgesOnCell(:,:) - nCellsOnCell = f->nEdgesOnCell(:) - cellsOnCell = f->cellsOnCell(:,:) - alpha = f->angleEdge(:) - - dims = dimsizes(latCell) - nCells = dims(0) - - radius = 6371220.0 - xsec_latitude = 3.141592653 / 2.0 - xsec_lat_inc = 3.141592653 / (int2flt(nsec) - 1.0) - - xsecx = new((/nsec/),float) - xsecy = new((/nsec/),float) - xsecz = new((/nsec/),float) - xsec_id = new((/nsec/),integer) - xsec_edge_id = new((/nsec/),integer) - xsec_vtx_id = new((/nsec/),integer) - - ; Compute (x,y,z) coordinates for points on cross section - do i=0,nsec-1 - xsecx(i) = radius * cos(xsec_longitude) * cos(xsec_latitude) - xsecy(i) = radius * sin(xsec_longitude) * cos(xsec_latitude) - xsecz(i) = radius * sin(xsec_latitude) - xsec_latitude = xsec_latitude - xsec_lat_inc - end do - - ; Find cell containing first cross section point - dmin = 2.0 * radius - cellmin = -1 - do i=0,nCells-1 - d = sqrt((xCell(i) - xsecx(0))^2.0 + (yCell(i) - xsecy(0))^2.0 + (zCell(i) - xsecz(0))^2.0) - if (d .lt. dmin) then - cellmin = i - dmin = doubletofloat(d) - end if - end do - xsec_id(0) = cellmin - - ; For all other cross section points, find the grid cell containing them - do j=1,nsec-1 - moved = 1 - do while (moved .ne. 
0) - moved = 0 - d = sqrt((xCell(cellmin) - xsecx(j))^2.0 + (yCell(cellmin) - xsecy(j))^2.0 + (zCell(cellmin) - xsecz(j))^2.0) - do k=0,nCellsOnCell(cellmin)-1 - dn = sqrt((xCell(cellsOnCell(cellmin,k)-1) - xsecx(j))^2.0 + (yCell(cellsOnCell(cellmin,k)-1) - xsecy(j))^2.0 + (zCell(cellsOnCell(cellmin,k)-1) - xsecz(j))^2.0) - if (dn .lt. d) then - d = dn - nearest = (/cellsOnCell(cellmin,k)/)-1 - moved = 1 - end if - end do - if (moved .eq. 1) then - cellmin = nearest - end if - end do - xsec_id(j) = cellmin - end do - - ; For all cross section points, find the nearest vertex and edge - do i=0,nsec-1 - iVtx = verticesOnCell(xsec_id(i),0) - 1 - iEdge = edgesOnCell(xsec_id(i),0) - 1 - xsec_edge_id(i) = iEdge - xsec_vtx_id(i) = iVtx - de = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - do j=1,nCellsOnCell(xsec_id(i))-1 - iVtx = verticesOnCell(xsec_id(i),j) - 1 - iEdge = edgesOnCell(xsec_id(i),j) - 1 - de_test = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv_test = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - if (de_test .lt. de) then - de = de_test - xsec_edge_id(i) = iEdge - end if - if (dv_test .lt. dv) then - dv = dv_test - xsec_vtx_id(i) = iVtx - end if - end do - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - - res@cnLevelSpacingF = 50.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - if (plotfield .eq. 
"h") then - fld = f->h(t,:,:) - hs = f->h_s(:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - do i=0,nVertLevels-1 - fld(:,i) = fld(:,i) + hs(:) - end do - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - end if - if (plotfield .eq. "vorticity") then - fld = f->vorticity(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_vtx_id(:) - end if - res@cnLineDashPattern = 0 - - ; Extract field from along cross section into plotting array - arr = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 - arr(j,i) = doubletofloat(fld(xsec_id(i),j)) - end do - end do - - map = gsn_csm_contour(wks,arr,res) - - if (horiz_winds) then - u = f->u(t,:,:) - v = f->v(t,:,:) - esizes = dimsizes(u) - u_earth = new((/nVertLevels,nsec/),float) - v_earth = new((/nVertLevels,nsec/),float) - x_edge = new((/nVertLevels,nsec/),float) - y_edge = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 - u_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i))) - v(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i)))) - v_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i))) + v(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i)))) - x_edge(j,i) = i - y_edge(j,i) = j - end do - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvect(wks, x_edge, y_edge, u_earth, v_earth) - end if - - frame(wks) - -end - From 2625a5172dfd2deda80d37a275f6fc6097dcfcf8 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:12:16 -0600 Subject: [PATCH 020/180] Remove visualization/transport_sections --- visualization/transport_sections/README | 41 --- .../transport_sections/compute_transport.m | 99 ------- .../transport_sections/find_edge_sections.m | 239 --------------- .../load_large_variables_edge.m | 66 ----- .../transport_sections/plot_results.m | 102 ------- .../sub_plot_edge_sections.m | 120 
-------- .../transport_sections/transport_sections.m | 280 ------------------ .../write_edge_sections_netcdf.m | 76 ----- .../write_edge_sections_text.m | 104 ------- 9 files changed, 1127 deletions(-) delete mode 100644 visualization/transport_sections/README delete mode 100644 visualization/transport_sections/compute_transport.m delete mode 100644 visualization/transport_sections/find_edge_sections.m delete mode 100644 visualization/transport_sections/load_large_variables_edge.m delete mode 100644 visualization/transport_sections/plot_results.m delete mode 100644 visualization/transport_sections/sub_plot_edge_sections.m delete mode 100644 visualization/transport_sections/transport_sections.m delete mode 100644 visualization/transport_sections/write_edge_sections_netcdf.m delete mode 100644 visualization/transport_sections/write_edge_sections_text.m diff --git a/visualization/transport_sections/README b/visualization/transport_sections/README deleted file mode 100644 index 18ca701ea..000000000 --- a/visualization/transport_sections/README +++ /dev/null @@ -1,41 +0,0 @@ -transport_sections README - -This is a matlab tool to find sections that connect two points on the -globe. These sections are a sequence of connected edges, and the -edges and other variables are output as both a netcdf and text files. -The transport can then be measured using this matlab code using output -files, or in MPAS-Ocean during runtime. - -To begin, change the parameters at the top of transport_sections.m. -You will need to change the text strings wd, sim(i).dir, and -sim(i).netcdf_file so that the text string - -[wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, - -where wd is the working directory and dir is the run directory. -Details of the section coordinates and variables may be specified in -transport_sections.m. - -The data files only need to contain a small number of variables. 
-You may need to reduce the file size before copying to a local -machine using: - -ncks -v acc_u, \ -nAccumulate,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,hZLevel,\ -dvEdge \ -file_in.nc file_out.nc - -The matlab scripts will create a new netcdf file named -{your_domain}_section_edge_data.nc. To merge this file with an -existing grid or restart file, use: - -ncks -A -v sectionEdgeIndex,sectionEdgeSign,nEdgesInSection,\ -sectionText,sectionAbbreviation,sectionCoord \ -{your_domain}_section_edge_data.nc your_restart_file.nc - -These matlab tools have been tested for ocean output files, but should -nearly work for other cores as well. A few lines will need to be -changed. - -Mark Petersen, MPAS-Ocean Team, LANL, May 2012 - diff --git a/visualization/transport_sections/compute_transport.m b/visualization/transport_sections/compute_transport.m deleted file mode 100644 index 4315ddf25..000000000 --- a/visualization/transport_sections/compute_transport.m +++ /dev/null @@ -1,99 +0,0 @@ -function tr_total = compute_transport ... - (wd,dir,netcdf_file, ... - sectionEdgeIndex, sectionEdgeSign, ... - nEdgesInSection, sectionData,sectionText,sectionAbbreviation) - -% Load large variables from netcdf file - -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% var_conv_factor multiply each variable by this unit conversion. 
-% sectionEdgeIndex(maxEdges,nSections) cell index of each section -% nEdgesInSection(nSections) number of cells in each section -% sectionData(nVertLevels,max(nEdgesInSection),nSections,nVars) -% data in each cross-section for each variable -% sectionText a cell array with text describing each section -% sectionAbbreviation an 8-character title for each section - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Compute transport through each section -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** Compute transport: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -refLayerThickness = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refLayerThickness')); -dvEdge = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'dvEdge')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -[dimname,nTimeSlices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'Time')); -netcdf.close(ncid) - -nSections = length(nEdgesInSection); -maxNEdgesInSection = max(nEdgesInSection); - -m3ps_to_Sv = 1e-6; % m^3/sec flux to Sverdrups - -% the volume transport -tr = zeros(nVertLevels,maxNEdgesInSection,nSections); -tr_total = zeros(nSections,nTimeSlices); - -for iTime=1:nTimeSlices -header=' '; -data_str=' '; -for iSection = 1:nSections - for i=1:nEdgesInSection(iSection) - iEdge = sectionEdgeIndex(i,iSection); - for k=1:nVertLevels - % Compute transport. - % I am assuming here that sectionData(:,:,:,1) contains avgNormalVelocity - tr(k,i,iSection,iTime) = sectionEdgeSign(i,iSection)... - *sectionData(k,i,iSection,1,iTime)*dvEdge(iEdge)* ... - refLayerThickness(k)*m3ps_to_Sv; - tr_total(iSection,iTime) = tr_total(iSection,iTime) + tr(k,i,iSection,iTime); - - % This is edge velocity - %tr(k,i,iSection,iTime) = sectionEdgeSign(i,iSection)*sectionData(k,i,iSection,1,iTime); - end - end - - % Optional, for plotting the flow across a cross-section. 
- % This plots u on edges, so columns oscillate as edges change - % direction. The best way to view a cross-section is to use the - % uMeridional and uZonal at the cell center. - %figure(iSection+1) - %imagesc(log(abs(tr(:,1:nEdgesInSection(iSection),iSection)))) - %imagesc(tr(:,1:nEdgesInSection(iSection),iSection)) - %colorbar - - % note: flow computed in matlab only matches that computed in - % MPAS-O if they both use refLayerThickness. To do a verification check, - % replace the line - % * h_edge(k,iEdge)*m3ps_to_Sv; - % in mpas_ocn_time_average.F with the line - % * refLayerThickness(k,iEdge)*m3ps_to_Sv; - - temptext = char(sectionText(iSection)); -% fprintf(['Section %3i, ' temptext(1:22) ' observed flow:' ... -% temptext(63:75) ' mpas flow: %20.15f Sv\n'],iSection,tr_total(iSection)) - - header = [header sectionAbbreviation(iSection,:) ' ']; - data_str = [data_str num2str_fixed(tr_total(iSection,iTime),'%4.1f',7)... - ' ']; -end - -if iTime==1 -fprintf(['\n Summary, in Sv: \n' header '\n' ]) -end -fprintf([data_str ' \n']) -end - - -fprintf('\n') - diff --git a/visualization/transport_sections/find_edge_sections.m b/visualization/transport_sections/find_edge_sections.m deleted file mode 100644 index d9297c1df..000000000 --- a/visualization/transport_sections/find_edge_sections.m +++ /dev/null @@ -1,239 +0,0 @@ -function [sectionEdgeIndex, sectionEdgeSign, nEdgesInSection, ... - latSectionVertex,lonSectionVertex, ... - latVertexDeg,lonVertexDeg] = find_edge_sections ... - (wd,dir,netcdf_file,sectionText,sectionCoord) - -% This function reads grid data from an MPAS-Ocean grid or restart -% netCDF file, and finds a path of edges that connect the endpoints -% specified in sectionCoord. The path is forced to travel through edges -% that are closest to the line connecting the beginning and end -% edges. 
-% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% sectionText a cell array with text describing each section -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% -%%%%%%%%%% output arguments %%%%%%%%% -% sectionEdgeIndex(maxNEdgesInSection,nSections) edge index of each section -% sectionEdgeSign(maxNEdgesInSection,nSections) sign of each -% section, positive is to right of path direction. -% nEdgesInSection(nSections) number of edges in each section -% latSectionVertex(maxNEdgesInSection,nSections) lat coordinates of each section -% lonSectionVertex(maxNEdgesInSection,nSections) lon coordinates of each section -% latVertexDeg(nEdges) lat arrays for all edges -% lonVertexDeg(nEdges) lon arrays for all edges - -%%%%%%%%%% parameters internal to this function %%%%%%%%%% - -% maxEdges specifies the maximum number of Edges attempted along -% the path to the end-edge before stopping with a warning. -maxEdges = 1500; - -% Make sure sectionCoord traverse from south to north, and from east to west. -% [startlat startlon endlat endlon] -nSections = size(sectionCoord,1); -for j=1:nSections - latChange = sectionCoord(j,3) - sectionCoord(j,1); - lonChange = sectionCoord(j,4) - sectionCoord(j,2); - if abs(lonChange)>abs(latChange) % zonal section - if lonChange>0 - fprintf(['Warning: Zonal sections should go from east to west. ' ... - 'For section %g start and end longitudes are %g, %g \n'], ... - j,sectionCoord(j,2),sectionCoord(j,4)) - end - else - if latChange<0 - fprintf(['Warning: Meridional sections should go from south to north. ' ... - 'For section %g start and end latitudes are %g, %g \n'], ... 
- j,sectionCoord(j,1),sectionCoord(j,3)) - end - end - -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read edge and edge data from grid file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_edge_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -verticesOnEdge = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'verticesOnEdge')); -edgesOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'edgesOnVertex')); -[dimname,nEdges]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nEdges')); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -netcdf.close(ncid) - -% Grid variables should be: -% lat varies from -pi/2:pi/2 -% lon varies from 0:2*pi -if (min(lonVertex)<-1e-8) - lonVertex = mod(lonVertex,2*pi); -end -% convert to degrees for plotting: -latVertexDeg = latVertex*180/pi; -lonVertexDeg = lonVertex*180/pi; - -sectionVertexIndex = zeros(maxEdges,nSections); -sectionEdgeIndex = zeros(maxEdges,nSections); -sectionEdgeSign = zeros(maxEdges,nSections); -nEdgesInSection = zeros(1,nSections); - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Find edges that connect beginning and ending points -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -for iSection=1:nSections - latCoord = [sectionCoord(iSection,1) sectionCoord(iSection,3)]/180*pi; - lonCoord = [sectionCoord(iSection,2) sectionCoord(iSection,4)]/180*pi; - - % Find vertex closest to start and end coordinates. - % The seed vertex array simply stores start and end index. 
- minDist = 1e10*ones(1,2); - seedVertexIndex = zeros(1,2); - for iVertex = 1:nVertices - for i=1:2 - dist = sqrt( ... - (lonCoord(i) - lonVertex(iVertex))^2 ... - + (latCoord(i) - latVertex(iVertex))^2); - if (dist0) - % Find the vertex on the other side of iEdge - if (verticesOnEdge(1,iEdge)==sectionVertexIndex(i,iSection)) - iVertex = verticesOnEdge(2,iEdge); - % Going from vertex 1 to vertex 2. Leave positive. - edgeSign = 1; - else - iVertex = verticesOnEdge(1,iEdge); - % Going from vertex 2 to vertex 1. Make negative. - edgeSign = -1; - end - - % I am using lat/lon Cartesian distance. - % This is distance to the final vertex location. - dist = sqrt( ... - (lonVertex(iVertex) - lonVertex(endVertexIndex))^2 ... - + (latVertex(iVertex) - latVertex(endVertexIndex))^2 ); - -%fprintf('%6i %6i %8.4f %8.4f h1=plot(%g,%g); h2=plot(%g,%g); \n',... -%i,j,dist,distLastVertex,... -% lonVertex(iVertex)*180/pi,latVertex(iVertex)*180/pi,... -% lonVertex(endVertexIndex)*180/pi,latVertex(endVertexIndex)*180/pi) - % check if this vertex is closer to the end vertex than the - % last vertex. If so, it is a candidate, and we can continue. - if (dist-1e-8) - minLon = 0.0; - latTrans = 360; -else - minLon = -180.0; - latTrans = 0.0; -end - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. 
- load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - patch([-10 1000 1000 -10 -10],[-100 -100 100 100 -100],[.5 1 0]) - patch([-10 1000 1000 -10 -10],[-100 -100 100 100 -100],[1 1 1]) - set(gca,'YDir','normal') - - hold on - - % world - axis([0 360 -90 90]) - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-240+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - - % plot vertexs. This is just done for debugging. - h=plot(lonVertexDeg,latVertexDeg,'.b'); - set(h,'MarkerSize',2) - - grid on - - for iSection=1:nSections - latCoordDeg = [sectionCoord(iSection,1) sectionCoord(iSection,3)]; - lonCoordDeg = [sectionCoord(iSection,2) sectionCoord(iSection,4)]; - - %h=plot([mod(lonCoordDeg,360)],[latCoordDeg],'*-'); - %set(h,'Color','y','LineWidth',1) - %h=plot([mod(lonCoordDeg(1),360)],[latCoordDeg(1)],'*k'); - - for i=1:nEdgesInSection(iSection) - h = line([lonSectionVertex(i,iSection) lonSectionVertex(i+1,iSection)],... - [latSectionVertex(i,iSection) latSectionVertex(i+1,iSection)]); - set(h,'Color','r','LineWidth',2) - %plot([lonVertexDeg(sectionVertexIndex(i+1,iSection))], ... - % [latVertexDeg(sectionVertexIndex(i+1,iSection))],'sk') - end - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Edges of transport sections. 
']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 16 8]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - dir_name1 = regexprep(dir,'\.','_'); - dir_name2 = regexprep(dir_name1,'/','_'); - filename=['f/' dir_name2 '_vertex_map' ]; - print('-djpeg',[filename '.jpg']); - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/transport_sections/transport_sections.m b/visualization/transport_sections/transport_sections.m deleted file mode 100644 index 2d11d91ec..000000000 --- a/visualization/transport_sections/transport_sections.m +++ /dev/null @@ -1,280 +0,0 @@ -%function transport_sections - -% Specify data files, coordinates and text, then call functions -% to find edge sections, load data, and compute transport through -% each section. -% -% This script produces new netcdf files in the subdirectory -% netcdf_files which can then be merged with grid.nc or restart.nc -% files to collect transport data in MPAS-Ocean -% -% To merge the new *_section_edge_data.nc with an existing grid or -% restart file, use: -% ncks -A -v sectionEdgeIndex,sectionEdgeSign,nEdgesInSection,\ -% sectionText,sectionAbbreviation,sectionCoord \ -% your_file_section_edge_data.nc your_restart_file.nc -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify data files -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% all plots are placed in the f directory. Comment out if not needed. -unix('mkdir -p f netcdf_files docs text_files'); - -% The text string [wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. 
- -wd = '/var/tmp/mpeterse/runs/'; -dir='m91'; -abc = 'klmnop'; - -for letter=1:length(abc) - -% These files only need to contain a small number of variables. -% You may need to reduce the file size before copying to a local -% machine using: -% ncks -v avgNormalVelocity,avgNormalTransportVelocity,nAverage,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,refLayerThickness,dvEdge \ -% file_in.nc file_out.nc - -clear sim -for j=1:3 - sim(j).dir=[dir abc(letter)]; - sim(j).netcdf_file = ['output.00' num2str_fixed0(16+j,'%g',2) '-02-01_00.00.00.nc_transport_vars.nc']; -end - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% sectionText a cell array with text describing each section -sectionText = { -'Drake Passage, S Ocean -56 to -63 lat, 68W lon, section A21, 140+/- 6 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Tasmania-Ant, S Ocean -44 to -66 lat, 140E lon, section P12, 157+/-10 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Africa-Ant, S Ocean -31.3to -70 lat, 30E lon, section I6, Sv in Ganachaud99thesis ',... -'Antilles Inflow, Carib. -18.4+/-4.7Sv in Johns_ea02dsr '... -'Mona Passage, Caribbian -2.6+/-1.2Sv in Johns_ea02dsr '... -'Windward Passage, Carib -7.0 Sv in Nelepo_ea76sr, Roemmich81jgr '... -'Florida-Cuba, Caribbian 31.5+/-1.5Sv in Johns_ea02dsr, 32.3+/-3.2Sv Larsen92rslpt'... -'Florida-Bahamas, Carib. 27 lat, -80 to -78.8lon, 31.5+/-1.5Sv in Johns_ea02dsr, 32.3+/-3.2Sv Larsen92rslpt'... -'Indonesian Throughflow, -9 to -18 lat, 116E lon, section J89, -16+/- 5 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Agulhas -70+/-20 Sv in Bryden_Beal01dsr ',... -'Mozambique Channel, -25 lat, 35 to 44E lon, section I4 , -14+/- 6 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Bering Strait, Arctic 0.83+/-0.66Sv in Roach_ea95jgr '... 
- }; -%'Lancaster Sound, Arctic 0.67+/-0.3Sv in Maltrud_McLean05om '... -%'Fram Strait, Arctic -4.2+/-2.3Sv in Fahrbach_ea01pr '... -%'Robeson Channel, Arctic -0.75+/-0.2Sv in Maltrud_McLean05om '... - -% sectionAbbreviation an 8-character title for each section -sectionAbbreviation = [... - 'Drake Pa';... - 'Tasm-Ant';... - 'Afri-Ant';... - 'Antilles';... - 'Mona Pas';... - 'Wind Pas';... - 'FL-Cuba ';... - 'FL-Baham';... - 'Ind Thru';... - 'Agulhas ';... - 'Mozam Ch';... - 'Bering ';... -]; -% 'Lancastr';... -% 'Fram ';... -% 'Robeson ';... - -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -sectionCoord = [... - -64.5 -64 -55 -65.3;... % Drake - -67 140 -43.5 147 ;... % Tasm-Ant - -70.0 30 -31.3 30 ;... % Afri-Ant - 10.7 -63.2 18.0 -65.9;... % Antilles - 18.4 -67.2 18.4 -68.5;... % Mona Passage - 19.8 -73.4 20.1 -74.3;... % Windward Passage - 23.1 -81.0 25.15 -81.0;... % Florida-Cuba - 26.52 -78.78 26.7 -80.1;... % Florida-Bahamas - -21 116.0 -8.8 116 ;... % Ind Thru - -32.4 32.0 -31.0 30.2;... % Agulhas - -25 44.0 -25.0 34 ;... % Mozam Ch - 65.8 -167.7 66.1 -169.7;... % Bering St - ]; -% 73.7 -80.6 74.6 -81.0;... % Lancaster Sound- was not able to -% get this to connect for all resolutions -% 79.7 10.7 79.7 -17.7;... % Fram St - crosses 0 lon. This is not in code yet. -% 81.0 -63.5 82.0 -63.5;... % Robeson Ch - was not able to get this to connect - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. 
-% var_lims(nVars,3) contour line definition: min, max, interval - -% Eulerian velocity from prognostic momentum equation -var_name = {'avgNormalVelocity'}; -% total transport velocity -%var_name = {'avgNormalTransportVelocity'} - -var_conv_factor = [1 1 1]; % No conversion here. - -var_lims = [-10 10 2.5; -10 10 2.5; 0 20 2.5]; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify actions to be taken -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -find_edge_sections_flag = true ; -write_edge_sections_text_flag = false ; -write_edge_sections_netcdf_flag = false ; -plot_edge_sections_flag = true ; -compute_transport_flag = true ; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Begin main code. Normally this does not need to change. -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%close all - -% change the coordinate range to be 0 to 360. -sectionCoord(:,2) = mod(sectionCoord(:,2),360); -sectionCoord(:,4) = mod(sectionCoord(:,4),360); - -for iSim = 1:length(sim) - - fprintf(['**** simulation: ' sim(iSim).dir ' ' sim(iSim).netcdf_file '\n']) - unix(['mkdir -p docs/' sim(iSim).netcdf_file '_dir/f']); - fid_latex = fopen('temp.tex','w'); - fprintf(fid_latex,['%% file created by plot_mpas_cross_sections, ' date '\n\n']); - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Find edges that connect beginning and end points of section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if find_edge_sections_flag - [sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg] ... - = find_edge_sections(wd,sim(iSim).dir,sim(iSim).netcdf_file, ... 
- sectionText,sectionCoord); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Write section edge information to a netcdf file - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if write_edge_sections_text_flag - write_edge_sections_text... - (sim(iSim).dir, sim(iSim).sectionEdgeIndex, ... - sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sectionText,sectionAbbreviation,sectionCoord) - end - - if write_edge_sections_netcdf_flag - write_edge_sections_netcdf... - (sim(iSim).dir, sim(iSim).sectionEdgeIndex, ... - sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sectionText,sectionAbbreviation,sectionCoord) - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot edge section locations on world map - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if plot_edge_sections_flag - sub_plot_edge_sections(sim(iSim).dir,sectionCoord, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection,... - fid_latex); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Load large variables from netcdf file - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - [sim(iSim).sectionData] = load_large_variables_edge ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, var_name,var_conv_factor, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Compute transport through each section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - sim(iSim).tr_total = compute_transport ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, ... 
- sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).sectionData,sectionText,sectionAbbreviation); - - if iSim==1 - tr_total = sim(iSim).tr_total'; - else - tr_total = [tr_total; sim(iSim).tr_total']; - end - - end - - fclose(fid_latex); - -end % iSim - -% tr_total -mean_transport = mean(tr_total,1); -%fprintf(['mean over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',mean_transport) -fprintf([' mean, ' sim(1).dir ' \n']) - -var_transport = var(tr_total,1); -%fprintf(['variance over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',var_transport) -fprintf([' var, ' sim(1).dir ' \n']) - -std_transport = std(tr_total,1); -%fprintf(['stdev over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',std_transport) -fprintf([' std, ' sim(1).dir ' \n']) - -min_transport = min(tr_total,[],1); -%fprintf(['minimum over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',min_transport) -fprintf([' min, ' sim(1).dir ' \n']) - -max_transport = max(tr_total,[],1); -%fprintf(['maximum over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',max_transport) -fprintf([' max, ' sim(1).dir ' \n']) - -filename = ['data/' sim(1).dir '_' char(var_name) '_small_data_file.mat'] -clear sim -save(filename) - -end % letter diff --git a/visualization/transport_sections/write_edge_sections_netcdf.m b/visualization/transport_sections/write_edge_sections_netcdf.m deleted file mode 100644 index acdec2a46..000000000 --- a/visualization/transport_sections/write_edge_sections_netcdf.m +++ /dev/null @@ -1,76 +0,0 @@ -function write_edge_sections_netcdf ... - (dir, ... - sectionEdgeIndex, sectionEdgeSign, nEdgesInSection,... 
- sectionText,sectionAbbreviation,sectionCoord) - -% Write section edge information to the netcdf file -% netcdf_files/your_dir_transport_section_edges.nc -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir string with run directory name -% sectionEdgeIndex(maxNEdgesInSection,nSections) edge index of each section -% sectionEdgeSign(maxNEdgesInSection,nSections) sign of each -% section, positive is to right of path direction. -% nEdgesInSection(nSections) number of cells in each section -% sectionText a cell array with text describing each section -% sectionAbbreviation an 8-character title for each section -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Write section edge information to a netcdf file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** Write edge information to file: ' dir '\n']) - -nSections = length(nEdgesInSection); -maxNEdgesInSection = max(nEdgesInSection); - -dir_name1 = regexprep(dir,'/','_'); -filename = ['netcdf_files/' dir_name1 '_transport_section_edges.nc']; -ncid = netcdf.create(filename,'nc_clobber'); - -% Define the dimensions of the variable. -dimid_nSections = netcdf.defDim(ncid,'nSections',nSections); -dimid_maxNEdgesInSection = netcdf.defDim(ncid,'maxNEdgesInSection',maxNEdgesInSection); -dimid_latLonPairs = netcdf.defDim(ncid,'latLonPairs',4); -dimid_CharLength8 = netcdf.defDim(ncid,'CharLength8',8); -dimid_CharLength120 = netcdf.defDim(ncid,'CharLength120',120); - -% Define a new variable in the file. -sectionEdgeIndex_varID = netcdf.defVar(ncid,'sectionEdgeIndex',... - 'int',[dimid_maxNEdgesInSection dimid_nSections]); - -sectionEdgeSign_varID = netcdf.defVar(ncid,'sectionEdgeSign',... 
- 'int',[dimid_maxNEdgesInSection dimid_nSections]); - -nEdgesInSection_varID = netcdf.defVar(ncid,'nEdgesInSection',... - 'int', [dimid_nSections]); - -sectionText_varID = netcdf.defVar(ncid,'sectionText',... - 'char',[dimid_CharLength120 dimid_nSections]); -sectionAbbreviation_varID = netcdf.defVar(ncid,'sectionAbbreviation',... - 'char',[dimid_CharLength8 dimid_nSections]); -sectionCoord_varID = netcdf.defVar(ncid,'sectionCoord',... - 'double',[dimid_latLonPairs dimid_nSections]); - - -% Leave define mode and enter data mode to write data. -netcdf.endDef(ncid) - -% Write data to variable. -netcdf.putVar(ncid,sectionEdgeIndex_varID,sectionEdgeIndex); -netcdf.putVar(ncid,sectionEdgeSign_varID,sectionEdgeSign); -netcdf.putVar(ncid,nEdgesInSection_varID,nEdgesInSection); -netcdf.putVar(ncid,sectionText_varID,char(sectionText)'); -netcdf.putVar(ncid,sectionAbbreviation_varID,sectionAbbreviation'); -netcdf.putVar(ncid,sectionCoord_varID,sectionCoord); - -netcdf.close(ncid) - -fprintf('\n') - diff --git a/visualization/transport_sections/write_edge_sections_text.m b/visualization/transport_sections/write_edge_sections_text.m deleted file mode 100644 index 8141ec4fd..000000000 --- a/visualization/transport_sections/write_edge_sections_text.m +++ /dev/null @@ -1,104 +0,0 @@ -function write_edge_sections_text ... - (dir, ... - sectionEdgeIndex, sectionEdgeSign, nEdgesInSection,... - sectionText,sectionAbbreviation,sectionCoord) - -% Write section edge information to the text file -% text_files/your_dir_transport_section_edges.nc -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir string with run directory name -% sectionEdgeIndex(maxNEdgesInSection,nSections) edge index of each section -% sectionEdgeSign(maxNEdgesInSection,nSections) sign of each -% section, positive is to right of path direction. 
-% nEdgesInSection(nSections) number of cells in each section -% sectionText a cell array with text describing each section -% sectionAbbreviation an 8-character title for each section -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Write section edge information to a text file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** Write edge information to file: ' dir '\n']) - -nSections = length(nEdgesInSection); -maxNEdgesInSection = max(nEdgesInSection); - -dir_name1 = regexprep(dir,'/','_'); -unix(['mkdir -p text_files/' dir_name1 ]); - -% sectionEdgeIndex -filename = ['text_files/' dir_name1 '/sectionEdgeIndex.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',sectionEdgeIndex(:,j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionEdgeIndex -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionEdgeIndex.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',sectionEdgeIndex(:,j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionEdgeSign -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionEdgeSign.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',sectionEdgeSign(:,j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% nEdgesInSection -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/nEdgesInSection.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',nEdgesInSection(j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionText -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionText.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %s',char(sectionText(j))); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionAbbreviation 
-dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionAbbreviation.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %s',sectionAbbreviation(j,:)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionCoord -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionCoord.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10.3f',sectionCoord(j,:)); - fprintf(fid,' \n'); -end -fclose(fid); - From 433c3303e2b56b4835006710492f8d9127d67b06 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:54:00 -0600 Subject: [PATCH 021/180] Move 'grid_rotate' from grid_gen/ to mesh_tools/ --- {grid_gen => mesh_tools}/grid_rotate/Makefile | 0 {grid_gen => mesh_tools}/grid_rotate/README | 0 {grid_gen => mesh_tools}/grid_rotate/grid_rotate.f90 | 0 {grid_gen => mesh_tools}/grid_rotate/mesh.ncl | 0 {grid_gen => mesh_tools}/grid_rotate/namelist.input | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/grid_rotate/Makefile (100%) rename {grid_gen => mesh_tools}/grid_rotate/README (100%) rename {grid_gen => mesh_tools}/grid_rotate/grid_rotate.f90 (100%) rename {grid_gen => mesh_tools}/grid_rotate/mesh.ncl (100%) rename {grid_gen => mesh_tools}/grid_rotate/namelist.input (100%) diff --git a/grid_gen/grid_rotate/Makefile b/mesh_tools/grid_rotate/Makefile similarity index 100% rename from grid_gen/grid_rotate/Makefile rename to mesh_tools/grid_rotate/Makefile diff --git a/grid_gen/grid_rotate/README b/mesh_tools/grid_rotate/README similarity index 100% rename from grid_gen/grid_rotate/README rename to mesh_tools/grid_rotate/README diff --git a/grid_gen/grid_rotate/grid_rotate.f90 b/mesh_tools/grid_rotate/grid_rotate.f90 similarity index 100% rename from grid_gen/grid_rotate/grid_rotate.f90 rename to mesh_tools/grid_rotate/grid_rotate.f90 diff --git a/grid_gen/grid_rotate/mesh.ncl b/mesh_tools/grid_rotate/mesh.ncl similarity index 100% rename from 
grid_gen/grid_rotate/mesh.ncl rename to mesh_tools/grid_rotate/mesh.ncl diff --git a/grid_gen/grid_rotate/namelist.input b/mesh_tools/grid_rotate/namelist.input similarity index 100% rename from grid_gen/grid_rotate/namelist.input rename to mesh_tools/grid_rotate/namelist.input From 3260ea33e03e2eff48c41c16cbf23dba5ba8df75 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:55:09 -0600 Subject: [PATCH 022/180] Move 'triangle_jigsaw_to_netcdf' from grid_gen/ to mesh_tools/ --- {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/.gitignore | 0 {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/README.md | 0 {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/build_mesh.sh | 0 .../triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m | 0 .../triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m | 0 .../triangle_jigsaw_to_netcdf/examples/coast_regional.m | 0 .../triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m | 0 .../triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m | 0 .../triangle_jigsaw_to_netcdf/examples/mpas_uniform.m | 0 .../triangle_jigsaw_to_netcdf/examples/refined_disk.m | 0 .../triangle_jigsaw_to_netcdf/examples/soma_32to4km.m | 0 .../triangle_jigsaw_to_netcdf/examples/soma_32to8km.m | 0 .../triangle_jigsaw_to_netcdf/inject_bathymetry.py | 0 .../triangle_jigsaw_to_netcdf/mpas_to_triangle.py | 0 {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/open_msh.py | 0 .../triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py | 0 16 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/.gitignore (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/README.md (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/build_mesh.sh (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m (100%) rename {grid_gen => 
mesh_tools}/triangle_jigsaw_to_netcdf/examples/coast_regional.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/refined_disk.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/inject_bathymetry.py (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/mpas_to_triangle.py (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/open_msh.py (100%) rename {grid_gen => mesh_tools}/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py (100%) diff --git a/grid_gen/triangle_jigsaw_to_netcdf/.gitignore b/mesh_tools/triangle_jigsaw_to_netcdf/.gitignore similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/.gitignore rename to mesh_tools/triangle_jigsaw_to_netcdf/.gitignore diff --git a/grid_gen/triangle_jigsaw_to_netcdf/README.md b/mesh_tools/triangle_jigsaw_to_netcdf/README.md similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/README.md rename to mesh_tools/triangle_jigsaw_to_netcdf/README.md diff --git a/grid_gen/triangle_jigsaw_to_netcdf/build_mesh.sh b/mesh_tools/triangle_jigsaw_to_netcdf/build_mesh.sh similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/build_mesh.sh rename to mesh_tools/triangle_jigsaw_to_netcdf/build_mesh.sh diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m rename to 
mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/coast_regional.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/coast_regional.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/coast_regional.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/coast_regional.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/refined_disk.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/refined_disk.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/refined_disk.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/refined_disk.m diff --git 
a/grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/inject_bathymetry.py b/mesh_tools/triangle_jigsaw_to_netcdf/inject_bathymetry.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/inject_bathymetry.py rename to mesh_tools/triangle_jigsaw_to_netcdf/inject_bathymetry.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/mpas_to_triangle.py b/mesh_tools/triangle_jigsaw_to_netcdf/mpas_to_triangle.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/mpas_to_triangle.py rename to mesh_tools/triangle_jigsaw_to_netcdf/mpas_to_triangle.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/open_msh.py b/mesh_tools/triangle_jigsaw_to_netcdf/open_msh.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/open_msh.py rename to mesh_tools/triangle_jigsaw_to_netcdf/open_msh.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py b/mesh_tools/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py rename to mesh_tools/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py From cfa6c7675adcf9b09c20af22902b9050bee8bf6a Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:56:18 -0600 Subject: [PATCH 023/180] Move 'points-mpas' from grid_gen/ to mesh_tools/ --- {grid_gen => mesh_tools}/points-mpas/.gitignore | 0 {grid_gen => 
mesh_tools}/points-mpas/Makefile | 0 {grid_gen => mesh_tools}/points-mpas/Params | 0 {grid_gen => mesh_tools}/points-mpas/README | 0 {grid_gen => mesh_tools}/points-mpas/SaveDensity | 0 {grid_gen => mesh_tools}/points-mpas/SaveTriangles | 0 {grid_gen => mesh_tools}/points-mpas/SaveVertices | 0 {grid_gen => mesh_tools}/points-mpas/points-mpas.cpp | 0 {grid_gen => mesh_tools}/points-mpas/triangulation.h | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/points-mpas/.gitignore (100%) rename {grid_gen => mesh_tools}/points-mpas/Makefile (100%) rename {grid_gen => mesh_tools}/points-mpas/Params (100%) rename {grid_gen => mesh_tools}/points-mpas/README (100%) rename {grid_gen => mesh_tools}/points-mpas/SaveDensity (100%) rename {grid_gen => mesh_tools}/points-mpas/SaveTriangles (100%) rename {grid_gen => mesh_tools}/points-mpas/SaveVertices (100%) rename {grid_gen => mesh_tools}/points-mpas/points-mpas.cpp (100%) rename {grid_gen => mesh_tools}/points-mpas/triangulation.h (100%) diff --git a/grid_gen/points-mpas/.gitignore b/mesh_tools/points-mpas/.gitignore similarity index 100% rename from grid_gen/points-mpas/.gitignore rename to mesh_tools/points-mpas/.gitignore diff --git a/grid_gen/points-mpas/Makefile b/mesh_tools/points-mpas/Makefile similarity index 100% rename from grid_gen/points-mpas/Makefile rename to mesh_tools/points-mpas/Makefile diff --git a/grid_gen/points-mpas/Params b/mesh_tools/points-mpas/Params similarity index 100% rename from grid_gen/points-mpas/Params rename to mesh_tools/points-mpas/Params diff --git a/grid_gen/points-mpas/README b/mesh_tools/points-mpas/README similarity index 100% rename from grid_gen/points-mpas/README rename to mesh_tools/points-mpas/README diff --git a/grid_gen/points-mpas/SaveDensity b/mesh_tools/points-mpas/SaveDensity similarity index 100% rename from grid_gen/points-mpas/SaveDensity rename to mesh_tools/points-mpas/SaveDensity diff --git a/grid_gen/points-mpas/SaveTriangles 
b/mesh_tools/points-mpas/SaveTriangles similarity index 100% rename from grid_gen/points-mpas/SaveTriangles rename to mesh_tools/points-mpas/SaveTriangles diff --git a/grid_gen/points-mpas/SaveVertices b/mesh_tools/points-mpas/SaveVertices similarity index 100% rename from grid_gen/points-mpas/SaveVertices rename to mesh_tools/points-mpas/SaveVertices diff --git a/grid_gen/points-mpas/points-mpas.cpp b/mesh_tools/points-mpas/points-mpas.cpp similarity index 100% rename from grid_gen/points-mpas/points-mpas.cpp rename to mesh_tools/points-mpas/points-mpas.cpp diff --git a/grid_gen/points-mpas/triangulation.h b/mesh_tools/points-mpas/triangulation.h similarity index 100% rename from grid_gen/points-mpas/triangulation.h rename to mesh_tools/points-mpas/triangulation.h From e496fd4d406366c55c91aa3e87253c15879f5877 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 11:56:43 -0600 Subject: [PATCH 024/180] Move 'periodic_hex' from grid_gen/ to mesh_tools/ --- {grid_gen => mesh_tools}/periodic_hex/Makefile | 0 {grid_gen => mesh_tools}/periodic_hex/cells.ncl | 0 .../periodic_hex/mark_periodic_boundaries_for_culling.py | 0 {grid_gen => mesh_tools}/periodic_hex/module_cell_indexing.F | 0 {grid_gen => mesh_tools}/periodic_hex/module_write_netcdf.F | 0 {grid_gen => mesh_tools}/periodic_hex/namelist.input | 0 {grid_gen => mesh_tools}/periodic_hex/periodic_grid.F | 0 7 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/periodic_hex/Makefile (100%) rename {grid_gen => mesh_tools}/periodic_hex/cells.ncl (100%) rename {grid_gen => mesh_tools}/periodic_hex/mark_periodic_boundaries_for_culling.py (100%) rename {grid_gen => mesh_tools}/periodic_hex/module_cell_indexing.F (100%) rename {grid_gen => mesh_tools}/periodic_hex/module_write_netcdf.F (100%) rename {grid_gen => mesh_tools}/periodic_hex/namelist.input (100%) rename {grid_gen => mesh_tools}/periodic_hex/periodic_grid.F (100%) diff --git a/grid_gen/periodic_hex/Makefile 
b/mesh_tools/periodic_hex/Makefile similarity index 100% rename from grid_gen/periodic_hex/Makefile rename to mesh_tools/periodic_hex/Makefile diff --git a/grid_gen/periodic_hex/cells.ncl b/mesh_tools/periodic_hex/cells.ncl similarity index 100% rename from grid_gen/periodic_hex/cells.ncl rename to mesh_tools/periodic_hex/cells.ncl diff --git a/grid_gen/periodic_hex/mark_periodic_boundaries_for_culling.py b/mesh_tools/periodic_hex/mark_periodic_boundaries_for_culling.py similarity index 100% rename from grid_gen/periodic_hex/mark_periodic_boundaries_for_culling.py rename to mesh_tools/periodic_hex/mark_periodic_boundaries_for_culling.py diff --git a/grid_gen/periodic_hex/module_cell_indexing.F b/mesh_tools/periodic_hex/module_cell_indexing.F similarity index 100% rename from grid_gen/periodic_hex/module_cell_indexing.F rename to mesh_tools/periodic_hex/module_cell_indexing.F diff --git a/grid_gen/periodic_hex/module_write_netcdf.F b/mesh_tools/periodic_hex/module_write_netcdf.F similarity index 100% rename from grid_gen/periodic_hex/module_write_netcdf.F rename to mesh_tools/periodic_hex/module_write_netcdf.F diff --git a/grid_gen/periodic_hex/namelist.input b/mesh_tools/periodic_hex/namelist.input similarity index 100% rename from grid_gen/periodic_hex/namelist.input rename to mesh_tools/periodic_hex/namelist.input diff --git a/grid_gen/periodic_hex/periodic_grid.F b/mesh_tools/periodic_hex/periodic_grid.F similarity index 100% rename from grid_gen/periodic_hex/periodic_grid.F rename to mesh_tools/periodic_hex/periodic_grid.F From b0d9a8ce3551895ee49b05997311d042414478ae Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 14:07:03 -0600 Subject: [PATCH 025/180] Move 'periodic_quad' from grid_gen/ to mesh_tools/ --- {grid_gen => mesh_tools}/periodic_quad/Makefile | 0 {grid_gen => mesh_tools}/periodic_quad/cells.ncl | 0 {grid_gen => mesh_tools}/periodic_quad/module_cell_indexing.F | 0 {grid_gen => mesh_tools}/periodic_quad/module_write_netcdf.F | 0 
{grid_gen => mesh_tools}/periodic_quad/namelist.input | 0 {grid_gen => mesh_tools}/periodic_quad/periodic_grid.F | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/periodic_quad/Makefile (100%) rename {grid_gen => mesh_tools}/periodic_quad/cells.ncl (100%) rename {grid_gen => mesh_tools}/periodic_quad/module_cell_indexing.F (100%) rename {grid_gen => mesh_tools}/periodic_quad/module_write_netcdf.F (100%) rename {grid_gen => mesh_tools}/periodic_quad/namelist.input (100%) rename {grid_gen => mesh_tools}/periodic_quad/periodic_grid.F (100%) diff --git a/grid_gen/periodic_quad/Makefile b/mesh_tools/periodic_quad/Makefile similarity index 100% rename from grid_gen/periodic_quad/Makefile rename to mesh_tools/periodic_quad/Makefile diff --git a/grid_gen/periodic_quad/cells.ncl b/mesh_tools/periodic_quad/cells.ncl similarity index 100% rename from grid_gen/periodic_quad/cells.ncl rename to mesh_tools/periodic_quad/cells.ncl diff --git a/grid_gen/periodic_quad/module_cell_indexing.F b/mesh_tools/periodic_quad/module_cell_indexing.F similarity index 100% rename from grid_gen/periodic_quad/module_cell_indexing.F rename to mesh_tools/periodic_quad/module_cell_indexing.F diff --git a/grid_gen/periodic_quad/module_write_netcdf.F b/mesh_tools/periodic_quad/module_write_netcdf.F similarity index 100% rename from grid_gen/periodic_quad/module_write_netcdf.F rename to mesh_tools/periodic_quad/module_write_netcdf.F diff --git a/grid_gen/periodic_quad/namelist.input b/mesh_tools/periodic_quad/namelist.input similarity index 100% rename from grid_gen/periodic_quad/namelist.input rename to mesh_tools/periodic_quad/namelist.input diff --git a/grid_gen/periodic_quad/periodic_grid.F b/mesh_tools/periodic_quad/periodic_grid.F similarity index 100% rename from grid_gen/periodic_quad/periodic_grid.F rename to mesh_tools/periodic_quad/periodic_grid.F From 7b5bfe3fc411336f4ae1e27b79054bc7e1baa8e1 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 
2018 14:08:16 -0600 Subject: [PATCH 026/180] Move 'create_SCRIP_files' from grid_gen/ to mesh_tools/ --- .../create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py | 0 .../create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py (100%) rename {grid_gen => mesh_tools}/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py (100%) diff --git a/grid_gen/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py similarity index 100% rename from grid_gen/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py rename to mesh_tools/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py diff --git a/grid_gen/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py similarity index 100% rename from grid_gen/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py rename to mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py From 32e69ae3fe0a68ce17e549667e4c08a76761f9f5 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 14:10:31 -0600 Subject: [PATCH 027/180] Move 'mesh_conversion_tools' from grid_gen/ to mesh_tools/ --- {grid_gen => mesh_tools}/mesh_conversion_tools/Makefile | 0 {grid_gen => mesh_tools}/mesh_conversion_tools/README | 0 {grid_gen => mesh_tools}/mesh_conversion_tools/edge.h | 0 .../mesh_conversion_tools/json/json-forwards.h | 0 {grid_gen => mesh_tools}/mesh_conversion_tools/json/json.h | 0 {grid_gen => mesh_tools}/mesh_conversion_tools/jsoncpp.cpp | 0 .../mesh_conversion_tools/mark_horns_for_culling.py | 0 .../mesh_conversion_tools/mpas_cell_culler.cpp | 0 .../mesh_conversion_tools/mpas_mask_creator.cpp | 0 .../mesh_conversion_tools/mpas_mesh_converter.cpp | 0 {grid_gen => mesh_tools}/mesh_conversion_tools/netcdf_utils.cpp | 0 {grid_gen => 
mesh_tools}/mesh_conversion_tools/netcdf_utils.h | 0 {grid_gen => mesh_tools}/mesh_conversion_tools/pnt.h | 0 13 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/mesh_conversion_tools/Makefile (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/README (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/edge.h (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/json/json-forwards.h (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/json/json.h (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/jsoncpp.cpp (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/mark_horns_for_culling.py (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/mpas_cell_culler.cpp (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/mpas_mask_creator.cpp (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/mpas_mesh_converter.cpp (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/netcdf_utils.cpp (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/netcdf_utils.h (100%) rename {grid_gen => mesh_tools}/mesh_conversion_tools/pnt.h (100%) diff --git a/grid_gen/mesh_conversion_tools/Makefile b/mesh_tools/mesh_conversion_tools/Makefile similarity index 100% rename from grid_gen/mesh_conversion_tools/Makefile rename to mesh_tools/mesh_conversion_tools/Makefile diff --git a/grid_gen/mesh_conversion_tools/README b/mesh_tools/mesh_conversion_tools/README similarity index 100% rename from grid_gen/mesh_conversion_tools/README rename to mesh_tools/mesh_conversion_tools/README diff --git a/grid_gen/mesh_conversion_tools/edge.h b/mesh_tools/mesh_conversion_tools/edge.h similarity index 100% rename from grid_gen/mesh_conversion_tools/edge.h rename to mesh_tools/mesh_conversion_tools/edge.h diff --git a/grid_gen/mesh_conversion_tools/json/json-forwards.h b/mesh_tools/mesh_conversion_tools/json/json-forwards.h similarity index 100% rename from 
grid_gen/mesh_conversion_tools/json/json-forwards.h rename to mesh_tools/mesh_conversion_tools/json/json-forwards.h diff --git a/grid_gen/mesh_conversion_tools/json/json.h b/mesh_tools/mesh_conversion_tools/json/json.h similarity index 100% rename from grid_gen/mesh_conversion_tools/json/json.h rename to mesh_tools/mesh_conversion_tools/json/json.h diff --git a/grid_gen/mesh_conversion_tools/jsoncpp.cpp b/mesh_tools/mesh_conversion_tools/jsoncpp.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/jsoncpp.cpp rename to mesh_tools/mesh_conversion_tools/jsoncpp.cpp diff --git a/grid_gen/mesh_conversion_tools/mark_horns_for_culling.py b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py similarity index 100% rename from grid_gen/mesh_conversion_tools/mark_horns_for_culling.py rename to mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py diff --git a/grid_gen/mesh_conversion_tools/mpas_cell_culler.cpp b/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/mpas_cell_culler.cpp rename to mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp diff --git a/grid_gen/mesh_conversion_tools/mpas_mask_creator.cpp b/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/mpas_mask_creator.cpp rename to mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp diff --git a/grid_gen/mesh_conversion_tools/mpas_mesh_converter.cpp b/mesh_tools/mesh_conversion_tools/mpas_mesh_converter.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/mpas_mesh_converter.cpp rename to mesh_tools/mesh_conversion_tools/mpas_mesh_converter.cpp diff --git a/grid_gen/mesh_conversion_tools/netcdf_utils.cpp b/mesh_tools/mesh_conversion_tools/netcdf_utils.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/netcdf_utils.cpp rename to mesh_tools/mesh_conversion_tools/netcdf_utils.cpp diff --git 
a/grid_gen/mesh_conversion_tools/netcdf_utils.h b/mesh_tools/mesh_conversion_tools/netcdf_utils.h similarity index 100% rename from grid_gen/mesh_conversion_tools/netcdf_utils.h rename to mesh_tools/mesh_conversion_tools/netcdf_utils.h diff --git a/grid_gen/mesh_conversion_tools/pnt.h b/mesh_tools/mesh_conversion_tools/pnt.h similarity index 100% rename from grid_gen/mesh_conversion_tools/pnt.h rename to mesh_tools/mesh_conversion_tools/pnt.h From 2364706e4c9fe8acee0dd740d64b01dfbc9784b0 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 14:11:05 -0600 Subject: [PATCH 028/180] Move 'planar_grid_transformations' grid_gen/ to mesh_tools/ --- .../planar_grid_transformations/multires_scaled_hex.py | 0 .../planar_grid_transformations/rotate_planar_grid.readme.txt | 0 .../planar_grid_transformations/scale_planar_grid.py | 0 .../set_lat_lon_fields_in_planar_grid.py | 0 .../planar_grid_transformations/translate_planar_grid.py | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => mesh_tools}/planar_grid_transformations/multires_scaled_hex.py (100%) rename {grid_gen => mesh_tools}/planar_grid_transformations/rotate_planar_grid.readme.txt (100%) rename {grid_gen => mesh_tools}/planar_grid_transformations/scale_planar_grid.py (100%) rename {grid_gen => mesh_tools}/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py (100%) rename {grid_gen => mesh_tools}/planar_grid_transformations/translate_planar_grid.py (100%) diff --git a/grid_gen/planar_grid_transformations/multires_scaled_hex.py b/mesh_tools/planar_grid_transformations/multires_scaled_hex.py similarity index 100% rename from grid_gen/planar_grid_transformations/multires_scaled_hex.py rename to mesh_tools/planar_grid_transformations/multires_scaled_hex.py diff --git a/grid_gen/planar_grid_transformations/rotate_planar_grid.readme.txt b/mesh_tools/planar_grid_transformations/rotate_planar_grid.readme.txt similarity index 100% rename from 
grid_gen/planar_grid_transformations/rotate_planar_grid.readme.txt rename to mesh_tools/planar_grid_transformations/rotate_planar_grid.readme.txt diff --git a/grid_gen/planar_grid_transformations/scale_planar_grid.py b/mesh_tools/planar_grid_transformations/scale_planar_grid.py similarity index 100% rename from grid_gen/planar_grid_transformations/scale_planar_grid.py rename to mesh_tools/planar_grid_transformations/scale_planar_grid.py diff --git a/grid_gen/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py b/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py similarity index 100% rename from grid_gen/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py rename to mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py diff --git a/grid_gen/planar_grid_transformations/translate_planar_grid.py b/mesh_tools/planar_grid_transformations/translate_planar_grid.py similarity index 100% rename from grid_gen/planar_grid_transformations/translate_planar_grid.py rename to mesh_tools/planar_grid_transformations/translate_planar_grid.py From b3f872e09629393ddad04c12f2c77abeadb86009 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 14:13:50 -0600 Subject: [PATCH 029/180] Move 'acme_namelist_file_generator' from python_scripts/ to source_code_processing/ --- .../acme_namelist_file_generator/generate_acme_namelist_files.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => source_code_processing}/acme_namelist_file_generator/generate_acme_namelist_files.py (100%) diff --git a/python_scripts/acme_namelist_file_generator/generate_acme_namelist_files.py b/source_code_processing/acme_namelist_file_generator/generate_acme_namelist_files.py similarity index 100% rename from python_scripts/acme_namelist_file_generator/generate_acme_namelist_files.py rename to source_code_processing/acme_namelist_file_generator/generate_acme_namelist_files.py From 
db4c91ba720c42d8c3ec6a28a5ac9fdb1ef52a9a Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 25 May 2018 14:14:31 -0600 Subject: [PATCH 030/180] Move 'mpas_source_linter' from python_scripts/ to source_code_processing/ --- .../mpas_source_linter/mpas_source_linter.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => source_code_processing}/mpas_source_linter/mpas_source_linter.py (100%) diff --git a/python_scripts/mpas_source_linter/mpas_source_linter.py b/source_code_processing/mpas_source_linter/mpas_source_linter.py similarity index 100% rename from python_scripts/mpas_source_linter/mpas_source_linter.py rename to source_code_processing/mpas_source_linter/mpas_source_linter.py From 604a69adad1b3f0a62b1db083624ce771d28a1eb Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 06:31:26 -0600 Subject: [PATCH 031/180] Move 'planar_grids' from visualization/python to visualization --- .../planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py | 0 visualization/{python => }/planar_grids/plot_mpas_field.py | 0 visualization/{python => }/planar_grids/plot_mpas_field_xsect.py | 0 .../{python => }/planar_grids/plot_mpas_velocity_on_edges.py | 0 visualization/{python => }/planar_grids/visualize_blocks.py | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename visualization/{python => }/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py (100%) rename visualization/{python => }/planar_grids/plot_mpas_field.py (100%) rename visualization/{python => }/planar_grids/plot_mpas_field_xsect.py (100%) rename visualization/{python => }/planar_grids/plot_mpas_velocity_on_edges.py (100%) rename visualization/{python => }/planar_grids/visualize_blocks.py (100%) diff --git a/visualization/python/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py b/visualization/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py similarity index 100% rename from visualization/python/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py 
rename to visualization/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py diff --git a/visualization/python/planar_grids/plot_mpas_field.py b/visualization/planar_grids/plot_mpas_field.py similarity index 100% rename from visualization/python/planar_grids/plot_mpas_field.py rename to visualization/planar_grids/plot_mpas_field.py diff --git a/visualization/python/planar_grids/plot_mpas_field_xsect.py b/visualization/planar_grids/plot_mpas_field_xsect.py similarity index 100% rename from visualization/python/planar_grids/plot_mpas_field_xsect.py rename to visualization/planar_grids/plot_mpas_field_xsect.py diff --git a/visualization/python/planar_grids/plot_mpas_velocity_on_edges.py b/visualization/planar_grids/plot_mpas_velocity_on_edges.py similarity index 100% rename from visualization/python/planar_grids/plot_mpas_velocity_on_edges.py rename to visualization/planar_grids/plot_mpas_velocity_on_edges.py diff --git a/visualization/python/planar_grids/visualize_blocks.py b/visualization/planar_grids/visualize_blocks.py similarity index 100% rename from visualization/python/planar_grids/visualize_blocks.py rename to visualization/planar_grids/visualize_blocks.py From 2c6c407a660c9b9445d08d42cbd72da9e5f6d009 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 06:35:26 -0600 Subject: [PATCH 032/180] Move 'initial_condition_interpolation' from grid_gen to ocean --- .../initial_condition_interpolation/interpolate_state.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => ocean}/initial_condition_interpolation/interpolate_state.py (100%) diff --git a/grid_gen/initial_condition_interpolation/interpolate_state.py b/ocean/initial_condition_interpolation/interpolate_state.py similarity index 100% rename from grid_gen/initial_condition_interpolation/interpolate_state.py rename to ocean/initial_condition_interpolation/interpolate_state.py From df51d1d94398460e7007a3daa633fc6650e37b07 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: 
Sat, 9 Jun 2018 06:41:02 -0600 Subject: [PATCH 033/180] Move 'seaice_grid_tools' from grid_gen to seaice --- {grid_gen => seaice}/seaice_grid_tools/README | 0 .../seaice_grid_tools/gen_seaice_mesh_partition.py | 0 .../seaice_grid_tools/gen_seaice_regions_latitude_bands.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => seaice}/seaice_grid_tools/README (100%) rename {grid_gen => seaice}/seaice_grid_tools/gen_seaice_mesh_partition.py (100%) rename {grid_gen => seaice}/seaice_grid_tools/gen_seaice_regions_latitude_bands.py (100%) diff --git a/grid_gen/seaice_grid_tools/README b/seaice/seaice_grid_tools/README similarity index 100% rename from grid_gen/seaice_grid_tools/README rename to seaice/seaice_grid_tools/README diff --git a/grid_gen/seaice_grid_tools/gen_seaice_mesh_partition.py b/seaice/seaice_grid_tools/gen_seaice_mesh_partition.py similarity index 100% rename from grid_gen/seaice_grid_tools/gen_seaice_mesh_partition.py rename to seaice/seaice_grid_tools/gen_seaice_mesh_partition.py diff --git a/grid_gen/seaice_grid_tools/gen_seaice_regions_latitude_bands.py b/seaice/seaice_grid_tools/gen_seaice_regions_latitude_bands.py similarity index 100% rename from grid_gen/seaice_grid_tools/gen_seaice_regions_latitude_bands.py rename to seaice/seaice_grid_tools/gen_seaice_regions_latitude_bands.py From b04f877a63ce2cb6f38c6926e27a572c48000b8e Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 06:43:43 -0600 Subject: [PATCH 034/180] Move 'landice_grid_tools' from grid_gen to landice --- .../landice_grid_tools/README_grid_generation_workflow.txt | 0 {grid_gen => landice}/landice_grid_tools/calibrate_beta.py | 0 .../landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py | 0 .../landice_grid_tools/convert_landice_bitmasks.py | 0 .../landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py | 0 .../create_landice_grid_from_generic_MPAS_grid.py | 0 {grid_gen => landice}/landice_grid_tools/define_cullMask.py | 0 
.../landice_grid_tools/interpolate_to_mpasli_grid.py | 0 .../landice_grid_tools/mark_domain_boundaries_dirichlet.py | 0 .../landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh | 0 {grid_gen => landice}/landice_grid_tools/plot_globalStats.py | 0 {grid_gen => landice}/landice_grid_tools/plot_mass_balance.py | 0 .../prepare_pattyn_temperature_field_for_interpolation.m | 0 13 files changed, 0 insertions(+), 0 deletions(-) rename {grid_gen => landice}/landice_grid_tools/README_grid_generation_workflow.txt (100%) rename {grid_gen => landice}/landice_grid_tools/calibrate_beta.py (100%) rename {grid_gen => landice}/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py (100%) rename {grid_gen => landice}/landice_grid_tools/convert_landice_bitmasks.py (100%) rename {grid_gen => landice}/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py (100%) rename {grid_gen => landice}/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py (100%) rename {grid_gen => landice}/landice_grid_tools/define_cullMask.py (100%) rename {grid_gen => landice}/landice_grid_tools/interpolate_to_mpasli_grid.py (100%) rename {grid_gen => landice}/landice_grid_tools/mark_domain_boundaries_dirichlet.py (100%) rename {grid_gen => landice}/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh (100%) rename {grid_gen => landice}/landice_grid_tools/plot_globalStats.py (100%) rename {grid_gen => landice}/landice_grid_tools/plot_mass_balance.py (100%) rename {grid_gen => landice}/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m (100%) diff --git a/grid_gen/landice_grid_tools/README_grid_generation_workflow.txt b/landice/landice_grid_tools/README_grid_generation_workflow.txt similarity index 100% rename from grid_gen/landice_grid_tools/README_grid_generation_workflow.txt rename to landice/landice_grid_tools/README_grid_generation_workflow.txt diff --git a/grid_gen/landice_grid_tools/calibrate_beta.py b/landice/landice_grid_tools/calibrate_beta.py similarity index 100% rename 
from grid_gen/landice_grid_tools/calibrate_beta.py rename to landice/landice_grid_tools/calibrate_beta.py diff --git a/grid_gen/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py b/landice/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py similarity index 100% rename from grid_gen/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py rename to landice/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py diff --git a/grid_gen/landice_grid_tools/convert_landice_bitmasks.py b/landice/landice_grid_tools/convert_landice_bitmasks.py similarity index 100% rename from grid_gen/landice_grid_tools/convert_landice_bitmasks.py rename to landice/landice_grid_tools/convert_landice_bitmasks.py diff --git a/grid_gen/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py b/landice/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py similarity index 100% rename from grid_gen/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py rename to landice/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py diff --git a/grid_gen/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py b/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py similarity index 100% rename from grid_gen/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py rename to landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py diff --git a/grid_gen/landice_grid_tools/define_cullMask.py b/landice/landice_grid_tools/define_cullMask.py similarity index 100% rename from grid_gen/landice_grid_tools/define_cullMask.py rename to landice/landice_grid_tools/define_cullMask.py diff --git a/grid_gen/landice_grid_tools/interpolate_to_mpasli_grid.py b/landice/landice_grid_tools/interpolate_to_mpasli_grid.py similarity index 100% rename from grid_gen/landice_grid_tools/interpolate_to_mpasli_grid.py rename to landice/landice_grid_tools/interpolate_to_mpasli_grid.py diff --git a/grid_gen/landice_grid_tools/mark_domain_boundaries_dirichlet.py 
b/landice/landice_grid_tools/mark_domain_boundaries_dirichlet.py similarity index 100% rename from grid_gen/landice_grid_tools/mark_domain_boundaries_dirichlet.py rename to landice/landice_grid_tools/mark_domain_boundaries_dirichlet.py diff --git a/grid_gen/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh b/landice/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh similarity index 100% rename from grid_gen/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh rename to landice/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh diff --git a/grid_gen/landice_grid_tools/plot_globalStats.py b/landice/landice_grid_tools/plot_globalStats.py similarity index 100% rename from grid_gen/landice_grid_tools/plot_globalStats.py rename to landice/landice_grid_tools/plot_globalStats.py diff --git a/grid_gen/landice_grid_tools/plot_mass_balance.py b/landice/landice_grid_tools/plot_mass_balance.py similarity index 100% rename from grid_gen/landice_grid_tools/plot_mass_balance.py rename to landice/landice_grid_tools/plot_mass_balance.py diff --git a/grid_gen/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m b/landice/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m similarity index 100% rename from grid_gen/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m rename to landice/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m From dd3be644f4161846cbe798ebd3b06023f1449e5b Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 06:47:49 -0600 Subject: [PATCH 035/180] Move 'moc_southern_boundary_extractor' from python_scripts to ocean --- .../moc_southern_boundary_extractor.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => ocean}/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py (100%) diff --git a/python_scripts/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py 
b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py similarity index 100% rename from python_scripts/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py rename to ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py From 93c3eefac0f3fcb465c882752e0c54ade022ae27 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 06:48:35 -0600 Subject: [PATCH 036/180] Move 'paraview_vtk_field_extractor' from python_scripts to visualization --- .../paraview_vtk_field_extractor/add_earth_sphere.py | 0 .../paraview_vtk_field_extractor/annotate_date.py | 0 .../paraview_vtk_field_extractor/paraview_vtk_field_extractor.py | 0 .../paraview_vtk_field_extractor/utils.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => visualization}/paraview_vtk_field_extractor/add_earth_sphere.py (100%) rename {python_scripts => visualization}/paraview_vtk_field_extractor/annotate_date.py (100%) rename {python_scripts => visualization}/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py (100%) rename {python_scripts => visualization}/paraview_vtk_field_extractor/utils.py (100%) diff --git a/python_scripts/paraview_vtk_field_extractor/add_earth_sphere.py b/visualization/paraview_vtk_field_extractor/add_earth_sphere.py similarity index 100% rename from python_scripts/paraview_vtk_field_extractor/add_earth_sphere.py rename to visualization/paraview_vtk_field_extractor/add_earth_sphere.py diff --git a/python_scripts/paraview_vtk_field_extractor/annotate_date.py b/visualization/paraview_vtk_field_extractor/annotate_date.py similarity index 100% rename from python_scripts/paraview_vtk_field_extractor/annotate_date.py rename to visualization/paraview_vtk_field_extractor/annotate_date.py diff --git a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py similarity index 100% rename from 
python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py rename to visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py diff --git a/python_scripts/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py similarity index 100% rename from python_scripts/paraview_vtk_field_extractor/utils.py rename to visualization/paraview_vtk_field_extractor/utils.py From 9ed47ee1229b3bf03b603f56f563b2f14b799705 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 07:05:02 -0600 Subject: [PATCH 037/180] Move 'processor_decompositions' from python_scripts to mesh_tools --- {python_scripts => mesh_tools}/processor_decompositions/README | 0 .../processor_decompositions/make_partition_files.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => mesh_tools}/processor_decompositions/README (100%) rename {python_scripts => mesh_tools}/processor_decompositions/make_partition_files.py (100%) diff --git a/python_scripts/processor_decompositions/README b/mesh_tools/processor_decompositions/README similarity index 100% rename from python_scripts/processor_decompositions/README rename to mesh_tools/processor_decompositions/README diff --git a/python_scripts/processor_decompositions/make_partition_files.py b/mesh_tools/processor_decompositions/make_partition_files.py similarity index 100% rename from python_scripts/processor_decompositions/make_partition_files.py rename to mesh_tools/processor_decompositions/make_partition_files.py From aa93ca65b598fd1d15f4a962ecbf1e4e34a16b25 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 07:07:42 -0600 Subject: [PATCH 038/180] Move 'decomposition_fields' from python_scripts to output_processing --- {python_scripts => output_processing}/decomposition_fields/README | 0 .../decomposition_fields/decomposition_fields.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => 
output_processing}/decomposition_fields/README (100%) rename {python_scripts => output_processing}/decomposition_fields/decomposition_fields.py (100%) diff --git a/python_scripts/decomposition_fields/README b/output_processing/decomposition_fields/README similarity index 100% rename from python_scripts/decomposition_fields/README rename to output_processing/decomposition_fields/README diff --git a/python_scripts/decomposition_fields/decomposition_fields.py b/output_processing/decomposition_fields/decomposition_fields.py similarity index 100% rename from python_scripts/decomposition_fields/decomposition_fields.py rename to output_processing/decomposition_fields/decomposition_fields.py From 88f3841f3f55c1f1eecf9e535eb5b12619044ffc Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 07:08:23 -0600 Subject: [PATCH 039/180] Move 'grid_quality' from python_scripts to output_processing --- {python_scripts => output_processing}/grid_quality/README | 0 .../grid_quality/grid_quality.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {python_scripts => output_processing}/grid_quality/README (100%) rename {python_scripts => output_processing}/grid_quality/grid_quality.py (100%) diff --git a/python_scripts/grid_quality/README b/output_processing/grid_quality/README similarity index 100% rename from python_scripts/grid_quality/README rename to output_processing/grid_quality/README diff --git a/python_scripts/grid_quality/grid_quality.py b/output_processing/grid_quality/grid_quality.py similarity index 100% rename from python_scripts/grid_quality/grid_quality.py rename to output_processing/grid_quality/grid_quality.py From 913b883c5177d0d9c538f022461213ec0be99ea0 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 09:58:59 -0600 Subject: [PATCH 040/180] Add atmosphere directory --- atmosphere/README | 1 + 1 file changed, 1 insertion(+) create mode 100644 atmosphere/README diff --git a/atmosphere/README b/atmosphere/README new file mode 
100644 index 000000000..ee68f7148 --- /dev/null +++ b/atmosphere/README @@ -0,0 +1 @@ +Readme file for MPAS-Tools atmosphere directory. From c9b6b008928b9bb2d18bf927bf918d3d1040c570 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Sat, 9 Jun 2018 11:45:40 -0600 Subject: [PATCH 041/180] Move 'seaice_grid_tools' from seaice to mesh_tools --- {seaice => mesh_tools}/seaice_grid_tools/README | 0 .../seaice_grid_tools/gen_seaice_mesh_partition.py | 0 .../seaice_grid_tools/gen_seaice_regions_latitude_bands.py | 0 seaice/README | 1 + 4 files changed, 1 insertion(+) rename {seaice => mesh_tools}/seaice_grid_tools/README (100%) rename {seaice => mesh_tools}/seaice_grid_tools/gen_seaice_mesh_partition.py (100%) rename {seaice => mesh_tools}/seaice_grid_tools/gen_seaice_regions_latitude_bands.py (100%) create mode 100644 seaice/README diff --git a/seaice/seaice_grid_tools/README b/mesh_tools/seaice_grid_tools/README similarity index 100% rename from seaice/seaice_grid_tools/README rename to mesh_tools/seaice_grid_tools/README diff --git a/seaice/seaice_grid_tools/gen_seaice_mesh_partition.py b/mesh_tools/seaice_grid_tools/gen_seaice_mesh_partition.py similarity index 100% rename from seaice/seaice_grid_tools/gen_seaice_mesh_partition.py rename to mesh_tools/seaice_grid_tools/gen_seaice_mesh_partition.py diff --git a/seaice/seaice_grid_tools/gen_seaice_regions_latitude_bands.py b/mesh_tools/seaice_grid_tools/gen_seaice_regions_latitude_bands.py similarity index 100% rename from seaice/seaice_grid_tools/gen_seaice_regions_latitude_bands.py rename to mesh_tools/seaice_grid_tools/gen_seaice_regions_latitude_bands.py diff --git a/seaice/README b/seaice/README new file mode 100644 index 000000000..ae26b3447 --- /dev/null +++ b/seaice/README @@ -0,0 +1 @@ +Readme file for MPAS-Tools seaice directory. From 2b6fadaad54d15589e2ba35f64a9282aba1a6e9a Mon Sep 17 00:00:00 2001 From: "Phillip J. 
Wolfram" Date: Thu, 8 Mar 2018 10:44:11 -0700 Subject: [PATCH 042/180] Adds missing vim fold --- visualization/paraview_vtk_field_extractor/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 12de5ef12..83a50b383 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -34,20 +34,20 @@ def open_netcdf(file_name): return nc_file -def is_valid_mesh_var(mesh_file, variable_name): +def is_valid_mesh_var(mesh_file, variable_name): # {{{ if mesh_file is None: return False if variable_name not in mesh_file.variables: return False - return 'Time' not in mesh_file.variables[variable_name].dimensions + return 'Time' not in mesh_file.variables[variable_name].dimensions # }}} -def get_var(variable_name, mesh_file, time_series_file): +def get_var(variable_name, mesh_file, time_series_file): # {{{ if is_valid_mesh_var(mesh_file, variable_name): return mesh_file.variables[variable_name] else: - return time_series_file.variables[variable_name] + return time_series_file.variables[variable_name] # }}} def setup_time_indices(fn_pattern, xtimeName): # {{{ From b8d361c89447968ae62560461d0d205746a40328 Mon Sep 17 00:00:00 2001 From: "Phillip J. 
Wolfram" Date: Thu, 8 Mar 2018 11:40:22 -0700 Subject: [PATCH 043/180] Fixes redundant line --- visualization/paraview_vtk_field_extractor/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 83a50b383..5943f4cb6 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -927,7 +927,6 @@ def _fix_lon_lat_vertices(vertices, verticesOnCell, validVertices, nCells = verticesOnCell.shape[0] nVertices = len(vertices[0]) - xVertex = vertices[0] xVertex = vertices[0] xDiff = xVertex[verticesOnCell] - lonCell.reshape(nCells, 1) From 513d8654ffe0d506a22f9119c34a47a6b88d2d69 Mon Sep 17 00:00:00 2001 From: "Phillip J. Wolfram" Date: Thu, 8 Mar 2018 12:03:48 -0700 Subject: [PATCH 044/180] Adds capability to handle periodic meshes --- .../paraview_vtk_field_extractor/utils.py | 111 ++++++++++++++++++ 1 file changed, 111 insertions(+) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 5943f4cb6..b278d134d 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -594,6 +594,7 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ if lonlat: lonCell = numpy.rad2deg(nc_file.variables['lonCell'][:]) + latCell = numpy.rad2deg(nc_file.variables['latCell'][:]) nCells = len(nc_file.dimensions['nCells']) @@ -610,6 +611,20 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonCell) + if nc_file.is_periodic == 'YES': + if lonlat: + xcoord = lonCell + ycoord = latCell + else: + xcoord = nc_file.variables['xCell'][:] + ycoord = nc_file.variables['yCell'][:] + vertices, verticesOnCell = _fix_periodic_vertices(vertices, + verticesOnCell, + validVertices, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) + connectivity = 
verticesOnCell[validVertices] offsets = numpy.cumsum(nEdgesOnCell, dtype=int) valid_mask = numpy.ones(nCells, bool) @@ -624,6 +639,7 @@ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ if lonlat: lonVertex = numpy.rad2deg(nc_file.variables['lonVertex'][:]) + latVertex = numpy.rad2deg(nc_file.variables['latVertex'][:]) vertexDegree = len(nc_file.dimensions['vertexDegree']) @@ -641,6 +657,22 @@ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonVertex[valid_mask]) + if nc_file.is_periodic == 'YES': + # all remaining entries in cellsOnVertex are valid + validVertices = numpy.ones(cellsOnVertex.shape, bool) + if lonlat: + xcoord = lonVertex[valid_mask] + ycoord = latVertex[valid_mask] + else: + xcoord = nc_file.variables['xVertex'][valid_mask] + ycoord = nc_file.variables['yVertex'][valid_mask] + vertices, cellsOnVertex = _fix_periodic_vertices(vertices, + cellsOnVertex, + validVertices, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) + connectivity = cellsOnVertex.ravel() validCount = cellsOnVertex.shape[0] offsets = vertexDegree*numpy.arange(1, validCount+1) @@ -691,6 +723,20 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ vertsOnCell, validVerts, lonEdge[valid_mask]) + if nc_file.is_periodic == 'YES': + if lonlat: + xcoord = lonEdge[valid_mask] + ycoord = latEdge[valid_mask] + else: + xcoord = nc_file.variables['xEdge'][valid_mask] + ycoord = nc_file.variables['yEdge'][valid_mask] + + vertices, cellsOnVertex = _fix_periodic_vertices(vertices, + vertsOnCell, + validVerts, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) connectivity = vertsOnCell[validVerts] validCount = numpy.sum(numpy.array(validVerts, int), axis=1) @@ -963,4 +1009,69 @@ def _fix_lon_lat_vertices(vertices, verticesOnCell, validVertices, return vertices, verticesOnCell # }}} +def _fix_periodic_vertices(vertices, verticesOnCell, validVertices, + xCell, yCell, xperiod, yperiod): # {{{ + + nCells = 
verticesOnCell.shape[0] + nVertices = len(vertices[0]) + + xVertex = vertices[0] + yVertex = vertices[1] + + xDiff = xVertex[verticesOnCell] - xCell.reshape(nCells, 1) + yDiff = yVertex[verticesOnCell] - yCell.reshape(nCells, 1) + + # which cells have vertices that are out of range? + xoutOfRange = numpy.logical_and(validVertices, + numpy.logical_or(xDiff > xperiod / 2.0, + xDiff < -xperiod / 2.0)) + youtOfRange = numpy.logical_and(validVertices, + numpy.logical_or(yDiff > yperiod / 2.0, + yDiff < -yperiod / 2.0)) + + xcellsOutOfRange = numpy.any(xoutOfRange, axis=1) + ycellsOutOfRange = numpy.any(youtOfRange, axis=1) + + xvalid = validVertices[xcellsOutOfRange, :] + yvalid = validVertices[ycellsOutOfRange, :] + + xverticesToChange = numpy.zeros(verticesOnCell.shape, bool) + xverticesToChange[xcellsOutOfRange, :] = xvalid + + yverticesToChange = numpy.zeros(verticesOnCell.shape, bool) + yverticesToChange[ycellsOutOfRange, :] = yvalid + + xDiff = xDiff[xcellsOutOfRange, :][xvalid] + yDiff = yDiff[ycellsOutOfRange, :][yvalid] + xvoc = verticesOnCell[xcellsOutOfRange, :][xvalid] + yvoc = verticesOnCell[ycellsOutOfRange, :][yvalid] + + xnVerticesToAdd = numpy.count_nonzero(xvalid) + ynVerticesToAdd = numpy.count_nonzero(yvalid) + + xverticesToAdd = numpy.arange(xnVerticesToAdd) + nVertices + xv = xVertex[xvoc] + verticesOnCell[xverticesToChange] = xverticesToAdd + + yverticesToAdd = numpy.arange(ynVerticesToAdd) + \ + (nVertices + xnVerticesToAdd) + yv = yVertex[yvoc] + verticesOnCell[yverticesToChange] = yverticesToAdd + + # need to shift points outside periodic domain (assumes that mesh is only within one period) + # can use mod if this is not the case in general + xmask = xDiff > xperiod / 2.0 + xv[xmask] -= xperiod + xmask = xDiff < -xperiod /2.0 + xv[xmask] += xperiod + ymask = yDiff > yperiod / 2.0 + yv[ymask] -= yperiod + ymask = yDiff < -yperiod / 2.0 + yv[ymask] += yperiod + + vertices = (numpy.append(numpy.append(vertices[0], xv), vertices[0][yvoc]), + 
numpy.append(numpy.append(vertices[1], vertices[1][xvoc]), yv), + numpy.append(numpy.append(vertices[2], vertices[2][xvoc]), vertices[2][yvoc])) + + return vertices, verticesOnCell # }}} # vim: set expandtab: From 5bcaea2adbb7abf2c97eb43f3845884aef72542c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 21 May 2018 12:40:59 +0200 Subject: [PATCH 045/180] Break _fix_periodic_vertices into 2 1D calls This makes the simpler and also fixes an issue where cells with vertices out of range in *both* periodic directions were not being handled correctly. --- .../paraview_vtk_field_extractor/utils.py | 111 +++++++++--------- 1 file changed, 55 insertions(+), 56 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index b278d134d..24f88772d 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -1009,69 +1009,68 @@ def _fix_lon_lat_vertices(vertices, verticesOnCell, validVertices, return vertices, verticesOnCell # }}} + def _fix_periodic_vertices(vertices, verticesOnCell, validVertices, - xCell, yCell, xperiod, yperiod): # {{{ + xCell, yCell, xperiod, yperiod): # {{{ + + vertices, verticesOnCell = _fix_periodic_vertices_1D( + vertices, verticesOnCell, validVertices, xCell, xperiod, dim=0) + vertices, verticesOnCell = _fix_periodic_vertices_1D( + vertices, verticesOnCell, validVertices, yCell, yperiod, dim=1) + + return vertices, verticesOnCell # }}} + + +def _fix_periodic_vertices_1D(vertices, verticesOnCell, validVertices, + coordCell, coordPeriod, dim): # {{{ nCells = verticesOnCell.shape[0] nVertices = len(vertices[0]) - xVertex = vertices[0] - yVertex = vertices[1] + coordVertex = vertices[dim] - xDiff = xVertex[verticesOnCell] - xCell.reshape(nCells, 1) - yDiff = yVertex[verticesOnCell] - yCell.reshape(nCells, 1) + coordDiff = coordVertex[verticesOnCell] - coordCell.reshape(nCells, 1) # which cells have vertices 
that are out of range? - xoutOfRange = numpy.logical_and(validVertices, - numpy.logical_or(xDiff > xperiod / 2.0, - xDiff < -xperiod / 2.0)) - youtOfRange = numpy.logical_and(validVertices, - numpy.logical_or(yDiff > yperiod / 2.0, - yDiff < -yperiod / 2.0)) - - xcellsOutOfRange = numpy.any(xoutOfRange, axis=1) - ycellsOutOfRange = numpy.any(youtOfRange, axis=1) - - xvalid = validVertices[xcellsOutOfRange, :] - yvalid = validVertices[ycellsOutOfRange, :] - - xverticesToChange = numpy.zeros(verticesOnCell.shape, bool) - xverticesToChange[xcellsOutOfRange, :] = xvalid - - yverticesToChange = numpy.zeros(verticesOnCell.shape, bool) - yverticesToChange[ycellsOutOfRange, :] = yvalid - - xDiff = xDiff[xcellsOutOfRange, :][xvalid] - yDiff = yDiff[ycellsOutOfRange, :][yvalid] - xvoc = verticesOnCell[xcellsOutOfRange, :][xvalid] - yvoc = verticesOnCell[ycellsOutOfRange, :][yvalid] - - xnVerticesToAdd = numpy.count_nonzero(xvalid) - ynVerticesToAdd = numpy.count_nonzero(yvalid) - - xverticesToAdd = numpy.arange(xnVerticesToAdd) + nVertices - xv = xVertex[xvoc] - verticesOnCell[xverticesToChange] = xverticesToAdd - - yverticesToAdd = numpy.arange(ynVerticesToAdd) + \ - (nVertices + xnVerticesToAdd) - yv = yVertex[yvoc] - verticesOnCell[yverticesToChange] = yverticesToAdd - - # need to shift points outside periodic domain (assumes that mesh is only within one period) - # can use mod if this is not the case in general - xmask = xDiff > xperiod / 2.0 - xv[xmask] -= xperiod - xmask = xDiff < -xperiod /2.0 - xv[xmask] += xperiod - ymask = yDiff > yperiod / 2.0 - yv[ymask] -= yperiod - ymask = yDiff < -yperiod / 2.0 - yv[ymask] += yperiod - - vertices = (numpy.append(numpy.append(vertices[0], xv), vertices[0][yvoc]), - numpy.append(numpy.append(vertices[1], vertices[1][xvoc]), yv), - numpy.append(numpy.append(vertices[2], vertices[2][xvoc]), vertices[2][yvoc])) + coordOutOfRange = numpy.logical_and( + validVertices, + numpy.logical_or(coordDiff > coordPeriod / 2.0, + coordDiff < 
-coordPeriod / 2.0)) + + coordCellsOutOfRange = numpy.any(coordOutOfRange, axis=1) + + coordValid = validVertices[coordCellsOutOfRange, :] + + coordVerticesToChange = numpy.zeros(verticesOnCell.shape, bool) + coordVerticesToChange[coordCellsOutOfRange, :] = coordValid + + coordDiff = coordDiff[coordCellsOutOfRange, :][coordValid] + coordVOC = verticesOnCell[coordCellsOutOfRange, :][coordValid] + + coordNVerticesToAdd = numpy.count_nonzero(coordValid) + + print coordNVerticesToAdd + + coordVerticesToAdd = numpy.arange(coordNVerticesToAdd) + nVertices + coordV = coordVertex[coordVOC] + verticesOnCell[coordVerticesToChange] = coordVerticesToAdd + + # need to shift points outside periodic domain (assumes that mesh is only + # within one period) can use mod if this is not the case in general + coordMask = coordDiff > coordPeriod / 2.0 + coordV[coordMask] -= coordPeriod + coordMask = coordDiff < -coordPeriod / 2.0 + coordV[coordMask] += coordPeriod + + outVertices = [] + for outDim in range(3): + if outDim == dim: + outVertices.append(numpy.append(vertices[outDim], coordV)) + else: + outVertices.append(numpy.append(vertices[outDim], + vertices[outDim][coordVOC])) + + return tuple(outVertices), verticesOnCell # }}} + - return vertices, verticesOnCell # }}} # vim: set expandtab: From 1cb6485a83c6e03f089a0f1fc9f0c73cbb892954 Mon Sep 17 00:00:00 2001 From: Divya Jaganathan Date: Fri, 15 Jun 2018 16:03:33 -0600 Subject: [PATCH 046/180] Add performance scripts --- .../call_to_performance_testing.py | 85 +++++++++++ .../generate_graph.info_with_wgts.py | 82 ++++++++++ .../performance_testing.py | 144 ++++++++++++++++++ ocean/performance_testing/plot_from_files.py | 83 ++++++++++ 4 files changed, 394 insertions(+) create mode 100644 ocean/performance_testing/call_to_performance_testing.py create mode 100644 ocean/performance_testing/generate_graph.info_with_wgts.py create mode 100644 ocean/performance_testing/performance_testing.py create mode 100644 
ocean/performance_testing/plot_from_files.py diff --git a/ocean/performance_testing/call_to_performance_testing.py b/ocean/performance_testing/call_to_performance_testing.py new file mode 100644 index 000000000..5dd713d92 --- /dev/null +++ b/ocean/performance_testing/call_to_performance_testing.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python + +""" +Name: call_to_performance_testing.py +Author: Divya Jaganathan +Date: July 6, 2018 + +command format: python call_to_performance_testing.py -c -M -m -p + +""" +import subprocess +import argparse +import shlex +import numpy as np +import os + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-c", + "--cpu_type", + dest="cpu_type", + help="If cori, enter cori-haswell/cori-knl", + default=os.uname()[1][0:2]) +parser.add_argument( + "-M", + "--max_tasks", + dest="max_tasks", + help="Maximum number of tasks", + required=True) +parser.add_argument( + "-m", + "--min_tasks", + dest="min_tasks", + help="Minimum number of tasks", + default=2) +parser.add_argument( + "-r", + "--resolution", + dest="resolution", + help="Resolution ", + default="QU") +args = parser.parse_args() + +cpu_type = args.cpu_type +max_tasks = int(args.max_tasks) +min_tasks = int(args.min_tasks) +res = args.resolution + +job_id = res + "_perf_" + str(max_tasks) +output_name = "slurm_" + job_id + ".out" + +if cpu_type == 'gr': + cores_per_node = 36.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) +elif cpu_type == 'cori-haswell': + cores_per_node = 32.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 
'performance_testing.py' %d %d %s %d" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) +elif cpu_type == 'cori-knl': + cores_per_node = 68.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) +elif cpu_type == 'ed': + cores_per_node = 24.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) +else: + print "Invalid Machine or have not mentioned haswell or knl on Cori" + + +s_args = shlex.split(runcommand) +print "running", ''.join(s_args) + +subprocess.check_call(s_args) diff --git a/ocean/performance_testing/generate_graph.info_with_wgts.py b/ocean/performance_testing/generate_graph.info_with_wgts.py new file mode 100644 index 000000000..864c23b16 --- /dev/null +++ b/ocean/performance_testing/generate_graph.info_with_wgts.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +""" +Name: generate_graph.info_with_wgts.py +Author: Divya Jaganathan +Date: 17 July, 2018 + +Assigns vertex weight to each horizontal cell in graph.info (in gpmetis format) +Reads: , +Writes: graph.info_with_wgts_ + +Flags(s) in call-command: + -x or --vertex_weight=, default=0.0 + -d or --data_file=, default=init.nc + -g or --graph_file=, default=graph.info + +""" + +import numpy as np +import netCDF4 as nc4 +from netCDF4 import MFDataset +import argparse + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-x", + "--vertex_weight", + dest="vertex_weight", + help="Exponent factor in the weighing function defining dependence on depth (maxLevelCell)", + 
default=0.0) + +parser.add_argument( + "-d", + "--data_file", + dest="data_filename", + help="File containing the maxLevelCell data (Default: init.nc)", + default="init.nc") + +parser.add_argument( + "-g", + "--graph_file", + dest="graph_filename", + help="Unweighed graph file (Default: graph.info)", + default="graph.info") + + +args = parser.parse_args() + +depth_dependence_factor_x = float(args.vertex_weight) +graph_filename = args.graph_filename +data_filename = args.data_filename + +file = MFDataset(data_filename) + +levels = file.variables['maxLevelCell'][:] + +minimum = np.amin(levels) + +ratio = np.divide(levels, minimum) +weights = np.ceil((np.float_power(ratio, depth_dependence_factor_x))) +weights = weights.astype(int) +file.close() + +filename = "graph.info_with_wgts_" + str(depth_dependence_factor_x) +fr = open(graph_filename, 'r') +fw = open(filename, 'w') + +counter = -1 + +for line in fr: + if counter == -1: + temp = line.split("\n", 1)[0] + fw.write("%s 010 \n" % temp) + else: + temp = line.split("\n", 1)[0] + fw.write("%d %s \n" % (weights[counter], temp)) + counter = counter + 1 + +fr.close() +fw.close() diff --git a/ocean/performance_testing/performance_testing.py b/ocean/performance_testing/performance_testing.py new file mode 100644 index 000000000..825fdc439 --- /dev/null +++ b/ocean/performance_testing/performance_testing.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python + +""" + Name: performance_testing.py + Author: Divya Jaganathan + Date: 6 July, 2018 + +""" + + +import subprocess +import numpy as np +import re +import sys +import datetime +from time import strftime +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os +import shlex + +os.environ['OMP_NUM_THREADS'] = '1' +os.environ['OMP_PLACES'] = 'threads' + +timenow = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + +time_fr = open("namelist.ocean", 'r') + +for line in time_fr: + m1 = re.search("config_run_duration", line) + if m1: + parts = line.split("=", 
1)[1] + subparts = re.split(':|\'|_', parts) + timeparts = subparts[2:5] + dateparts = re.split('-', subparts[1]) + if len(dateparts) == 1: + simulated_time_in_sec = int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + else: + simulated_time_in_sec = int(dateparts[2]) * 24 * 60 * 60 + int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + +time_fr.close() + +cells_fr = open("graph.info", 'r') +cells = str(cells_fr.readline().split(" ")[0]) + +nprocs_max = int(sys.argv[1]) +nprocs_min = int(sys.argv[2]) +cpu_type = sys.argv[3] +cores_per_node = float(sys.argv[4]) + +plane_size = str(int(cores_per_node)) + +niter = int(np.log2(nprocs_max)) - int(np.log2(nprocs_min)) + 1 +nsamples_per_procnum = 5 + +time = np.zeros(shape=(1, niter)) +procs = np.zeros(shape=(1, niter)) +SYPD = np.zeros(shape=(1, niter)) + +i = nprocs_max +j = niter + +writefilename = "data_" + cpu_type + "_" + \ + str(nprocs_max) + "_" + timenow + ".txt" +fw = open(writefilename, 'a+') +fw.write( + 'Time: %s \nMachine: %s\nNo. 
of Cells: %s\nRun duration: %s\nRun time in sec: %d\nFormat: #Procs|Sample Runs|Average|SYPD \n' % + (timenow, cpu_type, cells, timeparts, simulated_time_in_sec)) + +while i >= nprocs_min: + + local_N = int(np.ceil(i / cores_per_node)) + sample = nsamples_per_procnum + foldername = "perf_p" + str(i) + "_gr_openmpi" + subprocess.check_call(['rm', '-rf', foldername]) + subprocess.check_call(['mkdir', foldername]) + fw.write('%s \t' % i) + sum = 0 + + # Generate the log and graph files + subprocess.check_call(['./metis', 'graph.info', str(i)]) + print "metis" + str(i) + "completed" + + while sample >= 1: + args = ['srun', + '-N', + str(local_N), + '-n', + str(i), + '--cpu_bind=verbose,core', + '--distribution=plane=%s' % plane_size, + './ocean_model'] + print "running", ''.join(args) + subprocess.check_call(args) + + # Search for time integration and write to a file + fr = open("log.ocean.0000.out", 'r') + for line in fr: + m = re.search("2 time integration", line) + if m: + numbers = line.split("integration", 1)[1] + first_number = numbers.split()[0] + fw.write('%s \t' % first_number) + sum = sum + float(first_number) + + fname = "log_p" + str(i) + "_s" + str(sample) + filepath = foldername + "/" + fname + sample = sample - 1 + subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) + + average = sum / nsamples_per_procnum + time[0][j - 1] = average + procs[0][j - 1] = i + SYPD[0][j - 1] = simulated_time_in_sec / (365 * average) + fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j - 1]))) + i = i / 2 + j = j - 1 + +# plotting .. 
+ +subprocess.check_call(['mkdir', '-p', 'data_figures']) + +perfect = SYPD[0][0] / procs[0][0] * procs +plt.loglog(procs[0], SYPD[0], '-or', label='grizzly') +plt.loglog(procs[0], perfect[0], '--k', label='perfect scaling') +plt.title(r'MPAS-Ocean Performance Curve (Broadwell 36-cores No HT)') +plt.xlabel('Number of MPI ranks') +plt.ylabel('Simulated Years Per Day (SYPD)') +plt.legend(loc='upper left') +plt.grid() +plt.xlim((1, nprocs_max * 2)) +plt.tight_layout() +figurenamepath = "data_figures/fig_" + cpu_type + \ + str(nprocs_max) + "_" + timenow + ".png" +plt.savefig(figurenamepath) +subprocess.check_call(['mv', writefilename, 'data_figures']) + +fr.close() +fw.close() + +# End Version diff --git a/ocean/performance_testing/plot_from_files.py b/ocean/performance_testing/plot_from_files.py new file mode 100644 index 000000000..b2998315e --- /dev/null +++ b/ocean/performance_testing/plot_from_files.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python + +""" +Name: plot_from_files.py +Author: Divya Jaganathan +Date: 26 July, 2018 + +Plots a single plot of different performance curves from different performance_data text files in a folder + +""" + +import glob +import matplotlib +matplotlib.use('agg') +import numpy as np +import matplotlib.pyplot as plt +import subprocess + +path = "/lustre/scratch2/turquoise/divjag2005/case_runs/performance_results_4096/*.txt" + +files = glob.glob(path) +files = [files[4], files[0], files[1], files[3], files[2]] +num_files = len(files) +print(num_files) + +no_res_in_a_file = 9 +file_counter = 0 + +array_x = np.zeros(shape=(num_files, no_res_in_a_file)) +array_y = np.zeros(shape=(num_files, no_res_in_a_file)) + +colors = ["g", "k", "m", "r", "b"] +labels = [ + "uniform 60km", + "variable 60to30km", + "uniform 30km", + "variable 60to15km", + "uniform 15km"] + + +for file in files: + + f = open(file, 'r') + ob = f.read().split('\n') + num_lines = len(ob) - 1 + line_counter = 6 + i = 0 + rank_column = 0 + SYPD_column = 7 + + while line_counter 
< num_lines: + array_x[file_counter][i] = ob[line_counter].split('\t')[rank_column] + array_y[file_counter][i] = ob[line_counter].split('\t')[SYPD_column] + line_counter = line_counter + 1 + i = i + 1 + + font = {'weight': 'bold', + 'size': '14'} + + matplotlib.rc('font', **font) + plt.loglog(array_x[file_counter][0:i - + 1], array_y[file_counter][0:i - + 1], '-o', color=colors[file_counter], label="%s" % + labels[file_counter]) + perfect = (array_y[file_counter][i - 1] / + array_x[file_counter][i - 1]) * array_x[file_counter][0:i - 1] + plt.loglog( + array_x[file_counter][0:i - 1], + perfect, + '--', + color=colors[file_counter]) + file_counter = file_counter + 1 + f.close() + +plt.xlabel('Number of MPI ranks', fontsize=14, weight='bold') +plt.ylabel('SYPD', fontsize=14, weight='bold') +plt.title(' 36 Core Broadwell (No HT)', fontsize=14, weight='bold') +plt.xlim((10, 10000)) +plt.ylim((0.05, 4000)) +plt.tight_layout() +plt.grid() +plt.legend(title='resolution', loc='upper left') +plt.savefig('result.png') From 6476995af386e079ea20b09a1c21697312dd374e Mon Sep 17 00:00:00 2001 From: Divya Jaganathan Date: Mon, 6 Aug 2018 11:34:24 -0600 Subject: [PATCH 047/180] Included Comments --- .../call_to_performance_testing.py | 10 +++++-- .../generate_graph.info_with_wgts.py | 2 ++ .../performance_testing.py | 26 ++++++++++++++++++- 3 files changed, 35 insertions(+), 3 deletions(-) diff --git a/ocean/performance_testing/call_to_performance_testing.py b/ocean/performance_testing/call_to_performance_testing.py index 5dd713d92..c8344a76e 100644 --- a/ocean/performance_testing/call_to_performance_testing.py +++ b/ocean/performance_testing/call_to_performance_testing.py @@ -5,7 +5,9 @@ Author: Divya Jaganathan Date: July 6, 2018 -command format: python call_to_performance_testing.py -c -M -m -p +Submits request for a batch job to carry out successive performance runs starting from maximum number of tasks + +command format: python call_to_performance_testing.py -c -M -m -r """ 
import subprocess @@ -51,6 +53,10 @@ job_id = res + "_perf_" + str(max_tasks) output_name = "slurm_" + job_id + ".out" +# NODES_REQUIRED to request for resources is calculated assuming no hyperthreads. +# Changes to this can be implemented by changing cores_per_node specific +# to the machine + if cpu_type == 'gr': cores_per_node = 36.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) @@ -76,7 +82,7 @@ runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) else: - print "Invalid Machine or have not mentioned haswell or knl on Cori" + print "Invalid machine or have not mentioned haswell or knl on Cori" s_args = shlex.split(runcommand) diff --git a/ocean/performance_testing/generate_graph.info_with_wgts.py b/ocean/performance_testing/generate_graph.info_with_wgts.py index 864c23b16..912aeac4d 100644 --- a/ocean/performance_testing/generate_graph.info_with_wgts.py +++ b/ocean/performance_testing/generate_graph.info_with_wgts.py @@ -72,6 +72,8 @@ for line in fr: if counter == -1: temp = line.split("\n", 1)[0] + # 010 indicates that the graph.info file is formatted to include the + # cell weights fw.write("%s 010 \n" % temp) else: temp = line.split("\n", 1)[0] diff --git a/ocean/performance_testing/performance_testing.py b/ocean/performance_testing/performance_testing.py index 825fdc439..1297f7cf8 100644 --- a/ocean/performance_testing/performance_testing.py +++ b/ocean/performance_testing/performance_testing.py @@ -5,6 +5,20 @@ Author: Divya Jaganathan Date: 6 July, 2018 +This script is automatically called by call_to_performance_testing.py to run a batch job to get performance plots and data + +This script can also be used for an interactive job submission using the following command format: + +command format (to run an interactive job) : python performance_testing.py + +Access files required to run this script: + 1. 
namelist.ocean + 2. graph.info + 3. metis file (rename gpmetis to metis or vice-versa in this script when creating a soft link) + 4. ocean_model (executable file) + +NOTE: When running a large number of tasks (>10k), check the name of log.ocean.0000.out file generated - no. of zeros in the file name changes + """ @@ -20,11 +34,15 @@ import os import shlex +# Setting OMP variables for NO multithreading + os.environ['OMP_NUM_THREADS'] = '1' os.environ['OMP_PLACES'] = 'threads' timenow = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") +# To obtain the run duration used in calculating SYPD + time_fr = open("namelist.ocean", 'r') for line in time_fr: @@ -43,16 +61,22 @@ time_fr.close() +# To store the details on number of cells (~ resolution) + cells_fr = open("graph.info", 'r') cells = str(cells_fr.readline().split(" ")[0]) +cells_fr.close() nprocs_max = int(sys.argv[1]) nprocs_min = int(sys.argv[2]) cpu_type = sys.argv[3] cores_per_node = float(sys.argv[4]) +# plane_size is used to define the plane_distribution flag in srun plane_size = str(int(cores_per_node)) +# Performance data evaluation begins here - + niter = int(np.log2(nprocs_max)) - int(np.log2(nprocs_min)) + 1 nsamples_per_procnum = 5 @@ -141,4 +165,4 @@ fr.close() fw.close() -# End Version +# End From e7bc580d3639cfc0468766d6a49a873c1a41330b Mon Sep 17 00:00:00 2001 From: Divya Jaganathan Date: Mon, 6 Aug 2018 15:28:24 -0600 Subject: [PATCH 048/180] Included files for generating weighted graph.info files and subsequently do performance tests --- .../weights/call_to_performance_testing.py | 101 ++++++++++ .../weights/generate_graph.info_with_wgts.py | 84 ++++++++ .../weights/performance_testing.py | 181 ++++++++++++++++++ 3 files changed, 366 insertions(+) create mode 100644 ocean/performance_testing/weights/call_to_performance_testing.py create mode 100644 ocean/performance_testing/weights/generate_graph.info_with_wgts.py create mode 100644 ocean/performance_testing/weights/performance_testing.py diff 
--git a/ocean/performance_testing/weights/call_to_performance_testing.py b/ocean/performance_testing/weights/call_to_performance_testing.py new file mode 100644 index 000000000..40bd14fd2 --- /dev/null +++ b/ocean/performance_testing/weights/call_to_performance_testing.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python + +""" +Name: call_to_performance_testing.py +Author: Divya Jaganathan +Date: July 6, 2018 + +Submits request for a batch job to carry out successive performance runs starting from maximum number of tasks + +command format: python call_to_performance_testing.py -c -M -m -r -x + + +""" +import subprocess +import argparse +import shlex +import numpy as np +import os + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-c", + "--cpu_type", + dest="cpu_type", + help="If cori, enter cori-haswell/cori-knl", + default=os.uname()[1][0:2]) +parser.add_argument( + "-M", + "--max_tasks", + dest="max_tasks", + help="Maximum number of tasks", + required=True) +parser.add_argument( + "-m", + "--min_tasks", + dest="min_tasks", + help="Minimum number of tasks", + default=2) +parser.add_argument( + "-r", + "--resolution", + dest="resolution", + help="Resolution ", + default="QU") +parser.add_argument( + "-x", + "--max_vertex_weight", + dest="max_vertex_weight", + help="Maximum Vertex Weight, x", + default="0.0") + +args = parser.parse_args() + +cpu_type = args.cpu_type +max_tasks = int(args.max_tasks) +min_tasks = int(args.min_tasks) +res = args.resolution +x = args.max_vertex_weight + +job_id = res + "_perf_" + str(max_tasks) +output_name = "slurm_" + job_id + ".out" + + +# NODES_REQUIRED to request for resources is calculated assuming no hyperthreads. 
+# Changes to this can be implemented by changing cores_per_node specific +# to the machine + +if cpu_type == 'gr': + cores_per_node = 36.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +elif cpu_type == 'cori-haswell': + cores_per_node = 32.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +elif cpu_type == 'cori-knl': + cores_per_node = 68.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +elif cpu_type == 'ed': + cores_per_node = 24.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +else: + print "Invalid machine or have not mentioned haswell or knl on Cori" + + +s_args = shlex.split(runcommand) +print "running", ''.join(s_args) + +subprocess.check_call(s_args) diff --git a/ocean/performance_testing/weights/generate_graph.info_with_wgts.py b/ocean/performance_testing/weights/generate_graph.info_with_wgts.py new file mode 100644 index 000000000..912aeac4d --- /dev/null +++ b/ocean/performance_testing/weights/generate_graph.info_with_wgts.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python + +""" +Name: 
generate_graph.info_with_wgts.py +Author: Divya Jaganathan +Date: 17 July, 2018 + +Assigns vertex weight to each horizontal cell in graph.info (in gpmetis format) +Reads: , +Writes: graph.info_with_wgts_ + +Flags(s) in call-command: + -x or --vertex_weight=, default=0.0 + -d or --data_file=, default=init.nc + -g or --graph_file=, default=graph.info + +""" + +import numpy as np +import netCDF4 as nc4 +from netCDF4 import MFDataset +import argparse + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-x", + "--vertex_weight", + dest="vertex_weight", + help="Exponent factor in the weighing function defining dependence on depth (maxLevelCell)", + default=0.0) + +parser.add_argument( + "-d", + "--data_file", + dest="data_filename", + help="File containing the maxLevelCell data (Default: init.nc)", + default="init.nc") + +parser.add_argument( + "-g", + "--graph_file", + dest="graph_filename", + help="Unweighed graph file (Default: graph.info)", + default="graph.info") + + +args = parser.parse_args() + +depth_dependence_factor_x = float(args.vertex_weight) +graph_filename = args.graph_filename +data_filename = args.data_filename + +file = MFDataset(data_filename) + +levels = file.variables['maxLevelCell'][:] + +minimum = np.amin(levels) + +ratio = np.divide(levels, minimum) +weights = np.ceil((np.float_power(ratio, depth_dependence_factor_x))) +weights = weights.astype(int) +file.close() + +filename = "graph.info_with_wgts_" + str(depth_dependence_factor_x) +fr = open(graph_filename, 'r') +fw = open(filename, 'w') + +counter = -1 + +for line in fr: + if counter == -1: + temp = line.split("\n", 1)[0] + # 010 indicates that the graph.info file is formatted to include the + # cell weights + fw.write("%s 010 \n" % temp) + else: + temp = line.split("\n", 1)[0] + fw.write("%d %s \n" % (weights[counter], temp)) + counter = counter + 1 + +fr.close() +fw.close() diff --git 
a/ocean/performance_testing/weights/performance_testing.py b/ocean/performance_testing/weights/performance_testing.py new file mode 100644 index 000000000..582d7a36d --- /dev/null +++ b/ocean/performance_testing/weights/performance_testing.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python + +""" + Name: performance_testing.py + Author: Divya Jaganathan + Date: 6 July, 2018 + +This script is automatically called by call_to_performance_testing.py to run a batch job to get performance plots and data + +Access files required to run this script: + 1. namelist.ocean + 2. graph.info + 3. metis file (rename gpmetis to metis or vice-versa in this script when creating a soft link) + 4. ocean_model (executable file) + +NOTE: When running a large number of tasks (>10k), check the name of log.ocean.0000.out file generated - no. of zeros in the file name changes + +""" + + +import subprocess +import numpy as np +import re +import sys +import datetime +from time import strftime +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os +import shlex + +# Setting OMP variables for NO multithreading + +os.environ['OMP_NUM_THREADS'] = '1' +os.environ['OMP_PLACES'] = 'threads' + +timenow = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + +# To obtain the run duration used in calculating SYPD + +time_fr = open("namelist.ocean", 'r') + +for line in time_fr: + m1 = re.search("config_run_duration", line) + if m1: + parts = line.split("=", 1)[1] + subparts = re.split(':|\'|_', parts) + timeparts = subparts[2:5] + dateparts = re.split('-', subparts[1]) + if len(dateparts) == 1: + simulated_time_in_sec = int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + else: + simulated_time_in_sec = int(dateparts[2]) * 24 * 60 * 60 + int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + +time_fr.close() + +# To store the details on number of cells (~ resolution) + +cells_fr = open("graph.info", 'r') +cells = 
str(cells_fr.readline().split(" ")[0]) +cells_fr.close() + +nprocs_max = int(sys.argv[1]) +nprocs_min = int(sys.argv[2]) +cpu_type = sys.argv[3] +cores_per_node = float(sys.argv[4]) +x = float(sys.argv[5]) + +# plane_size is used to define the plane_distribution flag in srun +plane_size = str(int(cores_per_node)) + +# Performance data evaluation begins here - + +niter = int(np.log2(nprocs_max)) - int(np.log2(nprocs_min)) + 1 +nsamples_per_procnum = 5 + +time = np.zeros(shape=(1, niter)) +procs = np.zeros(shape=(1, niter)) +SYPD = np.zeros(shape=(1, niter)) + +#i = nprocs_max +#j = niter + +writefilename = "data_" + cpu_type + "_" + \ + str(nprocs_max) + "_" + timenow + ".txt" +fw = open(writefilename, 'a+') +fw.write( + 'Time: %s \nMachine: %s\nNo. of Cells: %s\nRun duration: %s\nRun time in sec: %d\nFormat: #Procs|Sample Runs|Average|SYPD \n' % + (timenow, cpu_type, cells, timeparts, simulated_time_in_sec)) + + +while x>=0: + + graph_call = "python generate_graph.info_with_wgts.py -d init.nc -g graph.info -x %s" % x + g_args=shlex.split(graph_call) + print "running", ''.join(g_args) + subprocess.check_call(g_args) + + foldername_wgt = "weight"+str(x) + subprocess.check_call(['mkdir', foldername_wgt]) + graph_filename = "graph.info_with_wgts_"+str(x) + i = nprocs_max + j = niter + + while i >= nprocs_min: + + local_N = int(np.ceil(i / cores_per_node)) + sample = nsamples_per_procnum + foldername = foldername_wgt+ "/perf_p" + str(i) + "_gr_openmpi" + subprocess.check_call(['mkdir', '-p', foldername]) + fw.write('%s \t' % i) + sum = 0 + subprocess.check_call(['./metis', graph_filename, str(i)]) + print "metis" + str(i) + "completed" + graph_part_name = graph_filename + ".part."+str(i) + to_name="graph.info.part."+str(i) + subprocess.check_call(['mv',graph_part_name, to_name]) + + while sample >= 1: + args = ['srun', + '-N', + str(local_N), + '-n', + str(i), + '--cpu_bind=verbose,core', + '--distribution=plane=%s' % plane_size, + './ocean_model'] + print "running", 
''.join(args) + subprocess.check_call(args) + + # Search for time integration and write to a file + fr = open("log.ocean.0000.out", 'r') + for line in fr: + m = re.search("2 time integration", line) + if m: + numbers = line.split("integration", 1)[1] + first_number = numbers.split()[0] + fw.write('%s \t' % first_number) + sum = sum + float(first_number) + + fname = "log_p" + str(i) + "_s" + str(sample) + filepath = foldername + "/" + fname + sample = sample - 1 + subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) + + average = sum / nsamples_per_procnum + time[0][j - 1] = average + procs[0][j - 1] = i + SYPD[0][j - 1] = simulated_time_in_sec / (365 * average) + fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j - 1]))) + i = i / 2 + j = j - 1 + subprocess.check_call(['mv',to_name,foldername]) + x=x-0.5 + +# plotting .. + +subprocess.check_call(['mkdir', '-p', 'data_figures']) + +perfect = SYPD[0][0] / procs[0][0] * procs +plt.loglog(procs[0], SYPD[0], '-or', label=str(x)) +plt.loglog(procs[0], perfect[0], '--k', label='perfect scaling') +plt.title(r'MPAS-Ocean Performance Curve (Broadwell 36-cores No HT)') +plt.xlabel('Number of MPI ranks') +plt.ylabel('Simulated Years Per Day (SYPD)') +plt.legend(loc='upper left') +plt.grid() +plt.xlim((1, nprocs_max * 2)) +plt.tight_layout() +figurenamepath = "data_figures/fig_" + cpu_type + \ + str(nprocs_max) + "_" + timenow + ".png" +plt.savefig(figurenamepath) +subprocess.check_call(['mv', writefilename, 'data_figures']) + +fr.close() +fw.close() + +# End From d79dcb6c49e3f01d6ba3c096168f6aa085fca68e Mon Sep 17 00:00:00 2001 From: "Phillip J. Wolfram" Date: Wed, 8 Aug 2018 13:50:35 -0600 Subject: [PATCH 049/180] Allows extraction of single dimension time coord xtime is typically a 2D string. timeMonthly_avg_daysSinceStartOfSim in contrast is a float corresponding to days of simulation. This commit allows extraction of fields with timeMonthly_avg_daysSinceStartOfSim. 
--- .../paraview_vtk_field_extractor.py | 17 +++++++++++------ .../paraview_vtk_field_extractor/utils.py | 10 +++++++--- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 0b222e43e..74ca0f6d5 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -196,12 +196,17 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, raise ValueError("xtime variable name {} not found in " "{}".format(xtimeName, time_series_file)) var = time_series_file.variables[xtimeName] - xtime = ''.join(var[local_time_indices[time_index], :]).strip() - date = datetime(int(xtime[0:4]), int(xtime[5:7]), - int(xtime[8:10]), int(xtime[11:13]), - int(xtime[14:16]), int(xtime[17:19])) - years = date2num(date, units='days since 0000-01-01', - calendar='noleap')/365. + if len(var.shape) == 2: + xtime = ''.join(var[local_time_indices[time_index], :]).strip() + date = datetime(int(xtime[0:4]), int(xtime[5:7]), + int(xtime[8:10]), int(xtime[11:13]), + int(xtime[14:16]), int(xtime[17:19])) + years = date2num(date, units='days since 0000-01-01', + calendar='noleap')/365. + else: + xtime = var[local_time_indices[time_index]] + years = xtime/365. 
+ xtime = str(xtime) # write the header for the vtp file vtp_file_prefix = "time_series/{}.{:d}".format(out_prefix, diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 24f88772d..76be7cc06 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -96,9 +96,13 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ raise ValueError("xtime variable name {} not found in " "{}".format(xtimeName, file_name)) local_times = [] - xtime = nc_file.variables[xtimeName][:, :] - for index in range(xtime.shape[0]): - local_times.append(''.join(xtime[index, :])) + xtime = nc_file.variables[xtimeName] + if len(xtime.shape) == 2: + xtime = xtime[:, :] + for index in range(xtime.shape[0]): + local_times.append(''.join(xtime[index, :])) + else: + local_times = xtime[:] if(len(local_times) == 0): local_times = ['0'] From 5a101e4dde0b90fe7ff06e0219bd1bb3a33e8d3a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 9 Aug 2018 17:28:17 +0200 Subject: [PATCH 050/180] Fix checks for "is_periodic" These checks should only be peformed for meshes for which `on_a_sphere == 'No'`, since we do not write out `is_periodic` for meshes on the sphere. 
--- visualization/paraview_vtk_field_extractor/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 76be7cc06..94f87b3aa 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -615,7 +615,7 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonCell) - if nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': if lonlat: xcoord = lonCell ycoord = latCell @@ -661,7 +661,7 @@ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonVertex[valid_mask]) - if nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': # all remaining entries in cellsOnVertex are valid validVertices = numpy.ones(cellsOnVertex.shape, bool) if lonlat: @@ -727,7 +727,7 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ vertsOnCell, validVerts, lonEdge[valid_mask]) - if nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': if lonlat: xcoord = lonEdge[valid_mask] ycoord = latEdge[valid_mask] From cf03ffaf425b07b466b81f196cbcaa82676939f4 Mon Sep 17 00:00:00 2001 From: mark-petersen Date: Thu, 23 Aug 2018 15:56:58 -0600 Subject: [PATCH 051/180] Apply autopep8 formatting --- .../weights/performance_testing.py | 116 +++++++++--------- 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/ocean/performance_testing/weights/performance_testing.py b/ocean/performance_testing/weights/performance_testing.py index 582d7a36d..6aa840c9b 100644 --- a/ocean/performance_testing/weights/performance_testing.py +++ b/ocean/performance_testing/weights/performance_testing.py @@ -92,69 +92,69 @@ (timenow, cpu_type, cells, timeparts, simulated_time_in_sec)) -while x>=0: - - graph_call = "python 
generate_graph.info_with_wgts.py -d init.nc -g graph.info -x %s" % x - g_args=shlex.split(graph_call) - print "running", ''.join(g_args) - subprocess.check_call(g_args) - - foldername_wgt = "weight"+str(x) - subprocess.check_call(['mkdir', foldername_wgt]) - graph_filename = "graph.info_with_wgts_"+str(x) - i = nprocs_max - j = niter - - while i >= nprocs_min: - - local_N = int(np.ceil(i / cores_per_node)) - sample = nsamples_per_procnum - foldername = foldername_wgt+ "/perf_p" + str(i) + "_gr_openmpi" - subprocess.check_call(['mkdir', '-p', foldername]) - fw.write('%s \t' % i) - sum = 0 - subprocess.check_call(['./metis', graph_filename, str(i)]) - print "metis" + str(i) + "completed" - graph_part_name = graph_filename + ".part."+str(i) - to_name="graph.info.part."+str(i) - subprocess.check_call(['mv',graph_part_name, to_name]) - - while sample >= 1: - args = ['srun', - '-N', - str(local_N), - '-n', - str(i), - '--cpu_bind=verbose,core', - '--distribution=plane=%s' % plane_size, - './ocean_model'] - print "running", ''.join(args) - subprocess.check_call(args) - - # Search for time integration and write to a file - fr = open("log.ocean.0000.out", 'r') - for line in fr: - m = re.search("2 time integration", line) - if m: +while x >= 0: + + graph_call = "python generate_graph.info_with_wgts.py -d init.nc -g graph.info -x %s" % x + g_args = shlex.split(graph_call) + print "running", ''.join(g_args) + subprocess.check_call(g_args) + + foldername_wgt = "weight" + str(x) + subprocess.check_call(['mkdir', foldername_wgt]) + graph_filename = "graph.info_with_wgts_" + str(x) + i = nprocs_max + j = niter + + while i >= nprocs_min: + + local_N = int(np.ceil(i / cores_per_node)) + sample = nsamples_per_procnum + foldername = foldername_wgt + "/perf_p" + str(i) + "_gr_openmpi" + subprocess.check_call(['mkdir', '-p', foldername]) + fw.write('%s \t' % i) + sum = 0 + subprocess.check_call(['./metis', graph_filename, str(i)]) + print "metis" + str(i) + "completed" + graph_part_name 
= graph_filename + ".part." + str(i) + to_name = "graph.info.part." + str(i) + subprocess.check_call(['mv', graph_part_name, to_name]) + + while sample >= 1: + args = ['srun', + '-N', + str(local_N), + '-n', + str(i), + '--cpu_bind=verbose,core', + '--distribution=plane=%s' % plane_size, + './ocean_model'] + print "running", ''.join(args) + subprocess.check_call(args) + + # Search for time integration and write to a file + fr = open("log.ocean.0000.out", 'r') + for line in fr: + m = re.search("2 time integration", line) + if m: numbers = line.split("integration", 1)[1] first_number = numbers.split()[0] fw.write('%s \t' % first_number) sum = sum + float(first_number) - fname = "log_p" + str(i) + "_s" + str(sample) - filepath = foldername + "/" + fname - sample = sample - 1 - subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) - - average = sum / nsamples_per_procnum - time[0][j - 1] = average - procs[0][j - 1] = i - SYPD[0][j - 1] = simulated_time_in_sec / (365 * average) - fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j - 1]))) - i = i / 2 - j = j - 1 - subprocess.check_call(['mv',to_name,foldername]) - x=x-0.5 + fname = "log_p" + str(i) + "_s" + str(sample) + filepath = foldername + "/" + fname + sample = sample - 1 + subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) + + average = sum / nsamples_per_procnum + time[0][j - 1] = average + procs[0][j - 1] = i + SYPD[0][j - 1] = simulated_time_in_sec / (365 * average) + fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j - 1]))) + i = i / 2 + j = j - 1 + subprocess.check_call(['mv', to_name, foldername]) + x = x - 0.5 # plotting .. 
From 70da9eaaf6f28f01a8ec2253dc5d76331dca8f51 Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Fri, 24 Aug 2018 07:36:53 -0600 Subject: [PATCH 052/180] Remove copy of file to generate weights --- .../generate_graph.info_with_wgts.py | 84 ------------------- 1 file changed, 84 deletions(-) delete mode 100644 ocean/performance_testing/generate_graph.info_with_wgts.py diff --git a/ocean/performance_testing/generate_graph.info_with_wgts.py b/ocean/performance_testing/generate_graph.info_with_wgts.py deleted file mode 100644 index 912aeac4d..000000000 --- a/ocean/performance_testing/generate_graph.info_with_wgts.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python - -""" -Name: generate_graph.info_with_wgts.py -Author: Divya Jaganathan -Date: 17 July, 2018 - -Assigns vertex weight to each horizontal cell in graph.info (in gpmetis format) -Reads: , -Writes: graph.info_with_wgts_ - -Flags(s) in call-command: - -x or --vertex_weight=, default=0.0 - -d or --data_file=, default=init.nc - -g or --graph_file=, default=graph.info - -""" - -import numpy as np -import netCDF4 as nc4 -from netCDF4 import MFDataset -import argparse - -parser = \ - argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) -parser.add_argument( - "-x", - "--vertex_weight", - dest="vertex_weight", - help="Exponent factor in the weighing function defining dependence on depth (maxLevelCell)", - default=0.0) - -parser.add_argument( - "-d", - "--data_file", - dest="data_filename", - help="File containing the maxLevelCell data (Default: init.nc)", - default="init.nc") - -parser.add_argument( - "-g", - "--graph_file", - dest="graph_filename", - help="Unweighed graph file (Default: graph.info)", - default="graph.info") - - -args = parser.parse_args() - -depth_dependence_factor_x = float(args.vertex_weight) -graph_filename = args.graph_filename -data_filename = args.data_filename - -file = MFDataset(data_filename) - -levels = file.variables['maxLevelCell'][:] - 
-minimum = np.amin(levels) - -ratio = np.divide(levels, minimum) -weights = np.ceil((np.float_power(ratio, depth_dependence_factor_x))) -weights = weights.astype(int) -file.close() - -filename = "graph.info_with_wgts_" + str(depth_dependence_factor_x) -fr = open(graph_filename, 'r') -fw = open(filename, 'w') - -counter = -1 - -for line in fr: - if counter == -1: - temp = line.split("\n", 1)[0] - # 010 indicates that the graph.info file is formatted to include the - # cell weights - fw.write("%s 010 \n" % temp) - else: - temp = line.split("\n", 1)[0] - fw.write("%d %s \n" % (weights[counter], temp)) - counter = counter + 1 - -fr.close() -fw.close() From 8d2ecd31e46dc5990059ba4951e8292446616a9f Mon Sep 17 00:00:00 2001 From: "Phillip J. Wolfram" Date: Fri, 24 Aug 2018 14:10:34 -0600 Subject: [PATCH 053/180] Fixes lat-lon coordinates for MPAS-O output Fixes issue where non-used (effectively masked) verticesOnCell values differ between 0 and nVertices, which is produced by MPAS-O. The existing script assumes these values must always be 0 and this commit ensures that assumption. 
--- visualization/paraview_vtk_field_extractor/utils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 94f87b3aa..01b431a96 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -604,6 +604,9 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ nEdgesOnCell = nc_file.variables['nEdgesOnCell'][:] verticesOnCell = nc_file.variables['verticesOnCell'][:, :] - 1 + # MPAS-O sets non-masked values to total number of vertices instead of 0 + # (as produced in mesh workflow) + verticesOnCell[numpy.where(verticesOnCell == len(vertices[0]))] = 0 validVertices = numpy.zeros(verticesOnCell.shape, bool) for vIndex in range(validVertices.shape[1]): From 1df1590542d74f0fc04e9bc968c7dc702dad9e71 Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Fri, 24 Aug 2018 08:12:07 -0600 Subject: [PATCH 054/180] Add machines. Small alterations in arguments. --- ocean/performance_testing/README | 23 ++++ .../call_to_performance_testing.py | 121 +++++++++++++----- .../performance_testing.py | 96 +++++++------- ocean/performance_testing/plot_from_files.py | 0 4 files changed, 161 insertions(+), 79 deletions(-) create mode 100644 ocean/performance_testing/README mode change 100644 => 100755 ocean/performance_testing/call_to_performance_testing.py mode change 100644 => 100755 ocean/performance_testing/performance_testing.py mode change 100644 => 100755 ocean/performance_testing/plot_from_files.py diff --git a/ocean/performance_testing/README b/ocean/performance_testing/README new file mode 100644 index 000000000..2dd3cf662 --- /dev/null +++ b/ocean/performance_testing/README @@ -0,0 +1,23 @@ +This automated performance testing suite was created by Divya Jaganathan for +the Parallel Computing Summer Research Institute, 2018, with guidance from Mark +Petersen, LANL. 
+ +To run a performance test, start in any MPAS-Ocean run directory and add links to: +- ocean_model +- metis +- from this directory: + call_to_performance_testing.py + performance_testing.py + plot_from_files.py + +There are three ways to run this testing suite: + +1. From a front end node, this command will submit a single job to the queue, of the size required. +./call_to_performance_testing.py -M 128 -m 16 -r EC60to30 + +2. From a compute node, run this directly +./performance_testing.py + +3. The above steps creates plots with just those tests. After running several jobs, + the data can be combined into a single plot with plot_from_files.py. This file is case specific, and + requires the user to alter lines by hand. diff --git a/ocean/performance_testing/call_to_performance_testing.py b/ocean/performance_testing/call_to_performance_testing.py old mode 100644 new mode 100755 index c8344a76e..b00a85493 --- a/ocean/performance_testing/call_to_performance_testing.py +++ b/ocean/performance_testing/call_to_performance_testing.py @@ -5,11 +5,39 @@ Author: Divya Jaganathan Date: July 6, 2018 -Submits request for a batch job to carry out successive performance runs starting from maximum number of tasks +Submits request for a batch job to carry out successive performance runs starting from maximum +number of tasks. Load modules before calling this script. -command format: python call_to_performance_testing.py -c -M -m -r +command format: +./call_to_performance_testing.py -M -m -n -r +Examples: + +On any machine, you can grab tarred run directories here: +https://zenodo.org/record/1252437#.W5FIppNKjUI +add a link to +- metis +- ocean_model executable +- call_to_performance_testing.py (here) +- performance_testing.py (here) + +On any machine log-in node, all you need is: + ./call_to_performance_testing.py +This will submit a single job to the queue, and produce the default test of +64 through 2 by powers of 2, and auto-detect your machine. 
Load modules before +calling this script, and submission will keep the same modules. + +Or, one can specify everything with flags. This tests 128 to 16 cores by powers of two. + ./call_to_performance_testing.py -M 128 -m 16 -r EC60to30 + +On cori, you have to specify cori-knl or cori-haswell, as follows: + ./call_to_performance_testing.py -M 128 -m 16 -r EC60to30 -n cori-knl + +After the job completes, you will find data and auto-generated plots in these directories: + data_performance + figures_performance """ + import subprocess import argparse import shlex @@ -19,73 +47,102 @@ parser = \ argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) -parser.add_argument( - "-c", - "--cpu_type", - dest="cpu_type", - help="If cori, enter cori-haswell/cori-knl", - default=os.uname()[1][0:2]) parser.add_argument( "-M", "--max_tasks", dest="max_tasks", - help="Maximum number of tasks", - required=True) + help="Maximum number of tasks, defaults to 64.", + default=64) parser.add_argument( "-m", "--min_tasks", dest="min_tasks", - help="Minimum number of tasks", + help="Minimum number of tasks, defaults to 2.", default=2) +parser.add_argument( + "-n", + "--machine_name", + dest="machine_name", + help="This script auto-detects the machine from the node name (e.g. 'gr' for grizzly). Use this flag to override. 
On cori, enter cori-haswell or cori-knl", + default=os.uname()[1][0:2]) parser.add_argument( "-r", - "--resolution", - dest="resolution", - help="Resolution ", - default="QU") + "--resolution_name", + dest="resolution_name", + help="This label appears on the title of the plot.", + default="MPAS-O") args = parser.parse_args() -cpu_type = args.cpu_type max_tasks = int(args.max_tasks) min_tasks = int(args.min_tasks) -res = args.resolution +machine_name = args.machine_name +resolution_name = args.resolution_name -job_id = res + "_perf_" + str(max_tasks) +job_id = "MPASO_perf_P" + str(max_tasks) + args.resolution_name output_name = "slurm_" + job_id + ".out" # NODES_REQUIRED to request for resources is calculated assuming no hyperthreads. # Changes to this can be implemented by changing cores_per_node specific # to the machine -if cpu_type == 'gr': +if machine_name == 'gr': + machine_long_name = 'grizzly' cores_per_node = 36.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) if NODES_REQUIRED < 70: qos = "interactive" else: qos = "standard" - runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( - NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) -elif cpu_type == 'cori-haswell': + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'wf': + machine_long_name = 'wolf' + cores_per_node = 16.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'ba': 
+ machine_long_name = 'badger' + cores_per_node = 36.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'cori-haswell': + machine_long_name = 'cori-haswell' cores_per_node = 32.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( - NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) -elif cpu_type == 'cori-knl': + runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'cori-knl': + machine_long_name = 'cori-knl' cores_per_node = 68.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( - NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node) -elif cpu_type == 'ed': + runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'ed': + machine_long_name = 'edison' cores_per_node = 24.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d" % ( - NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, 
cores_per_node) + runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name[0:5] == 'theta': + machine_long_name = 'theta' + cores_per_node = 64.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "qsub -n %d --jobname=%s -O %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) else: print "Invalid machine or have not mentioned haswell or knl on Cori" +print "running: ", runcommand s_args = shlex.split(runcommand) -print "running", ''.join(s_args) - subprocess.check_call(s_args) diff --git a/ocean/performance_testing/performance_testing.py b/ocean/performance_testing/performance_testing.py old mode 100644 new mode 100755 index 1297f7cf8..8b246b793 --- a/ocean/performance_testing/performance_testing.py +++ b/ocean/performance_testing/performance_testing.py @@ -9,7 +9,8 @@ This script can also be used for an interactive job submission using the following command format: -command format (to run an interactive job) : python performance_testing.py +command format (to run an interactive job) : +./performance_testing.py Access files required to run this script: 1. namelist.ocean @@ -18,10 +19,8 @@ 4. ocean_model (executable file) NOTE: When running a large number of tasks (>10k), check the name of log.ocean.0000.out file generated - no. 
of zeros in the file name changes - """ - import subprocess import numpy as np import re @@ -41,8 +40,7 @@ timenow = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") -# To obtain the run duration used in calculating SYPD - +# Read namelist to obtain the run duration used in calculating SYPD time_fr = open("namelist.ocean", 'r') for line in time_fr: @@ -61,7 +59,10 @@ time_fr.close() -# To store the details on number of cells (~ resolution) +subprocess.check_call(['mkdir', '-p', 'data_performance']) +subprocess.check_call(['mkdir', '-p', 'figures_performance']) + +# Store the details on number of cells for this resolution cells_fr = open("graph.info", 'r') cells = str(cells_fr.readline().split(" ")[0]) @@ -69,35 +70,40 @@ nprocs_max = int(sys.argv[1]) nprocs_min = int(sys.argv[2]) -cpu_type = sys.argv[3] +machine_long_name = sys.argv[3] cores_per_node = float(sys.argv[4]) +resolution_name = sys.argv[5] # plane_size is used to define the plane_distribution flag in srun plane_size = str(int(cores_per_node)) -# Performance data evaluation begins here - - +# Performance data evaluation begins here niter = int(np.log2(nprocs_max)) - int(np.log2(nprocs_min)) + 1 -nsamples_per_procnum = 5 +nsamples_per_procnum = 3 time = np.zeros(shape=(1, niter)) procs = np.zeros(shape=(1, niter)) SYPD = np.zeros(shape=(1, niter)) i = nprocs_max -j = niter +j = niter - 1 -writefilename = "data_" + cpu_type + "_" + \ +writefilename = "data_performance/" + machine_long_name + "_" + \ str(nprocs_max) + "_" + timenow + ".txt" fw = open(writefilename, 'a+') fw.write( - 'Time: %s \nMachine: %s\nNo. 
of Cells: %s\nRun duration: %s\nRun time in sec: %d\nFormat: #Procs|Sample Runs|Average|SYPD \n' % - (timenow, cpu_type, cells, timeparts, simulated_time_in_sec)) + 'Time: %s \nMachine: %s\nResolution: %s\nHorizontal cells: %s\nRun duration: %s\nRun time in sec: %d\nFormat: #Procs|Sample Runs|Average|SYPD \n' % + (timenow, + machine_long_name, + resolution_name, + cells, + timeparts, + simulated_time_in_sec)) +fw.flush() while i >= nprocs_min: local_N = int(np.ceil(i / cores_per_node)) - sample = nsamples_per_procnum foldername = "perf_p" + str(i) + "_gr_openmpi" subprocess.check_call(['rm', '-rf', foldername]) subprocess.check_call(['mkdir', foldername]) @@ -108,7 +114,9 @@ subprocess.check_call(['./metis', 'graph.info', str(i)]) print "metis" + str(i) + "completed" - while sample >= 1: + for sample in range(nsamples_per_procnum): + subprocess.check_call( + ['rm', '-rf', 'log*', 'analysis_members', 'output.nc']) args = ['srun', '-N', str(local_N), @@ -117,7 +125,7 @@ '--cpu_bind=verbose,core', '--distribution=plane=%s' % plane_size, './ocean_model'] - print "running", ''.join(args) + print "running", ' '.join(args) subprocess.check_call(args) # Search for time integration and write to a file @@ -129,40 +137,34 @@ first_number = numbers.split()[0] fw.write('%s \t' % first_number) sum = sum + float(first_number) - - fname = "log_p" + str(i) + "_s" + str(sample) + fr.close() + fname = "log_p" + str(i) + "_s" + str(sample + 1) filepath = foldername + "/" + fname - sample = sample - 1 subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) average = sum / nsamples_per_procnum - time[0][j - 1] = average - procs[0][j - 1] = i - SYPD[0][j - 1] = simulated_time_in_sec / (365 * average) - fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j - 1]))) + time[0][j] = average + procs[0][j] = i + SYPD[0][j] = simulated_time_in_sec / (365 * average) + fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j]))) + fw.flush() + perfect = SYPD[0][j] / procs[0][j] * procs + + # 
create plot with data so far + plt.clf() + plt.loglog(procs[0][j:], SYPD[0][j:], '-or', + label=resolution_name + ', ' + machine_long_name) + plt.loglog(procs[0][j:], perfect[0][j:], '--k', label='perfect scaling') + plt.title('MPAS-Ocean Performance Curve') + plt.xlabel('Number of MPI ranks') + plt.ylabel('Simulated Years Per Day (SYPD)') + plt.legend(loc='upper left') + plt.grid(which='major') + plt.xlim((procs[0][j] / 2.0, nprocs_max * 2.0)) + plt.tight_layout() + figurenamepath = "figures_performance/" + resolution_name + '_' + \ + machine_long_name + '_' + str(nprocs_max) + "_" + timenow + ".png" + plt.savefig(figurenamepath) i = i / 2 j = j - 1 - -# plotting .. - -subprocess.check_call(['mkdir', '-p', 'data_figures']) - -perfect = SYPD[0][0] / procs[0][0] * procs -plt.loglog(procs[0], SYPD[0], '-or', label='grizzly') -plt.loglog(procs[0], perfect[0], '--k', label='perfect scaling') -plt.title(r'MPAS-Ocean Performance Curve (Broadwell 36-cores No HT)') -plt.xlabel('Number of MPI ranks') -plt.ylabel('Simulated Years Per Day (SYPD)') -plt.legend(loc='upper left') -plt.grid() -plt.xlim((1, nprocs_max * 2)) -plt.tight_layout() -figurenamepath = "data_figures/fig_" + cpu_type + \ - str(nprocs_max) + "_" + timenow + ".png" -plt.savefig(figurenamepath) -subprocess.check_call(['mv', writefilename, 'data_figures']) - -fr.close() fw.close() - -# End diff --git a/ocean/performance_testing/plot_from_files.py b/ocean/performance_testing/plot_from_files.py old mode 100644 new mode 100755 From 77f98273c7ad44b94dea63e8384b0ec639ff5023 Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Mon, 10 Sep 2018 13:56:40 -0600 Subject: [PATCH 055/180] Update script names --- ocean/performance_testing/README | 8 +++--- ...ormance_testing.py => performance_test.py} | 6 ++-- ...py => submit_performance_test_to_queue.py} | 28 +++++++++---------- 3 files changed, 21 insertions(+), 21 deletions(-) rename ocean/performance_testing/{performance_testing.py => performance_test.py} (95%) rename 
ocean/performance_testing/{call_to_performance_testing.py => submit_performance_test_to_queue.py} (88%) diff --git a/ocean/performance_testing/README b/ocean/performance_testing/README index 2dd3cf662..a0053df03 100644 --- a/ocean/performance_testing/README +++ b/ocean/performance_testing/README @@ -6,17 +6,17 @@ To run a performance test, start in any MPAS-Ocean run directory and add links t - ocean_model - metis - from this directory: - call_to_performance_testing.py - performance_testing.py + submit_performance_test_to_queue.py + performance_test.py plot_from_files.py There are three ways to run this testing suite: 1. From a front end node, this command will submit a single job to the queue, of the size required. -./call_to_performance_testing.py -M 128 -m 16 -r EC60to30 +./submit_performance_test_to_queue.py -M 128 -m 16 -r EC60to30 2. From a compute node, run this directly -./performance_testing.py +./performance_test.py 3. The above steps creates plots with just those tests. After running several jobs, the data can be combined into a single plot with plot_from_files.py. 
This file is case specific, and diff --git a/ocean/performance_testing/performance_testing.py b/ocean/performance_testing/performance_test.py similarity index 95% rename from ocean/performance_testing/performance_testing.py rename to ocean/performance_testing/performance_test.py index 8b246b793..e4b6984fd 100755 --- a/ocean/performance_testing/performance_testing.py +++ b/ocean/performance_testing/performance_test.py @@ -1,16 +1,16 @@ #!/usr/bin/env python """ - Name: performance_testing.py + Name: performance_test.py Author: Divya Jaganathan Date: 6 July, 2018 -This script is automatically called by call_to_performance_testing.py to run a batch job to get performance plots and data +This script is automatically called by submit_performance_test_to_queue.py to run a batch job to get performance plots and data This script can also be used for an interactive job submission using the following command format: command format (to run an interactive job) : -./performance_testing.py +./performance_test.py Access files required to run this script: 1. namelist.ocean diff --git a/ocean/performance_testing/call_to_performance_testing.py b/ocean/performance_testing/submit_performance_test_to_queue.py similarity index 88% rename from ocean/performance_testing/call_to_performance_testing.py rename to ocean/performance_testing/submit_performance_test_to_queue.py index b00a85493..ce641f05f 100755 --- a/ocean/performance_testing/call_to_performance_testing.py +++ b/ocean/performance_testing/submit_performance_test_to_queue.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Name: call_to_performance_testing.py +Name: submit_performance_test_to_queue.py Author: Divya Jaganathan Date: July 6, 2018 @@ -9,7 +9,7 @@ number of tasks. Load modules before calling this script. 
command format: -./call_to_performance_testing.py -M -m -n -r +./submit_performance_test_to_queue.py -M -m -n -r Examples: @@ -18,20 +18,20 @@ add a link to - metis - ocean_model executable -- call_to_performance_testing.py (here) -- performance_testing.py (here) +- submit_performance_test_to_queue.py (here) +- performance_test.py (here) On any machine log-in node, all you need is: - ./call_to_performance_testing.py + ./submit_performance_test_to_queue.py This will submit a single job to the queue, and produce the default test of 64 through 2 by powers of 2, and auto-detect your machine. Load modules before calling this script, and submission will keep the same modules. Or, one can specify everything with flags. This tests 128 to 16 cores by powers of two. - ./call_to_performance_testing.py -M 128 -m 16 -r EC60to30 + ./submit_performance_test_to_queue.py -M 128 -m 16 -r EC60to30 On cori, you have to specify cori-knl or cori-haswell, as follows: - ./call_to_performance_testing.py -M 128 -m 16 -r EC60to30 -n cori-knl + ./submit_performance_test_to_queue.py -M 128 -m 16 -r EC60to30 -n cori-knl After the job completes, you will find data and auto-generated plots in these directories: data_performance @@ -93,7 +93,7 @@ qos = "interactive" else: qos = "standard" - runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) elif machine_name == 'wf': machine_long_name = 'wolf' @@ -103,7 +103,7 @@ qos = "interactive" else: qos = "standard" - runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, 
machine_long_name, cores_per_node, resolution_name) elif machine_name == 'ba': machine_long_name = 'badger' @@ -113,31 +113,31 @@ qos = "interactive" else: qos = "standard" - runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) elif machine_name == 'cori-haswell': machine_long_name = 'cori-haswell' cores_per_node = 32.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) elif machine_name == 'cori-knl': machine_long_name = 'cori-knl' cores_per_node = 68.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) elif machine_name == 'ed': machine_long_name = 'edison' cores_per_node = 24.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) elif machine_name[0:5] == 'theta': 
machine_long_name = 'theta' cores_per_node = 64.0 NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) - runcommand = "qsub -n %d --jobname=%s -O %s 'performance_testing.py' %d %d %s %d %s" % ( + runcommand = "qsub -n %d --jobname=%s -O %s 'performance_test.py' %d %d %s %d %s" % ( NODES_REQUIRED, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) else: print "Invalid machine or have not mentioned haswell or knl on Cori" From 052839698a34dfa8f08c4bbc957ab524a9694fe3 Mon Sep 17 00:00:00 2001 From: "Miles A. Curry" Date: Thu, 18 Oct 2018 13:38:33 -0600 Subject: [PATCH 056/180] Fixes grid_rotate overwrite bug When the same name was given for the source and destination file, grid_rotate would overwrite the source file with the rotation, instead of failing. This fix allows grid_rotate to exit when the source filename is the same as the destination filename as well as any other cp error. --- mesh_tools/grid_rotate/grid_rotate.f90 | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mesh_tools/grid_rotate/grid_rotate.f90 b/mesh_tools/grid_rotate/grid_rotate.f90 index a7e13b6ed..1f84b3a0c 100644 --- a/mesh_tools/grid_rotate/grid_rotate.f90 +++ b/mesh_tools/grid_rotate/grid_rotate.f90 @@ -83,6 +83,7 @@ subroutine main() real (kind=RKIND) :: cx, cy, cz character(220) :: copyCmd + integer :: copyStat call read_namelist(original_latitude_degrees, original_longitude_degrees, new_latitude_degrees, new_longitude_degrees, birdseye_rotation_counter_clockwise_degrees) @@ -98,7 +99,10 @@ subroutine main() ! Copy original file to output file copyCmd = "cp " // trim(filename) // " " // trim(newFilename) - call system(copyCmd) + copyStat = system(copyCmd) + if(copyStat /= 0) then + return ! If `cp` fails, let it report its error and exit + end if ! 
Make sure the output file is writeable copyCmd = "chmod u+w " // trim(newFilename) From 1a13f430604bff9128d66714d7a5cb607c56a000 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 16 Nov 2018 16:33:25 -0700 Subject: [PATCH 057/180] Add a script for adding zMid to MPAS-O data zMid is computed from bottomDepth, maxLevelCell and layerThickness from an initial condition file that may be different from the input file to which zMid will be added. --- ocean/add_depth_coord/add_depth_coord.py | 148 +++++++++++++++++++++++ 1 file changed, 148 insertions(+) create mode 100755 ocean/add_depth_coord/add_depth_coord.py diff --git a/ocean/add_depth_coord/add_depth_coord.py b/ocean/add_depth_coord/add_depth_coord.py new file mode 100755 index 000000000..6f5b6a9ab --- /dev/null +++ b/ocean/add_depth_coord/add_depth_coord.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python + +""" +Add a 3D coordinate "zMid" to an MPAS-Ocean output file that defines the +positive-up vertical location of each cell center. +""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import netCDF4 +import argparse +import sys + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): + ''' + Write an xarray data set to a NetCDF file using finite fill values + + Parameters + ---------- + ds : xarray.Dataset object + The xarray data set to be written to a file + + fileName : str + The fileName to write the data set to + + fillValues : dict + A dictionary of fill values for each supported data type. By default, + this is the dictionary used by the netCDF4 package. Key entries should + be of the form 'f8' (for float64), 'i4' (for int32), etc. 
+ ''' + # Authors + # ------- + # Xylar Asay-Davis + + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def compute_zmid(bottomDepth, maxLevelCell, layerThickness): + """ + Computes zMid given data arrays for bottomDepth, maxLevelCell and + layerThickness + + Parameters + ---------- + bottomDepth : ``xarray.DataArray`` + the depth of the ocean bottom (positive) + + maxLevelCell : ``xarray.DataArray`` + the 1-based vertical index of the bottom of the ocean + + layerThickness : ``xarray.DataArray`` + the thickness of MPAS-Ocean layers (possibly as a function of time) + + Returns + ------- + zMid : ``xarray.DataArray`` + the vertical coordinate defining the middle of each layer, masked below + the bathymetry + """ + # Authors + # ------- + # Xylar Asay-Davis + + nVertLevels = layerThickness.sizes['nVertLevels'] + + vertIndex = \ + xarray.DataArray.from_dict({'dims': ('nVertLevels',), + 'data': numpy.arange(nVertLevels)}) + + layerThickness = layerThickness.where(vertIndex < maxLevelCell) + + thicknessSum = layerThickness.sum(dim='nVertLevels') + thicknessCumSum = layerThickness.cumsum(dim='nVertLevels') + zSurface = -bottomDepth+thicknessSum + + zLayerBot = zSurface - thicknessCumSum + + zMid = zLayerBot + 0.5*layerThickness + + zMid = zMid.where(vertIndex < maxLevelCell) + + return zMid + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-c", "--coordFileName", dest="coordFileName", + type=str, required=False, + help="A MPAS-Ocean file with bottomDepth, maxLevelCell" + "and layerThickness but not zMid") + parser.add_argument("-i", "--inFileName", dest="inFileName", 
type=str, + required=True, + help="An input MPAS-Ocean file that zMid should be" + "added to, used for coords if another file is" + "not provided via -c.") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS-Ocean file with zMid added") + args = parser.parse_args() + + if args.coordFileName: + coordFileName = args.coordFileName + else: + coordFileName = args.inputFileName + + dsCoord = xarray.open_dataset(coordFileName) + + ds = xarray.open_dataset(args.inFileName) + + ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, + dsCoord.layerThickness) + ds.zMid.attrs['unit'] = 'm' + + for varName in ds.data_vars: + var = ds[varName] + if 'nCells' in var.dims and 'nVertLevels' in var.dims: + var = var.assign_coords(zMid=ds.zMid) + ds[varName] = var + + if 'history' in ds.attrs: + ds.attrs['history'] = '{}\n{}'.format(' '.join(sys.argv), + ds.attrs['history']) + else: + ds.attrs['history'] = ' '.join(sys.argv) + + write_netcdf(ds, args.outFileName) + + +if __name__ == '__main__': + main() From d7a8931e652eddccb0aeeffef8766cdfa8a1e8e1 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 19 Nov 2018 19:38:18 -0700 Subject: [PATCH 058/180] Add script to add refZMid to an MPAS-O dataset This 1D coordinate is likely more appropriate for CMIP6 output than the 3D zMid coordinate. 
#!/usr/bin/env python

"""
Add a 1D coordinate "refZMid" to an MPAS-Ocean output file that defines the
positive-up vertical location of each layer.
"""
# Authors
# -------
# Xylar Asay-Davis

import numpy
import argparse
import sys


def write_netcdf(ds, fileName, fillValues=None):
    '''
    Write an xarray data set to a NetCDF file using finite fill values

    Parameters
    ----------
    ds : xarray.Dataset object
        The xarray data set to be written to a file

    fileName : str
        The fileName to write the data set to

    fillValues : dict, optional
        A dictionary of fill values for each supported data type.  By
        default, this is the dictionary used by the netCDF4 package.  Key
        entries should be of the form 'f8' (for float64), 'i4' (for int32),
        etc.
    '''
    # Authors
    # -------
    # Xylar Asay-Davis

    if fillValues is None:
        # netCDF4 is imported lazily so that the pure-numpy helpers in this
        # module can be used without the netCDF4 package installed
        import netCDF4
        fillValues = netCDF4.default_fillvals

    # only variables whose dtype has an entry in fillValues get a _FillValue
    encodingDict = {}
    variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys())
    for variableName in variableNames:
        dtype = ds[variableName].dtype
        for fillType in fillValues:
            if dtype == numpy.dtype(fillType):
                encodingDict[variableName] = \
                    {'_FillValue': fillValues[fillType]}
                break

    ds.to_netcdf(fileName, encoding=encodingDict)


def compute_ref_zmid(refBottomDepth):
    """
    Computes refZMid given refBottomDepth

    Parameters
    ----------
    refBottomDepth : ``xarray.DataArray``
        the depth of the bottom of each vertical layer in the initial state
        (perfect z-level coordinate)

    Returns
    -------
    refZMid : ``numpy.ndarray``
        the vertical coordinate defining the middle of each layer
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    refBottomDepth = refBottomDepth.values

    refZMid = numpy.zeros(refBottomDepth.shape)

    # the middle of the top layer is half its thickness below the surface;
    # each deeper layer middle is the mean of its bounding interfaces
    refZMid[0] = 0.5*refBottomDepth[0]
    refZMid[1:] = 0.5*(refBottomDepth[1:] + refBottomDepth[0:-1])

    return refZMid


def main():
    """Parse arguments, compute refZMid and write the augmented data set."""
    # xarray is imported lazily so compute_ref_zmid() is usable without it
    import xarray

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-c", "--coordFileName", dest="coordFileName",
                        type=str, required=False,
                        help="A MPAS-Ocean file with refBottomDepth")
    # NOTE: trailing spaces added so the implicitly concatenated help
    # strings don't run words together ("beadded", "isnot")
    parser.add_argument("-i", "--inFileName", dest="inFileName", type=str,
                        required=True,
                        help="An input MPAS-Ocean file that refZMid should "
                             "be added to, used for coords if another file "
                             "is not provided via -c.")
    parser.add_argument("-o", "--outFileName", dest="outFileName", type=str,
                        required=True,
                        help="An output MPAS-Ocean file with refZMid added")
    args = parser.parse_args()

    if args.coordFileName:
        coordFileName = args.coordFileName
    else:
        # bug fix: the argparse dest is "inFileName", so the previous
        # args.inputFileName raised AttributeError whenever -c was omitted
        coordFileName = args.inFileName

    dsCoord = xarray.open_dataset(coordFileName)

    ds = xarray.open_dataset(args.inFileName)

    ds.coords['refZMid'] = ('nVertLevels',
                            compute_ref_zmid(dsCoord.refBottomDepth))
    ds.refZMid.attrs['unit'] = 'meters'
    ds.refZMid.attrs['long_name'] = 'reference depth of the center of each ' \
                                    'vertical level'

    # attach refZMid as a coordinate of every variable with a vertical dim
    for varName in ds.data_vars:
        var = ds[varName]
        if 'nVertLevels' in var.dims:
            var = var.assign_coords(refZMid=ds.refZMid)
            ds[varName] = var

    # prepend this command line to the file's history attribute
    if 'history' in ds.attrs:
        ds.attrs['history'] = '{}\n{}'.format(' '.join(sys.argv),
                                              ds.attrs['history'])
    else:
        ds.attrs['history'] = ' '.join(sys.argv)

    write_netcdf(ds, args.outFileName)


if __name__ == '__main__':
    main()
+""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import netCDF4 +import argparse +import sys + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): + ''' + Write an xarray data set to a NetCDF file using finite fill values + + Parameters + ---------- + ds : xarray.Dataset object + The xarray data set to be written to a file + + fileName : str + The fileName to write the data set to + + fillValues : dict + A dictionary of fill values for each supported data type. By default, + this is the dictionary used by the netCDF4 package. Key entries should + be of the form 'f8' (for float64), 'i4' (for int32), etc. + ''' + # Authors + # ------- + # Xylar Asay-Davis + + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def compute_zmid(bottomDepth, maxLevelCell, layerThickness): + """ + Computes zMid given data arrays for bottomDepth, maxLevelCell and + layerThickness + + Parameters + ---------- + bottomDepth : ``xarray.DataArray`` + the depth of the ocean bottom (positive) + + maxLevelCell : ``xarray.DataArray`` + the 1-based vertical index of the bottom of the ocean + + layerThickness : ``xarray.DataArray`` + the thickness of MPAS-Ocean layers (possibly as a function of time) + + Returns + ------- + zMid : ``xarray.DataArray`` + the vertical coordinate defining the middle of each layer, masked below + the bathymetry + """ + # Authors + # ------- + # Xylar Asay-Davis + + nVertLevels = layerThickness.sizes['nVertLevels'] + + vertIndex = \ + xarray.DataArray.from_dict({'dims': ('nVertLevels',), + 'data': numpy.arange(nVertLevels)}) + + 
layerThickness = layerThickness.where(vertIndex < maxLevelCell) + + thicknessSum = layerThickness.sum(dim='nVertLevels') + thicknessCumSum = layerThickness.cumsum(dim='nVertLevels') + zSurface = -bottomDepth+thicknessSum + + zLayerBot = zSurface - thicknessCumSum + + zMid = zLayerBot + 0.5*layerThickness + + zMid = zMid.where(vertIndex < maxLevelCell) + + return zMid + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-c", "--coordFileName", dest="coordFileName", + type=str, required=False, + help="A MPAS-Ocean file with bottomDepth and " + "maxLevelCell") + parser.add_argument("-i", "--inFileName", dest="inFileName", type=str, + required=True, + help="An input MPAS-Ocean file with some form of" + "layerThickness, and also bottomDepth and" + "maxLevelCell if no coordinate file is provided.") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS-Ocean file with zMid for each" + "time in the input file") + parser.add_argument("-p", "--prefix", dest="prefix", type=str, + required=False, default="", + help="A prefix on layerThickness (in) and zMid (out)," + "such as 'timeMonthly_avg_'") + args = parser.parse_args() + + if args.coordFileName: + coordFileName = args.coordFileName + else: + coordFileName = args.inputFileName + + dsCoord = xarray.open_dataset(coordFileName) + + dsIn = xarray.open_dataset(args.inFileName) + inVarName = '{}layerThickness'.format(args.prefix) + outVarName = '{}zMid'.format(args.prefix) + layerThickness = dsIn[inVarName] + + zMid = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, + layerThickness) + + dsOut = xarray.Dataset() + dsOut[outVarName] = zMid + dsOut[outVarName].attrs['unit'] = 'm' + + dsOut.attrs['history'] = ' '.join(sys.argv) + + write_netcdf(dsOut, args.outFileName) + + +if __name__ == '__main__': + main() From 500e514a6c6e19aa1fd1cd23d6f1725f2a4c7722 Mon Sep 17 00:00:00 
2001 From: Xylar Asay-Davis Date: Mon, 19 Nov 2018 21:21:41 -0700 Subject: [PATCH 060/180] Rename refZMid to depth --- .../{add_refZMid.py => add_depth.py} | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) rename ocean/add_depth_coord/{add_refZMid.py => add_depth.py} (78%) diff --git a/ocean/add_depth_coord/add_refZMid.py b/ocean/add_depth_coord/add_depth.py similarity index 78% rename from ocean/add_depth_coord/add_refZMid.py rename to ocean/add_depth_coord/add_depth.py index aac6bb9c5..362ebc218 100755 --- a/ocean/add_depth_coord/add_refZMid.py +++ b/ocean/add_depth_coord/add_depth.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Add a 1D coordinate "refZMid" to an MPAS-Ocean output file that defines the +Add a 1D coordinate "depth" to an MPAS-Ocean output file that defines the positive-up vertical location of each layer. """ # Authors @@ -52,9 +52,9 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): ds.to_netcdf(fileName, encoding=encodingDict) -def compute_ref_zmid(refBottomDepth): +def compute_depth(refBottomDepth): """ - Computes refZMid given refBottomDepth + Computes depth given refBottomDepth Parameters ---------- @@ -64,7 +64,7 @@ def compute_ref_zmid(refBottomDepth): Returns ------- - refZMid : ``xarray.DataArray`` + depth : ``xarray.DataArray`` the vertical coordinate defining the middle of each layer """ # Authors @@ -73,12 +73,12 @@ def compute_ref_zmid(refBottomDepth): refBottomDepth = refBottomDepth.values - refZMid = numpy.zeros(refBottomDepth.shape) + depth = numpy.zeros(refBottomDepth.shape) - refZMid[0] = 0.5*refBottomDepth[0] - refZMid[1:] = 0.5*(refBottomDepth[1:] + refBottomDepth[0:-1]) + depth[0] = 0.5*refBottomDepth[0] + depth[1:] = 0.5*(refBottomDepth[1:] + refBottomDepth[0:-1]) - return refZMid + return depth def main(): @@ -89,12 +89,12 @@ def main(): help="A MPAS-Ocean file with refBottomDepth") parser.add_argument("-i", "--inFileName", dest="inFileName", type=str, required=True, - help="An 
input MPAS-Ocean file that refZMid should be" + help="An input MPAS-Ocean file that depth should be" "added to, used for coords if another file is" "not provided via -c.") parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, required=True, - help="An output MPAS-Ocean file with refZMid added") + help="An output MPAS-Ocean file with depth added") args = parser.parse_args() if args.coordFileName: @@ -106,16 +106,16 @@ def main(): ds = xarray.open_dataset(args.inFileName) - ds.coords['refZMid'] = ('nVertLevels', - compute_ref_zmid(dsCoord.refBottomDepth)) - ds.refZMid.attrs['unit'] = 'meters' - ds.refZMid.attrs['long_name'] = 'reference depth of the center of each ' \ - 'vertical level' + ds.coords['depth'] = ('nVertLevels', + compute_depth(dsCoord.refBottomDepth)) + ds.depth.attrs['unit'] = 'meters' + ds.depth.attrs['long_name'] = 'reference depth of the center of each ' \ + 'vertical level' for varName in ds.data_vars: var = ds[varName] if 'nVertLevels' in var.dims: - var = var.assign_coords(refZMid=ds.refZMid) + var = var.assign_coords(depth=ds.depth) ds[varName] = var if 'history' in ds.attrs: From 3f69b6a270aebfa0682be95accbef900eb5ebb29 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 29 Nov 2018 22:07:40 -0700 Subject: [PATCH 061/180] PEP8 fixes and some python 3 support --- .../paraview_vtk_field_extractor.py | 24 +- .../paraview_vtk_field_extractor/utils.py | 330 ++++++++++-------- 2 files changed, 196 insertions(+), 158 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 74ca0f6d5..cb2b7576e 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -54,6 +54,9 @@ Optional modules: progressbar """ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + 
import os import numpy as np @@ -95,7 +98,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, blockDim = len(time_series_file.dimensions[blockDimName]) # Pre-compute the number of blocks - nBlocks = 1 + blockDim / blocking + nBlocks = 1 + blockDim // blocking nPolygons = len(offsets) nPoints = len(vertices[0]) @@ -147,7 +150,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, field_bar = ProgressBar(widgets=widgets, maxval=nTimes*nHyperSlabs).start() else: - print "Writing time series...." + print("Writing time series....") suffix = blockDimName[1:] if any_var_has_time_dim: @@ -197,7 +200,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, "{}".format(xtimeName, time_series_file)) var = time_series_file.variables[xtimeName] if len(var.shape) == 2: - xtime = ''.join(var[local_time_indices[time_index], :]).strip() + xtime = \ + ''.join(var[local_time_indices[time_index], :]).strip() date = datetime(int(xtime[0:4]), int(xtime[5:7]), int(xtime[8:10]), int(xtime[11:13]), int(xtime[14:16]), int(xtime[17:19])) @@ -313,9 +317,9 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if __name__ == "__main__": if use_progress_bar: - print " -- Using progress bars --" + print(" -- Using progress bars --") else: - print " -- Progress bars are not available--" + print(" -- Progress bars are not available--") parser = \ argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) @@ -410,7 +414,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, # Handle cell variables if len(cellVars) > 0: - print " -- Extracting cell fields --" + print(" -- Extracting cell fields --") mesh_file = utils.open_netcdf(args.mesh_filename) @@ -430,10 +434,10 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if separate_mesh_file: mesh_file.close() - print "" + print("") if len(vertexVars) > 0: - print " -- Extracting vertex fields 
--" + print(" -- Extracting vertex fields --") mesh_file = utils.open_netcdf(args.mesh_filename) @@ -454,10 +458,10 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if separate_mesh_file: mesh_file.close() - print "" + print("") if len(edgeVars) > 0: - print " -- Extracting edge fields --" + print(" -- Extracting edge fields --") mesh_file = utils.open_netcdf(args.mesh_filename) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 01b431a96..a4f63180e 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -7,20 +7,26 @@ files on MPAS grids. """ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + try: from evtk.vtk import VtkFile, VtkPolyData except ImportError: from pyevtk.vtk import VtkFile, VtkPolyData -import sys, glob +import sys +import glob import numpy +from builtins import input + from netCDF4 import Dataset as NetCDFFile try: from progressbar import ProgressBar, Percentage, Bar, ETA use_progress_bar = True -except: +except ImportError: use_progress_bar = False @@ -34,20 +40,21 @@ def open_netcdf(file_name): return nc_file -def is_valid_mesh_var(mesh_file, variable_name): # {{{ +def is_valid_mesh_var(mesh_file, variable_name): # {{{ if mesh_file is None: return False if variable_name not in mesh_file.variables: return False - return 'Time' not in mesh_file.variables[variable_name].dimensions # }}} + return 'Time' not in mesh_file.variables[variable_name].dimensions # }}} -def get_var(variable_name, mesh_file, time_series_file): # {{{ + +def get_var(variable_name, mesh_file, time_series_file): # {{{ if is_valid_mesh_var(mesh_file, variable_name): return mesh_file.variables[variable_name] else: - return time_series_file.variables[variable_name] # }}} + return time_series_file.variables[variable_name] # }}} def setup_time_indices(fn_pattern, xtimeName): # 
{{{ @@ -70,8 +77,8 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ all_times = [] if len(file_list) == 0: - print "No files to process." - print "Exiting..." + print("No files to process.") + print("Exiting...") sys.exit(0) if use_progress_bar: @@ -79,14 +86,14 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ ETA()] time_bar = ProgressBar(widgets=widgets, maxval=len(file_list)).start() else: - print "Build time indices..." + print("Build time indices...") i_file = 0 for file_name in file_list: try: nc_file = open_netcdf(file_name) except IOError: - print "Warning: could not open {}".format(file_name) + print("Warning: could not open {}".format(file_name)) continue if 'Time' not in nc_file.dimensions or xtimeName is None: @@ -126,14 +133,8 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ return (local_indices, file_names) # }}} -# Parses the indices to be extracted along a given dimension. -# The index_string can be fomatted as follows: -# -- no indices are to be extracted -# n -- the index n is to be extracted -# m,n,p -- the list of indices is to be extracted -# m:n -- all indices from m to n are to be extracted (including m but -# excluding n, in the typical python indexing convention) -def parse_extra_dim(dim_name, index_string, time_series_file, mesh_file):#{{{ +def parse_extra_dim(dim_name, index_string, time_series_file, mesh_file): + # {{{ """ Parses the indices to be extracted along a given dimension. 
The index_string can be fomatted as follows: @@ -183,18 +184,18 @@ def parse_extra_dim(dim_name, index_string, time_series_file, mesh_file):#{{{ # zero-pad integer indices if len(numerical_indices) > 0: max_index = numpy.amax(numerical_indices) - pad = int(numpy.log10(max(max_index,1)))+1 - template = '%%0%dd'%pad + pad = int(numpy.log10(max(max_index, 1)))+1 + template = '%%0%dd' % pad for i in range(len(indices)): try: val = int(indices[i]) except ValueError: continue - indices[i] = template%(val) + indices[i] = template % (val) return indices -#}}} +# }}} def parse_time_indices(index_string, time_indices, time_file_names): # {{{ @@ -306,24 +307,28 @@ def parse_index_string(index_string, dim_size): # {{{ return indices, numerical_indices # }}} -# Parses a list of dimensions and corresponding indices separated by equals signs. -# Optionally, a max_index_count (typically 1) can be provided, indicating that -# indices beyond max_index_count-1 will be ignored in each dimension. -# Optionally, topo_dim contains the name of a dimension associated with the -# surface or bottom topography (e.g. nVertLevels for MPAS-Ocean) -# If too_dim is provided, topo_cell_indices_name can optionally be either -# a constant value for the index vertical index to the topography or -# the name of a field with dimension nCells that contains the vertical index of -# the topography. def parse_extra_dims(dimension_list, time_series_file, mesh_file, - max_index_count=None):#{{{ + max_index_count=None): # {{{ + ''' + Parses a list of dimensions and corresponding indices separated by equals + signs. Optionally, a max_index_count (typically 1) can be provided, + indicating that indices beyond max_index_count-1 will be ignored in each + dimension. Optionally, topo_dim contains the name of a dimension associated + with the surface or bottom topography (e.g. 
nVertLevels for MPAS-Ocean) + If too_dim is provided, topo_cell_indices_name can optionally be either + a constant value for the index vertical index to the topography or + the name of a field with dimension nCells that contains the vertical index + of the topography. + ''' + if not dimension_list: return {} extra_dims = {} for dim_item in dimension_list: - (dimName,index_string) = dim_item.split('=') - indices = parse_extra_dim(dimName, index_string, time_series_file, mesh_file) + (dimName, index_string) = dim_item.split('=') + indices = parse_extra_dim(dimName, index_string, time_series_file, + mesh_file) if indices is not None: if max_index_count is None or len(indices) <= max_index_count: extra_dims[dimName] = indices @@ -331,20 +336,22 @@ def parse_extra_dims(dimension_list, time_series_file, mesh_file, extra_dims[dimName] = indices[0:max_index_count] return extra_dims -#}}} - +# }}} -# Creates a list of variables names to be extracted. Prompts for indices -# of any extra dimensions that were not specified on the command line. -# extra_dims should be a dictionary of indices along extra dimensions (as -# opposed to "basic" dimensions). basic_dims is a list of dimension names -# that should be excluded from extra_dims. include_dims is a list of -# possible dimensions, one of which must be in each vairable to be extracted -# (used in expanding command line placeholders "all", "allOnCells", etc.) -def setup_dimension_values_and_sort_vars(time_series_file, mesh_file, variable_list, extra_dims, - basic_dims=['nCells', 'nEdges', 'nVertices', 'Time'], - include_dims=['nCells', 'nEdges', 'nVertices']):#{{{ +def setup_dimension_values_and_sort_vars( + time_series_file, mesh_file, variable_list, extra_dims, + basic_dims=['nCells', 'nEdges', 'nVertices', 'Time'], + include_dims=['nCells', 'nEdges', 'nVertices']): # {{{ + ''' + Creates a list of variables names to be extracted. Prompts for indices + of any extra dimensions that were not specified on the command line. 
+ extra_dims should be a dictionary of indices along extra dimensions (as + opposed to "basic" dimensions). basic_dims is a list of dimension names + that should be excluded from extra_dims. include_dims is a list of + possible dimensions, one of which must be in each vairable to be extracted + (used in expanding command line placeholders "all", "allOnCells", etc.) + ''' def add_var(variables, variable_name, include_dims, exclude_dims=None): if variable_name in variable_names: @@ -371,17 +378,20 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): variable_names = [] exclude_dims = ['Time'] for variable_name in time_series_file.variables: - add_var(time_series_file.variables, str(variable_name), include_dims, exclude_dims=None) + add_var(time_series_file.variables, str(variable_name), + include_dims, exclude_dims=None) if mesh_file is not None: for variable_name in mesh_file.variables: - add_var(mesh_file.variables, str(variable_name), include_dims, exclude_dims) + add_var(mesh_file.variables, str(variable_name), include_dims, + exclude_dims) else: variable_names = variable_list.split(',') - for suffix in ['Cells','Edges','Vertices']: - include_dim = 'n%s'%suffix - if ('allOn%s'%suffix in variable_names) and (include_dim in include_dims): - variable_names.remove('allOn%s'%suffix) + for suffix in ['Cells', 'Edges', 'Vertices']: + include_dim = 'n%s' % suffix + if ('allOn%s' % suffix in variable_names) and (include_dim in + include_dims): + variable_names.remove('allOn%s' % suffix) exclude_dims = ['Time'] for variable_name in time_series_file.variables: add_var(time_series_file.variables, str(variable_name), @@ -389,7 +399,8 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): if mesh_file is not None: for variable_name in mesh_file.variables: add_var(mesh_file.variables, str(variable_name), - include_dims=[include_dim], exclude_dims=exclude_dims) + include_dims=[include_dim], + exclude_dims=exclude_dims) 
variable_names.sort() @@ -402,27 +413,31 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): nc_file = time_series_file field_dims = nc_file.variables[variable_name].dimensions for dim in field_dims: - if ((dim in basic_dims) or (dim in extra_dims) or (dim in promptDimNames)): + if ((dim in basic_dims) or (dim in extra_dims) + or (dim in promptDimNames)): # this dimension has already been accounted for continue promptDimNames.append(str(dim)) if display_prompt: - print "" - print "Need to define additional dimension values" + print("") + print("Need to define additional dimension values") display_prompt = False dim_size = len(nc_file.dimensions[dim]) valid = False while not valid: - print "Valid range for dimension %s between 0 and %d"%(dim, dim_size-1) - index_string = raw_input("Enter a value for dimension %s: "%(dim)) - indices = parse_extra_dim(str(dim), index_string, time_series_file, mesh_file) + print("Valid range for dimension %s between 0 and %d" + "" % (dim, dim_size-1)) + index_string = input("Enter a value for dimension %s: " + "" % (dim)) + indices = parse_extra_dim(str(dim), index_string, + time_series_file, mesh_file) valid = indices is not None if valid: extra_dims[str(dim)] = indices else: - print " -- Invalid value, please re-enter --" + print(" -- Invalid value, please re-enter --") empty_dims = [] for dim in extra_dims: @@ -431,7 +446,8 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): for variable_name in variable_names: - field_dims = get_var(variable_name, mesh_file, time_series_file).dimensions + field_dims = get_var(variable_name, mesh_file, + time_series_file).dimensions skip = False for dim in field_dims: if dim in empty_dims: @@ -450,21 +466,22 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): elif len(indices) == 1: dim_vals = [] for index0 in indices[0]: - dim_vals.append([index0]) + dim_vals.append([index0]) elif len(indices) == 2: dim_vals = [] for index0 in 
indices[0]: for index1 in indices[1]: - dim_vals.append([index0,index1]) + dim_vals.append([index0, index1]) elif len(indices) == 3: dim_vals = [] for index0 in indices[0]: for index1 in indices[1]: for index2 in indices[2]: - dim_vals.append([index0,index1,index2]) + dim_vals.append([index0, index1, index2]) else: - print "variable %s has too many extra dimensions and will be skipped."%variable_name - continue + print("variable %s has too many extra dimensions and will be " + "skipped." % variable_name) + continue if "nCells" in field_dims: cellVars.append(variable_name) @@ -477,39 +494,44 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): del dim_vals return (all_dim_vals, cellVars, vertexVars, edgeVars) -#}}} - -# Print a summary of the time levels, mesh file, transects file (optional) -# and variables to be extracted. -def summarize_extraction(mesh_file, time_indices, cellVars, vertexVars, edgeVars, - transects_file=None):#{{{ - print "" - print "Extracting a total of %d time levels."%(len(time_indices)) - print "Using file '%s' as the mesh file for this extraction."%(mesh_file) +# }}} + + +def summarize_extraction(mesh_file, time_indices, cellVars, vertexVars, + edgeVars, transects_file=None): # {{{ + ''' + print a summary of the time levels, mesh file, transects file (optional) + and variables to be extracted. + ''' + + print("") + print("Extracting a total of %d time levels." % (len(time_indices))) + print("Using file '%s' as the mesh file for this extraction." + "" % (mesh_file)) if transects_file is not None: - print "Using file '%s' as the transects file."%(transects_file) - print "" - print "" - print "The following variables will be extracted from the input file(s)." - print "" + print("Using file '%s' as the transects file." 
% (transects_file)) + print("") + print("") + print("The following variables will be extracted from the input file(s).") + print("") if len(cellVars) > 0: - print " Variables with 'nCells' as a dimension:" + print(" Variables with 'nCells' as a dimension:") for variable_name in cellVars: - print " name: %s"%(variable_name) + print(" name: %s" % (variable_name)) if len(vertexVars) > 0: - print " Variables with 'nVertices' as a dimension:" + print(" Variables with 'nVertices' as a dimension:") for variable_name in vertexVars: - print " name: %s"%(variable_name) + print(" name: %s" % (variable_name)) if len(edgeVars) > 0: - print " Variables with 'nEdges' as adimension:" + print(" Variables with 'nEdges' as adimension:") for variable_name in edgeVars: - print " name: %s"%(variable_name) + print(" name: %s" % (variable_name)) - print "" -#}}} + print("") +# }}} def write_pvd_header(path, prefix): # {{{ @@ -521,18 +543,18 @@ def write_pvd_header(path, prefix): # {{{ return pvd_file # }}} -def get_hyperslab_name_and_dims(var_name, extra_dim_vals):#{{{ +def get_hyperslab_name_and_dims(var_name, extra_dim_vals): # {{{ if(extra_dim_vals is None): - return ([var_name],None) + return ([var_name], None) if(len(extra_dim_vals) == 0): - return ([],None) + return ([], None) out_var_names = [] for hyper_slab in extra_dim_vals: pieces = [var_name] pieces.extend(hyper_slab) out_var_names.append('_'.join(pieces)) return (out_var_names, extra_dim_vals) -#}}} +# }}} def write_vtp_header(path, prefix, active_var_index, var_indices, @@ -591,7 +613,7 @@ def write_vtp_header(path, prefix, active_var_index, var_indices, def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ - print "Build geometry for fields on cells..." 
+ print("Build geometry for fields on cells...") vertices = _build_location_list_xyz(nc_file, 'Vertex', output_32bit, lonlat) @@ -640,7 +662,7 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ - print "Build geometry for fields on vertices...." + print("Build geometry for fields on vertices....") vertices = _build_location_list_xyz(nc_file, 'Cell', output_32bit, lonlat) @@ -699,6 +721,7 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ if lonlat: lonEdge = numpy.rad2deg(nc_file.variables['lonEdge'][:]) + latEdge = numpy.rad2deg(nc_file.variables['latEdge'][:]) nEdges = len(nc_file.dimensions['nEdges']) nCells = len(nc_file.dimensions['nCells']) @@ -761,14 +784,17 @@ def get_field_sign(field_name): return (field_name, sign) -def read_field(var_name, mesh_file, time_series_file, extra_dim_vals, time_index, - block_indices, outType, sign=1):#{{{ - def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays):#{{{ +def read_field(var_name, mesh_file, time_series_file, extra_dim_vals, + time_index, block_indices, outType, sign=1): # {{{ + + def read_field_with_dims(field_var, dim_vals, temp_shape, outType, + index_arrays): # {{{ temp_field = numpy.zeros(temp_shape, dtype=outType) inDims = len(dim_vals) if inDims <= 0 or inDims > 5: - print 'reading field %s with %s dimensions not supported.'%(var_name, inDims) + print('reading field %s with %s dimensions not supported.' + '' % (var_name, inDims)) sys.exit(1) if inDims == 1: @@ -793,27 +819,26 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) outDims = len(temp_field.shape) if outDims <= 0 or outDims > 4: - print 'something went wrong reading field %s, resulting in a temp array with %s dimensions.'%(var_name, outDims) + print('something went wrong reading field %s, resulting in a temp ' + 'array with %s dimensions.' 
% (var_name, outDims)) sys.exit(1) block_indices = numpy.arange(temp_field.shape[0]) if outDims == 1: field = temp_field elif outDims == 2: - field = temp_field[block_indices,index_arrays[0]] + field = temp_field[block_indices, index_arrays[0]] elif outDims == 3: - field = temp_field[block_indices,index_arrays[0],index_arrays[1]] + field = temp_field[block_indices, index_arrays[0], index_arrays[1]] elif outDims == 4: - field = temp_field[block_indices,index_arrays[0],index_arrays[1],index_arrays[2]] - - return field + field = temp_field[block_indices, index_arrays[0], index_arrays[1], + index_arrays[2]] - -#}}} + return field # }}} field_var = get_var(var_name, mesh_file, time_series_file) try: missing_val = field_var.missing_value - except: + except AttributeError: missing_val = -9999999790214767953607394487959552.000000 dim_vals = [] @@ -824,7 +849,7 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) index_arrays = [] for i in range(field_var.ndim): - dim = field_var.dimensions[i] + dim = field_var.dimensions[i] if dim == 'Time': dim_vals.append(time_index) elif dim in ['nCells', 'nEdges', 'nVertices']: @@ -841,118 +866,127 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) dim_vals.append(numpy.arange(shape[i])) temp_shape = temp_shape + (shape[i],) - index_array_var = get_var(extra_dim_val, mesh_file, time_series_file) + index_array_var = get_var(extra_dim_val, mesh_file, + time_series_file) # read the appropriate indices from the index_array_var - index_array = numpy.maximum(0,numpy.minimum(shape[i]-1, index_array_var[block_indices]-1)) + index_array = numpy.maximum(0, numpy.minimum( + shape[i]-1, index_array_var[block_indices]-1)) index_arrays.append(index_array) extra_dim_index += 1 - field = read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) field[field == missing_val] = numpy.nan - return sign*field -#}}} + return sign*field # }}} def 
compute_zInterface(minLevelCell, maxLevelCell, layerThicknessCell, - zMinCell, zMaxCell, dtype, cellsOnEdge=None):#{{{ + zMinCell, zMaxCell, dtype, cellsOnEdge=None): + # {{{ - (nCells,nLevels) = layerThicknessCell.shape + (nCells, nLevels) = layerThicknessCell.shape - cellMask = numpy.ones((nCells,nLevels), bool) + cellMask = numpy.ones((nCells, nLevels), bool) for iLevel in range(nLevels): if minLevelCell is not None: - cellMask[:,iLevel] = numpy.logical_and(cellMask[:,iLevel], iLevel >= minLevelCell) + cellMask[:, iLevel] = numpy.logical_and(cellMask[:, iLevel], + iLevel >= minLevelCell) if maxLevelCell is not None: - cellMask[:,iLevel] = numpy.logical_and(cellMask[:,iLevel], iLevel <= maxLevelCell) + cellMask[:, iLevel] = numpy.logical_and(cellMask[:, iLevel], + iLevel <= maxLevelCell) - zInterfaceCell = numpy.zeros((nCells,nLevels+1),dtype=dtype) + zInterfaceCell = numpy.zeros((nCells, nLevels+1), dtype=dtype) for iLevel in range(nLevels): - zInterfaceCell[:,iLevel+1] = (zInterfaceCell[:,iLevel] - + cellMask[:,iLevel]*layerThicknessCell[:,iLevel]) + zInterfaceCell[:, iLevel+1] = \ + zInterfaceCell[:, iLevel] \ + + cellMask[:, iLevel]*layerThicknessCell[:, iLevel] if zMinCell is not None: minLevel = minLevelCell.copy() minLevel[minLevel < 0] = nLevels-1 - zOffsetCell = zMinCell - zInterfaceCell[numpy.arange(0,nCells),minLevel] + zOffsetCell = zMinCell - zInterfaceCell[numpy.arange(0, nCells), + minLevel] else: - zOffsetCell = zMaxCell - zInterfaceCell[numpy.arange(0,nCells),maxLevelCell+1] + zOffsetCell = zMaxCell - zInterfaceCell[numpy.arange(0, nCells), + maxLevelCell+1] for iLevel in range(nLevels+1): - zInterfaceCell[:,iLevel] += zOffsetCell + zInterfaceCell[:, iLevel] += zOffsetCell if cellsOnEdge is None: - return zInterfaceCell + return zInterfaceCell else: nEdges = cellsOnEdge.shape[0] - zInterfaceEdge = numpy.zeros((nEdges,nLevels+1),dtype=dtype) + zInterfaceEdge = numpy.zeros((nEdges, nLevels+1), dtype=dtype) # Get a list of valid cells on edges and a 
mask of which are valid - cellsOnEdgeMask = numpy.logical_and(cellsOnEdge >= 0, cellsOnEdge < nCells) + cellsOnEdgeMask = numpy.logical_and(cellsOnEdge >= 0, + cellsOnEdge < nCells) cellIndicesOnEdge = [] - cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:,0],0]) - cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:,1],1]) + cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:, 0], 0]) + cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:, 1], 1]) for iLevel in range(nLevels): edgeMask = numpy.zeros(nEdges, bool) layerThicknessEdge = numpy.zeros(nEdges, float) denom = numpy.zeros(nEdges, float) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - cellMaskLocal = cellMask[cellIndices,iLevel] + cellMaskLocal = cellMask[cellIndices, iLevel] - edgeMask[mask] = numpy.logical_or(edgeMask[mask], cellMaskLocal) + edgeMask[mask] = numpy.logical_or(edgeMask[mask], + cellMaskLocal) - layerThicknessEdge[mask] += cellMaskLocal*layerThicknessCell[cellIndices,iLevel] + layerThicknessEdge[mask] += \ + cellMaskLocal*layerThicknessCell[cellIndices, iLevel] denom[mask] += 1.0*cellMaskLocal layerThicknessEdge[edgeMask] /= denom[edgeMask] - zInterfaceEdge[:,iLevel+1] = (zInterfaceEdge[:,iLevel] - + edgeMask*layerThicknessEdge) + zInterfaceEdge[:, iLevel+1] = (zInterfaceEdge[:, iLevel] + + edgeMask*layerThicknessEdge) if zMinCell is not None: refLevelEdge = numpy.zeros(nEdges, int) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - refLevelEdge[mask] = numpy.maximum(refLevelEdge[mask], minLevel[cellIndices]) + refLevelEdge[mask] = numpy.maximum(refLevelEdge[mask], + minLevel[cellIndices]) else: refLevelEdge = (nLevels-1)*numpy.ones(nEdges, int) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - refLevelEdge[mask] = 
numpy.minimum(refLevelEdge[mask], maxLevelCell[cellIndices]+1) - + refLevelEdge[mask] = numpy.minimum(refLevelEdge[mask], + maxLevelCell[cellIndices]+1) zOffsetEdge = numpy.zeros(nEdges, float) # add the average of zInterfaceCell at each adjacent cell denom = numpy.zeros(nEdges, float) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - zOffsetEdge[mask] += zInterfaceCell[cellIndices,refLevelEdge[mask]] + zOffsetEdge[mask] += zInterfaceCell[cellIndices, + refLevelEdge[mask]] denom[mask] += 1.0 mask = denom > 0. zOffsetEdge[mask] /= denom[mask] # subtract the depth of zInterfaceEdge at the level of the bottom - zOffsetEdge -= zInterfaceEdge[numpy.arange(nEdges),refLevelEdge] + zOffsetEdge -= zInterfaceEdge[numpy.arange(nEdges), refLevelEdge] for iLevel in range(nLevels+1): - zInterfaceEdge[:,iLevel] += zOffsetEdge - - return (zInterfaceCell, zInterfaceEdge) + zInterfaceEdge[:, iLevel] += zOffsetEdge -#}}} + return (zInterfaceCell, zInterfaceEdge) # }}} def _build_location_list_xyz(nc_file, suffix, output_32bit, lonlat): # {{{ @@ -1056,7 +1090,7 @@ def _fix_periodic_vertices_1D(vertices, verticesOnCell, validVertices, coordNVerticesToAdd = numpy.count_nonzero(coordValid) - print coordNVerticesToAdd + print(coordNVerticesToAdd) coordVerticesToAdd = numpy.arange(coordNVerticesToAdd) + nVertices coordV = coordVertex[coordVOC] From 487b17355472d679a58e4501e63507a032d6a443 Mon Sep 17 00:00:00 2001 From: Stephen Price Date: Mon, 3 Dec 2018 14:21:31 -0800 Subject: [PATCH 062/180] Add error handling for sphereRadius=0 Add simple error handling to avoid division by zero when sphereRadius field is set to 0, which is commonly the case for planar meshes. Setting to 1 in this case ensures that the grid_area values in the output SCRIP file take on the same values as the areaCell field in the input MPAS input mesh file. 
--- .../create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py index 0dcd02d62..5aedbc1a4 100755 --- a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py +++ b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py @@ -53,6 +53,10 @@ areaCell = fin.variables['areaCell'][:] sphereRadius = float(fin.sphere_radius) +if sphereRadius <= 0: + print " -- field 'sphereRadius' = 0 (using planar mesh?); setting = 1 " + sphereRadius = 1.0 + if options.landiceMasks: landIceMask = fin.variables['landIceMask'][:] From 5f964cbc2021fae618534a74faac8b4400b18638 Mon Sep 17 00:00:00 2001 From: Stephen Price Date: Tue, 4 Dec 2018 13:40:57 -0800 Subject: [PATCH 063/180] add warnings for sphereRadius=0 and on_a_sphere=0 This updates previous error handling for this script so that the following occurs: 1) if 'on_a_sphere' attribute = 'NO' (e.g., for planar, polar stereo meshes), scrip file generation code throws warning that areas between the spherical and planar meshes are likely to be unequal 2) if 'sphereRadius' attribute = 0 (e.g., for periodic hex mesh), scrip file generation code throws warning that conservative mapping file generation will fail (will result in divide by zero giving 'grid_area' values of inf in output scrip file) --- .../create_SCRIP_file_from_MPAS_mesh.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py index 5aedbc1a4..813fe30dd 100755 --- a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py +++ b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py @@ -52,10 +52,14 @@ maxVertices = len(fin.dimensions['maxEdges']) areaCell = fin.variables['areaCell'][:] sphereRadius = 
float(fin.sphere_radius) +on_a_sphere = str(fin.on_a_sphere) + if sphereRadius <= 0: - print " -- field 'sphereRadius' = 0 (using planar mesh?); setting = 1 " - sphereRadius = 1.0 + print " -- WARNING: conservative remapping is NOT possible when 'sphereRadius' <= 0 because 'grid_area' field will be infinite (from division by 0)" + +if on_a_sphere == "NO": + print " -- WARNING: 'on_a_sphere' attribute is 'NO', which means that there may be some disagreement regarding area between the planar (source) and spherical (target) mesh" if options.landiceMasks: landIceMask = fin.variables['landIceMask'][:] From 88de07668c8f4a487de43e98940f113f96ae63e9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 1 Dec 2018 14:47:14 -0700 Subject: [PATCH 064/180] Add support for creating topo geometry In this mode, this requires: * extracting edge polygons, not just cell polygons * adding a boundaryMask field indicating parts of edge polygons with no neighboring cell * extracting fields on cells as point fields, not cell fields in ParaView * Sampling fields that are funcitons of depth at the topgraphy index (e.g. maxLevelCell) --- .../paraview_vtk_field_extractor.py | 160 +++++++++++---- .../paraview_vtk_field_extractor/utils.py | 190 ++++++++++++++++-- 2 files changed, 293 insertions(+), 57 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index cb2b7576e..288b6f91f 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -9,7 +9,7 @@ It can extract a field across multiple files by passing in a regular expression for the filename patter. 
As an example, one can run the script using: -`./paraview_vtk_field_extractor.py -v areaCell,latVertex -f "hist.comp.*.nc"` + ./paraview_vtk_field_extractor.py -v areaCell,latVertex -f "hist.comp.*.nc" To extract a time series of areaCell,latVertex that spans multiple files. By default, time-independent fields on cells are written to a file @@ -31,7 +31,7 @@ a single index, or a comma-separated list of indices or a range of indices indices (separated by 1 or 2 colons). For example, -`-d maxEdges= nVertLeves=0:10:2 nParticles=0,2,4,6,8` + -d maxEdges= nVertLeves=0:10:2 nParticles=0,2,4,6,8 will ignore any fields with dimension maxEdges, extract every other layer from the first 10 vertical levels (each into its own field) and extract the five @@ -40,19 +40,47 @@ An index array can also be specified in this way (and these can be mixed with integer indices in a comma-separated list but not in a colon-separated range): -`-d nVertLeves=0,maxLevelCell` + -d nVertLeves=0,maxLevelCell will extract fields from the first vertical level and the vertical level with index given by maxLevelCell. +The extractor includes optional support for extracting geometry appropriate +for displaying variables at the depth of a topographic feature (typically the +top or bottom of the domain) for MPAS components with a spatially variable +top or bottom index (e.g. `maxLevelCell` in MPAS-Ocean). This is accomplished +with flags such as: + + --topo_dim=nVertLevels --topo_cell_index=maxLevelCell + +Fields on cells are sampled at the topographic index and the geometry includes +polygons corresponding to edges so that vertical faces between adjacent cells +can be displayed. Fields are extracted as normal except that they are sampled +as point data rather than cell data, allowing computations in ParaView to +display the topography. 
A mask field is also included indicating which parts +of edge polygons correspond to the boundary of the domain (boundaryMask == 1) +and which parts of cell and edge polygons are interior (boundaryMask == 0). +Together, this can be used to plot topography by using a calculator filter like +the following: + + coords*(1.0 - 100.0*(1 - boundaryMask)*bottomDepth/mag(coords)) + +If this is entered into a Calculator Filter in ParaView with the "coordinate +result" box checked, the result will to display the MPAS-Ocean topography, +exaggerated by a factor of 100, with a value of zero along boundary points of +edge polygons (a "water-tight" surface). + Requirements: This script requires access to the following non standard modules: -pyevtk (available from opengeostat channel) -netCDF4 +evtk (available from e3sm channel) +netcdf4 numpy +for python 2.7: +future + Optional modules: -progressbar +progressbar2 """ from __future__ import absolute_import, division, print_function, \ unicode_literals @@ -78,7 +106,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, out_dir, blocking, all_dim_vals, blockDimName, variable_list, vertices, connectivity, offsets, valid_mask, output_32bit, combine_output, append, - xtimeName): # {{{ + xtimeName, topo_dim=None, topo_cell_indices=None, + cell_to_point_map=None, boundary_mask=None): # {{{ if len(variable_list) == 0: return @@ -97,6 +126,15 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: blockDim = len(time_series_file.dimensions[blockDimName]) + if boundary_mask is not None: + variable_list.append('boundaryMask') + all_dim_vals['boundaryMask'] = None + pointData = True + cellData = False + else: + pointData = False + cellData = True + # Pre-compute the number of blocks nBlocks = 1 + blockDim // blocking @@ -109,7 +147,9 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, nHyperSlabs = 0 for iVar in range(nVars): var_name = variable_list[iVar] - if xtimeName is 
not None: + if boundary_mask is not None and var_name == 'boundaryMask': + var_has_time_dim[iVar] = False + elif xtimeName is not None: if var_name in time_series_file.variables: var_has_time_dim[iVar] = \ 'Time' in time_series_file.variables[var_name].dimensions @@ -128,6 +168,14 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, any_var_has_time_dim = np.any(var_has_time_dim) + if topo_dim is not None: + if (mesh_file is not None) and (topo_dim in mesh_file.dimensions): + nTopoLevels = len(mesh_file.dimensions[topo_dim]) + else: + nTopoLevels = len(time_series_file.dimensions[topo_dim]) + else: + nTopoLevels = None + try: os.makedirs(out_dir) except OSError: @@ -177,8 +225,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, nPoints, nPolygons, outType, - cellData=True, - pointData=False, + cellData=cellData, + pointData=pointData, xtime=None) prev_file = "" @@ -200,8 +248,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, "{}".format(xtimeName, time_series_file)) var = time_series_file.variables[xtimeName] if len(var.shape) == 2: - xtime = \ - ''.join(var[local_time_indices[time_index], :]).strip() + xtime = var[local_time_indices[time_index], + :].tostring().decode('utf-8').strip() date = datetime(int(xtime[0:4]), int(xtime[5:7]), int(xtime[8:10]), int(xtime[11:13]), int(xtime[14:16]), int(xtime[17:19])) @@ -238,8 +286,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, nPoints, nPolygons, outType, - cellData=True, - pointData=False, + cellData=cellData, + pointData=pointData, xtime=xtime) # add time step to pdv file @@ -271,21 +319,34 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: dim_vals = None - field = np.zeros(blockDim, dtype=outType) - - for iBlock in np.arange(0, nBlocks): - blockStart = iBlock * blocking - blockEnd = min((iBlock + 1) * blocking, blockDim) - cellIndices = np.arange(blockStart, blockEnd) - field_block = \ - 
utils.read_field(var_name, mesh_file, - time_series_file, dim_vals, - local_time_indices[time_index], - cellIndices, outType) - - field[blockStart:blockEnd] = field_block - - field = field[valid_mask] + if boundary_mask is not None and var_name == 'boundaryMask': + field = np.array(boundary_mask, dtype=outType) + else: + field = np.zeros(blockDim, dtype=outType) + + for iBlock in np.arange(0, nBlocks): + blockStart = iBlock * blocking + blockEnd = min((iBlock + 1) * blocking, blockDim) + block_indices = np.arange(blockStart, blockEnd) + if topo_cell_indices is None: + block_topo_cell_indices = None + else: + block_topo_cell_indices = \ + topo_cell_indices[block_indices] + field_block = utils.read_field( + var_name, mesh_file, time_series_file, + dim_vals, local_time_indices[time_index], + block_indices, outType, topo_dim=topo_dim, + topo_cell_indices=block_topo_cell_indices, + nTopoLevels=nTopoLevels) + + field[blockStart:blockEnd] = field_block + + field = field[valid_mask] + + if cell_to_point_map is not None: + # map field from cells to points + field = field[cell_to_point_map] vtkFile.appendData(field) @@ -366,6 +427,12 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, "for files with a Time dimension but no xtime" "variable (e.g. 
mesh file)", required=False) + parser.add_argument("--topo_dim", dest="topo_dim", required=False, + help="Dimension and range for topography dimension") + parser.add_argument("--topo_cell_index", dest="topo_cell_index", + required=False, + help="Index array indicating the bottom of the domain " + "(default is the topo_dim-1 for all cells)") args = parser.parse_args() if not args.output_32bit: @@ -399,12 +466,20 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, mesh_file = utils.open_netcdf(args.mesh_filename) else: mesh_file = None - extra_dims = utils.parse_extra_dims(args.dimension_list, time_series_file, - mesh_file) + extra_dims, topo_cell_indices = \ + utils.parse_extra_dims(args.dimension_list, time_series_file, + mesh_file, topo_dim=args.topo_dim, + topo_cell_index_name=args.topo_cell_index) + basic_dims = ['nCells', 'nEdges', 'nVertices', 'Time'] + include_dims = ['nCells', 'nEdges', 'nVertices'] + if args.topo_dim is not None: + basic_dims.append(args.topo_dim) + include_dims = ['nCells'] + (all_dim_vals, cellVars, vertexVars, edgeVars) = \ - utils.setup_dimension_values_and_sort_vars(time_series_file, mesh_file, - args.variable_list, - extra_dims) + utils.setup_dimension_values_and_sort_vars( + time_series_file, mesh_file, args.variable_list, extra_dims, + basic_dims=basic_dims) time_series_file.close() if(mesh_file is not None): mesh_file.close() @@ -419,8 +494,15 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, mesh_file = utils.open_netcdf(args.mesh_filename) # Build cell geometry - (vertices, connectivity, offsets, valid_mask) = \ - utils.build_cell_geom_lists(mesh_file, use_32bit, args.lonlat) + if args.topo_dim is None: + (vertices, connectivity, offsets, valid_mask) = \ + utils.build_cell_geom_lists(mesh_file, use_32bit, args.lonlat) + cell_to_point_map = None + boundary_mask = None + else: + (vertices, connectivity, offsets, valid_mask, cell_to_point_map, + boundary_mask) = 
utils.build_topo_point_and_polygon_lists( + mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: mesh_file.close() @@ -430,7 +512,11 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, args.out_dir, args.blocking, all_dim_vals, 'nCells', cellVars, vertices, connectivity, offsets, valid_mask, use_32bit, - args.combine_output, args.append, args.xtime) + args.combine_output, args.append, args.xtime, + topo_dim=args.topo_dim, + topo_cell_indices=topo_cell_indices, + cell_to_point_map=cell_to_point_map, + boundary_mask=boundary_mask) if separate_mesh_file: mesh_file.close() diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index a4f63180e..c53141dd2 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -107,7 +107,7 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ if len(xtime.shape) == 2: xtime = xtime[:, :] for index in range(xtime.shape[0]): - local_times.append(''.join(xtime[index, :])) + local_times.append(xtime[index, :].tostring()) else: local_times = xtime[:] @@ -308,34 +308,50 @@ def parse_index_string(index_string, dim_size): # {{{ def parse_extra_dims(dimension_list, time_series_file, mesh_file, - max_index_count=None): # {{{ + topo_dim=None, topo_cell_index_name=None, + max_index_count=None): + # {{{ ''' Parses a list of dimensions and corresponding indices separated by equals signs. Optionally, a max_index_count (typically 1) can be provided, indicating that indices beyond max_index_count-1 will be ignored in each dimension. Optionally, topo_dim contains the name of a dimension associated with the surface or bottom topography (e.g. 
nVertLevels for MPAS-Ocean) - If too_dim is provided, topo_cell_indices_name can optionally be either - a constant value for the index vertical index to the topography or - the name of a field with dimension nCells that contains the vertical index - of the topography. + If topo_dim is provided, topo_cell_index_name can optionally be either + a constant value for the vertical index to the topography or the name of a + field with dimension nCells that contains the vertical index of the + topography. ''' - if not dimension_list: - return {} - extra_dims = {} - for dim_item in dimension_list: - (dimName, index_string) = dim_item.split('=') - indices = parse_extra_dim(dimName, index_string, time_series_file, - mesh_file) - if indices is not None: - if max_index_count is None or len(indices) <= max_index_count: - extra_dims[dimName] = indices + topo_cell_indices = None + + if dimension_list is not None: + for dim_item in dimension_list: + (dimName, index_string) = dim_item.split('=') + indices = parse_extra_dim(dimName, index_string, time_series_file, + mesh_file) + if indices is not None: + if max_index_count is None or len(indices) <= max_index_count: + extra_dims[dimName] = indices + else: + extra_dims[dimName] = indices[0:max_index_count] + + if topo_dim is not None: + if topo_cell_index_name is not None: + if (mesh_file is not None) and \ + (topo_cell_index_name in mesh_file.variables): + topo_cell_indices = \ + mesh_file.variables[topo_cell_index_name][:]-1 else: - extra_dims[dimName] = indices[0:max_index_count] + topo_cell_indices = \ + time_series_file.variables[topo_cell_index_name][:]-1 + else: + index = len(mesh_file.dimensions[topo_dim])-1 + nCells = len(mesh_file.dimensions['nCells']) + topo_cell_indices = index*numpy.ones(nCells, int) - return extra_dims + return extra_dims, topo_cell_indices # }}} @@ -459,7 +475,7 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): # Setting dimension values: indices = [] for dim in field_dims: - if 
dim not in ['Time', 'nCells', 'nEdges', 'nVertices']: + if dim not in basic_dims: indices.append(extra_dims[dim]) if len(indices) == 0: dim_vals = None @@ -611,6 +627,128 @@ def write_vtp_header(path, prefix, active_var_index, var_indices, return vtkFile # }}} +def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ + + vertices, _, _, _ = build_cell_geom_lists(nc_file, output_32bit, lonlat) + + xVertex, yVertex, zVertex = vertices + + nCells = len(nc_file.dimensions['nCells']) + nEdges = len(nc_file.dimensions['nEdges']) + + nEdgesOnCell = nc_file.variables['nEdgesOnCell'][:] + verticesOnCell = nc_file.variables['verticesOnCell'][:, :]-1 + verticesOnEdge = nc_file.variables['verticesOnEdge'][:, :]-1 + edgesOnCell = nc_file.variables['edgesOnCell'][:, :]-1 + cellsOnEdge = nc_file.variables['cellsOnEdge'][:, :]-1 + + # 4 points for each edge face + nPoints = 4*nEdges + # 1 polygon for each edge and cell + nPolygons = nEdges + nCells + + if output_32bit: + dtype = 'f4' + else: + dtype = 'f8' + + X = numpy.zeros(nPoints, dtype) + Y = numpy.zeros(nPoints, dtype) + Z = numpy.zeros(nPoints, dtype) + + # a polygon with nEdgesOnCell vertices per cell plus a polygon with 4 + # vertices per edge + totalEdgesOnCells = numpy.sum(nEdgesOnCell) + connectivity = numpy.zeros(4*nEdges + totalEdgesOnCells, dtype=int) + offsets = numpy.zeros(nPolygons, dtype=int) + + outIndex = 0 + + print("Build edge connectivity...") + + # the points on each edge face are simply the points in order + connectivity[0:4*nEdges] = numpy.arange(4*nEdges) + # the offset to the next polygon in the connectivity array is 4 more points + offsets[0:nEdges] = 4*numpy.arange(1, nEdges+1) + + # The points on an edge are vertex 0, 1, 1, 0 on that edge, making a + # vertical rectangle if the points are offset + iEdges, voe = numpy.meshgrid(numpy.arange(nEdges), [0, 1, 1, 0], + indexing='ij') + iVerts = verticesOnEdge[iEdges, voe].ravel() + X = xVertex[iVerts] + Y = yVertex[iVerts] + Z = 
zVertex[iVerts] + + # we want to know the cells corresponding to each point. The first two + # points correspond to the first cell, the second two to the second cell + # (if any). + iEdges, coe = numpy.meshgrid(numpy.arange(nEdges), [0, 0, 1, 1], + indexing='ij') + iCells = cellsOnEdge[iEdges, coe] + + # If there *is* a second cell on the edge, it's an interior edge. If not, + # it's a boundary edge + boundary_mask = iCells == -1 + assert(numpy.all(coe[boundary_mask] == 1)) + + # For boundary edges, we'll point to the only adjacent cell for both the + # first and second cell on the edge (but boundary_mask) will keep track of + # which is which for later. + coe[boundary_mask] = 0 + cell_to_point_map = cellsOnEdge[iEdges, coe].ravel() + + # Build cells + if use_progress_bar: + widgets = ['Build cell connectivity: ', Percentage(), ' ', Bar(), ' ', + ETA()] + bar = ProgressBar(widgets=widgets, maxval=nCells).start() + else: + print("Build cell connectivity...") + + outIndex = 4*nEdges + + for iCell in range(nCells): + neoc = nEdgesOnCell[iCell] + eocs = edgesOnCell[iCell, 0:neoc] + vocs = verticesOnCell[iCell, 0:neoc] + for index in range(neoc): + iVert = vocs[index] + iEdge = eocs[index] + # which vertex on the edge corresponds to iVert? 
+ coes = cellsOnEdge[iEdge, :] + voes = verticesOnEdge[iEdge, :] + + if coes[0] == iCell: + if voes[0] == iVert: + voe = 0 + else: + voe = 1 + else: + if voes[0] == iVert: + voe = 3 + else: + voe = 2 + + connectivity[outIndex + index] = 4*iEdge + voe + + outIndex += neoc + offsets[nEdges + iCell] = outIndex + + if use_progress_bar: + bar.update(iCell) + + if use_progress_bar: + bar.finish() + + valid_mask = numpy.ones(nCells, bool) + + return (X, Y, Z), connectivity, offsets, valid_mask, \ + cell_to_point_map, boundary_mask.ravel() + +# }}} + + def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ print("Build geometry for fields on cells...") @@ -786,7 +924,8 @@ def get_field_sign(field_name): def read_field(var_name, mesh_file, time_series_file, extra_dim_vals, - time_index, block_indices, outType, sign=1): # {{{ + time_index, block_indices, outType, sign=1, + topo_dim=None, topo_cell_indices=None, nTopoLevels=None): # {{{ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays): # {{{ @@ -816,6 +955,15 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, dim_vals[4]], dtype=outType) + if topo_dim is not None and topo_dim in field_var.dimensions: + if len(temp_field.shape) != 2: + raise ValueError('Field with dimensions {} not supported in ' + 'topogrpahy extraction mode.'.format( + field_var.dimensions)) + # sample the depth-dependent field at the index of the topography + temp_field = temp_field[numpy.arange(temp_field.shape[0]), + topo_cell_indices] + outDims = len(temp_field.shape) if outDims <= 0 or outDims > 4: @@ -855,6 +1003,8 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, elif dim in ['nCells', 'nEdges', 'nVertices']: dim_vals.append(block_indices) temp_shape = temp_shape + (len(block_indices),) + elif topo_dim is not None and dim == topo_dim: + dim_vals.append(numpy.arange(nTopoLevels)) else: extra_dim_val = extra_dim_vals[extra_dim_index] try: From 
4680ccea04143a0ffdd1da59869872c37f2333e9 Mon Sep 17 00:00:00 2001 From: "Phillip J. Wolfram" Date: Wed, 5 Dec 2018 09:27:16 -0700 Subject: [PATCH 065/180] Fixes python2/3 compatability for unicode This resolves implicit casts of python strings to unicode by explicit casts to `str` to mitigate errors in calls to vtk of form ``` File "/lustre/scratch4/turquoise/.mdt1/pwolfram/pwolfram/Delaware_inundation/USDEQU300cr1/MPAS-Tools/visualization/paraview_vtk_field_extractor/utils.py", line 592, in write_vtp_header vtkFile.openElement("Points") File "/users/pwolfram/lib/python2.7/site-packages/evtk/vtk.py", line 472, in openElement self.xml.openElement(tagName) File "/users/pwolfram/lib/python2.7/site-packages/evtk/xml.py", line 48, in openElement self.stream.write(str.encode(st)) TypeError: descriptor 'encode' requires a 'str' object but received a 'unicode' ``` --- .../paraview_vtk_field_extractor/utils.py | 40 ++++++++++--------- 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index c53141dd2..c9b806ed1 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -580,42 +580,46 @@ def write_vtp_header(path, prefix, active_var_index, var_indices, vtkFile = VtkFile("{}/{}".format(path, prefix), VtkPolyData) if xtime is not None: - vtkFile.openElement("metadata") - vtkFile.openElement("xtime") - vtkFile.xml.addText(xtime) - vtkFile.closeElement("xtime") - vtkFile.closeElement("metadata") + vtkFile.openElement(str("metadata")) + vtkFile.openElement(str("xtime")) + vtkFile.xml.addText(str(xtime)) + vtkFile.closeElement(str("xtime")) + vtkFile.closeElement(str("metadata")) vtkFile.openElement(vtkFile.ftype.name) vtkFile.openPiece(npoints=nPoints, npolys=nPolygons) - vtkFile.openElement("Points") - vtkFile.addData("points", vertices) - vtkFile.closeElement("Points") + 
vtkFile.openElement(str("Points")) + vtkFile.addData(str("points"), vertices) + vtkFile.closeElement(str("Points")) - vtkFile.openElement("Polys") - vtkFile.addData("connectivity", connectivity) - vtkFile.addData("offsets", offsets) - vtkFile.closeElement("Polys") + vtkFile.openElement(str("Polys")) + vtkFile.addData(str("connectivity"), connectivity) + vtkFile.addData(str("offsets"), offsets) + vtkFile.closeElement(str("Polys")) if(cellData): - vtkFile.openData("Cell", scalars=variable_list[active_var_index]) + vtkFile.openData(str("Cell"), + scalars=[str(var) for var in + variable_list[active_var_index]]) for iVar in var_indices: var_name = variable_list[iVar] (out_var_names, dim_list) = \ get_hyperslab_name_and_dims(var_name, all_dim_vals[var_name]) for out_var_name in out_var_names: - vtkFile.addHeader(out_var_name, outType, nPolygons, 1) - vtkFile.closeData("Cell") + vtkFile.addHeader(str(out_var_name), outType, nPolygons, 1) + vtkFile.closeData(str("Cell")) if(pointData): - vtkFile.openData("Point", scalars=variable_list[active_var_index]) + vtkFile.openData(str("Point"), + scalars=[str(var) for var in + variable_list[active_var_index]]) for iVar in var_indices: var_name = variable_list[iVar] (out_var_names, dim_list) = \ get_hyperslab_name_and_dims(var_name, all_dim_vals[var_name]) for out_var_name in out_var_names: - vtkFile.addHeader(out_var_name, outType, nPoints, 1) - vtkFile.closeData("Point") + vtkFile.addHeader(str(out_var_name), outType, nPoints, 1) + vtkFile.closeData(str("Point")) vtkFile.closePiece() vtkFile.closeElement(vtkFile.ftype.name) From 45dca9e70954799225e68c9882e6fb029d380c78 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 5 Dec 2018 15:42:58 -0700 Subject: [PATCH 066/180] Fix periodicity with topography --- .../paraview_vtk_field_extractor.py | 6 +- .../paraview_vtk_field_extractor/utils.py | 159 +++++++++++------- 2 files changed, 103 insertions(+), 62 deletions(-) diff --git 
a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 288b6f91f..6cf7a77ec 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -63,12 +63,12 @@ Together, this can be used to plot topography by using a calculator filter like the following: - coords*(1.0 - 100.0*(1 - boundaryMask)*bottomDepth/mag(coords)) + coords*(1.0 + 100.0/mag(coords)*((1 - boundaryMask)*(-bottomDepth) + 10.0*boundaryMask)) If this is entered into a Calculator Filter in ParaView with the "coordinate result" box checked, the result will to display the MPAS-Ocean topography, -exaggerated by a factor of 100, with a value of zero along boundary points of -edge polygons (a "water-tight" surface). +exaggerated by a factor of 100, with a value equivalent to 10 m along boundary +points of edge polygons (a "water-tight" surface). 
Requirements: This script requires access to the following non standard modules: diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index c9b806ed1..826ec714c 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -633,74 +633,48 @@ def write_vtp_header(path, prefix, active_var_index, var_indices, def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ - vertices, _, _, _ = build_cell_geom_lists(nc_file, output_32bit, lonlat) + if output_32bit: + dtype = 'f4' + else: + dtype = 'f8' - xVertex, yVertex, zVertex = vertices + xVertex, yVertex, zVertex = \ + _build_location_list_xyz(nc_file, 'Vertex', output_32bit, lonlat) nCells = len(nc_file.dimensions['nCells']) nEdges = len(nc_file.dimensions['nEdges']) + maxEdges = len(nc_file.dimensions['maxEdges']) nEdgesOnCell = nc_file.variables['nEdgesOnCell'][:] verticesOnCell = nc_file.variables['verticesOnCell'][:, :]-1 - verticesOnEdge = nc_file.variables['verticesOnEdge'][:, :]-1 edgesOnCell = nc_file.variables['edgesOnCell'][:, :]-1 - cellsOnEdge = nc_file.variables['cellsOnEdge'][:, :]-1 + verticesOnEdge = nc_file.variables['verticesOnEdge'][:] - 1 + cellsOnEdge = nc_file.variables['cellsOnEdge'][:] - 1 # 4 points for each edge face nPoints = 4*nEdges # 1 polygon for each edge and cell nPolygons = nEdges + nCells - if output_32bit: - dtype = 'f4' - else: - dtype = 'f8' - X = numpy.zeros(nPoints, dtype) Y = numpy.zeros(nPoints, dtype) Z = numpy.zeros(nPoints, dtype) - # a polygon with nEdgesOnCell vertices per cell plus a polygon with 4 - # vertices per edge - totalEdgesOnCells = numpy.sum(nEdgesOnCell) - connectivity = numpy.zeros(4*nEdges + totalEdgesOnCells, dtype=int) - offsets = numpy.zeros(nPolygons, dtype=int) - outIndex = 0 - print("Build edge connectivity...") - - # the points on each edge face are simply the points in order - connectivity[0:4*nEdges] = 
numpy.arange(4*nEdges) - # the offset to the next polygon in the connectivity array is 4 more points - offsets[0:nEdges] = 4*numpy.arange(1, nEdges+1) - # The points on an edge are vertex 0, 1, 1, 0 on that edge, making a # vertical rectangle if the points are offset iEdges, voe = numpy.meshgrid(numpy.arange(nEdges), [0, 1, 1, 0], indexing='ij') iVerts = verticesOnEdge[iEdges, voe].ravel() - X = xVertex[iVerts] - Y = yVertex[iVerts] - Z = zVertex[iVerts] + X[:] = xVertex[iVerts] + Y[:] = yVertex[iVerts] + Z[:] = zVertex[iVerts] + vertices = (X, Y, Z) - # we want to know the cells corresponding to each point. The first two - # points correspond to the first cell, the second two to the second cell - # (if any). - iEdges, coe = numpy.meshgrid(numpy.arange(nEdges), [0, 0, 1, 1], - indexing='ij') - iCells = cellsOnEdge[iEdges, coe] - - # If there *is* a second cell on the edge, it's an interior edge. If not, - # it's a boundary edge - boundary_mask = iCells == -1 - assert(numpy.all(coe[boundary_mask] == 1)) - - # For boundary edges, we'll point to the only adjacent cell for both the - # first and second cell on the edge (but boundary_mask) will keep track of - # which is which for later. 
- coe[boundary_mask] = 0 - cell_to_point_map = cellsOnEdge[iEdges, coe].ravel() + verticesOnPolygon = -1*numpy.ones((nPolygons, maxEdges), int) + verticesOnPolygon[0:nEdges, 0:4] = \ + numpy.arange(4*nEdges).reshape(nEdges, 4) # Build cells if use_progress_bar: @@ -710,7 +684,7 @@ def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ else: print("Build cell connectivity...") - outIndex = 4*nEdges + outIndex = nEdges for iCell in range(nCells): neoc = nEdgesOnCell[iCell] @@ -734,10 +708,9 @@ def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ else: voe = 2 - connectivity[outIndex + index] = 4*iEdge + voe + verticesOnPolygon[nEdges+iCell, index] = 4*iEdge + voe outIndex += neoc - offsets[nEdges + iCell] = outIndex if use_progress_bar: bar.update(iCell) @@ -745,12 +718,80 @@ def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ if use_progress_bar: bar.finish() - valid_mask = numpy.ones(nCells, bool) + validVerts = verticesOnPolygon >= 0 - return (X, Y, Z), connectivity, offsets, valid_mask, \ - cell_to_point_map, boundary_mask.ravel() + if lonlat: + lonEdge = numpy.rad2deg(nc_file.variables['lonEdge'][:]) + latEdge = numpy.rad2deg(nc_file.variables['latEdge'][:]) + lonCell = numpy.rad2deg(nc_file.variables['lonCell'][:]) + latCell = numpy.rad2deg(nc_file.variables['latCell'][:]) + lonPolygon = numpy.append(lonEdge, lonCell) + latPolygon = numpy.append(latEdge, latCell) -# }}} + vertices, verticesOnPolygon = _fix_lon_lat_vertices(vertices, + verticesOnPolygon, + validVerts, + lonPolygon) + + if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': + if lonlat: + xcoord = lonPolygon + ycoord = latPolygon + else: + xEdge = numpy.rad2deg(nc_file.variables['xEdge'][:]) + yEdge = numpy.rad2deg(nc_file.variables['yEdge'][:]) + xCell = numpy.rad2deg(nc_file.variables['xCell'][:]) + yCell = numpy.rad2deg(nc_file.variables['yCell'][:]) + xcoord = numpy.append(xEdge, xCell) + ycoord = 
numpy.append(yEdge, yCell) + + vertices, verticesOnPolygon = _fix_periodic_vertices(vertices, + verticesOnPolygon, + validVerts, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) + + nPoints = len(vertices[0]) + + # we want to know the cells corresponding to each point. The first two + # points correspond to the first cell, the second two to the second cell + # (if any). + cell_to_point_map = -1*numpy.ones((nPoints), int) + boundary_mask = numpy.zeros((nPoints), bool) + + # first cell on edge always exists + coe = cellsOnEdge[:, 0].copy() + for index in range(2): + voe = verticesOnPolygon[0:nEdges, index] + cell_to_point_map[voe] = coe + boundary_mask[voe] = False + + # second cell on edge may not exist + coe = cellsOnEdge[:, 1].copy() + mask = coe == -1 + # use the first cell if the second doesn't exist + coe[mask] = cellsOnEdge[:, 0][mask] + for index in range(2, 4): + voe = verticesOnPolygon[0:nEdges, index] + cell_to_point_map[voe] = coe + boundary_mask[voe] = mask + + # for good measure, make sure vertices on cell are also accounted for + for index in range(maxEdges): + iCells = numpy.arange(nCells) + voc = verticesOnPolygon[nEdges:nEdges+nCells, index] + mask = index < nEdgesOnCell + cell_to_point_map[voc[mask]] = iCells[mask] + boundary_mask[voc[mask]] = False + + connectivity = verticesOnPolygon[validVerts] + validCount = numpy.sum(numpy.array(validVerts, int), axis=1) + offsets = numpy.cumsum(validCount, dtype=int) + valid_mask = numpy.ones(nCells, bool) + + return vertices, connectivity, offsets, valid_mask, \ + cell_to_point_map, boundary_mask.ravel() # }}} def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ @@ -891,10 +932,10 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVerts = validVerts[valid_mask, :] if lonlat: - vertices, cellsOnVertex = _fix_lon_lat_vertices(vertices, - vertsOnCell, - validVerts, - lonEdge[valid_mask]) + vertices, vertsOnCell = _fix_lon_lat_vertices(vertices, + vertsOnCell, + 
validVerts, + lonEdge[valid_mask]) if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': if lonlat: xcoord = lonEdge[valid_mask] @@ -903,12 +944,12 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ xcoord = nc_file.variables['xEdge'][valid_mask] ycoord = nc_file.variables['yEdge'][valid_mask] - vertices, cellsOnVertex = _fix_periodic_vertices(vertices, - vertsOnCell, - validVerts, - xcoord, ycoord, - nc_file.x_period, - nc_file.y_period) + vertices, vertsOnCell = _fix_periodic_vertices(vertices, + vertsOnCell, + validVerts, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) connectivity = vertsOnCell[validVerts] validCount = numpy.sum(numpy.array(validVerts, int), axis=1) From 1dd9c0f4ac3087a89d419d1937e0f81922c99b9e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 6 Dec 2018 01:12:04 -0700 Subject: [PATCH 067/180] Remove debug print statement --- visualization/paraview_vtk_field_extractor/utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 826ec714c..3e1364c90 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -1285,8 +1285,6 @@ def _fix_periodic_vertices_1D(vertices, verticesOnCell, validVertices, coordNVerticesToAdd = numpy.count_nonzero(coordValid) - print(coordNVerticesToAdd) - coordVerticesToAdd = numpy.arange(coordNVerticesToAdd) + nVertices coordV = coordVertex[coordVOC] verticesOnCell[coordVerticesToChange] = coordVerticesToAdd From 50300ae871fb183421517b909fc4e6d1867f8829 Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Fri, 4 Jan 2019 14:24:55 -0700 Subject: [PATCH 068/180] Add meshDensity field to doubly-periodic hexagonal grid.nc files The MPAS mesh spec v1.0 includes a meshDensity field, and this commit adds that field to the output of the doubly-periodic mesh generation program. 
--- mesh_tools/periodic_hex/module_write_netcdf.F | 9 +++++++++ mesh_tools/periodic_hex/periodic_grid.F | 4 ++++ 2 files changed, 13 insertions(+) diff --git a/mesh_tools/periodic_hex/module_write_netcdf.F b/mesh_tools/periodic_hex/module_write_netcdf.F index ba430aedd..0fcdd5194 100644 --- a/mesh_tools/periodic_hex/module_write_netcdf.F +++ b/mesh_tools/periodic_hex/module_write_netcdf.F @@ -46,6 +46,7 @@ module write_netcdf integer :: wrVarIDedgesOnVertex integer :: wrVarIDcellsOnVertex integer :: wrVarIDkiteAreasOnVertex + integer :: wrVarIDmeshDensity integer :: wrVarIDfEdge integer :: wrVarIDfVertex integer :: wrVarIDh_s @@ -227,6 +228,8 @@ subroutine write_netcdf_init( & dimlist( 1) = wrDimIDvertexDegree dimlist( 2) = wrDimIDnVertices nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) + dimlist( 1) = wrDimIDnCells + nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) dimlist( 1) = wrDimIDnEdges nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) dimlist( 1) = wrDimIDnVertices @@ -312,6 +315,7 @@ subroutine write_netcdf_fields( & edgesOnVertex, & cellsOnVertex, & kiteAreasOnVertex, & + meshDensity, & fEdge, & fVertex, & h_s, & @@ -366,6 +370,7 @@ subroutine write_netcdf_fields( & integer, dimension(:,:), intent(in) :: edgesOnVertex integer, dimension(:,:), intent(in) :: cellsOnVertex real (kind=8), dimension(:,:), intent(in) :: kiteAreasOnVertex + real (kind=8), dimension(:), intent(in) :: meshDensity real (kind=8), dimension(:), intent(in) :: fEdge real (kind=8), dimension(:), intent(in) :: fVertex real (kind=8), dimension(:), intent(in) :: h_s @@ -548,6 +553,10 @@ subroutine write_netcdf_fields( & count2( 1) = 3 count2( 2) = wrLocalnVertices nferr = nf_put_vara_double(wr_ncid, wrVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) + + start1(1) = 1 + count1( 1) = wrLocalnCells + nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, 
meshDensity) start1(1) = 1 count1( 1) = wrLocalnEdges diff --git a/mesh_tools/periodic_hex/periodic_grid.F b/mesh_tools/periodic_hex/periodic_grid.F index ef208a598..04ef7658f 100644 --- a/mesh_tools/periodic_hex/periodic_grid.F +++ b/mesh_tools/periodic_hex/periodic_grid.F @@ -22,6 +22,7 @@ program hexagonal_periodic_grid real (kind=8), allocatable, dimension(:) :: latCell, lonCell, xCell, yCell, zCell real (kind=8), allocatable, dimension(:) :: latEdge, lonEdge, xEdge, yEdge, zEdge real (kind=8), allocatable, dimension(:) :: latVertex, lonVertex, xVertex, yVertex, zVertex + real (kind=8), allocatable, dimension(:) :: meshDensity real (kind=8), allocatable, dimension(:,:) :: weightsOnEdge, kiteAreasOnVertex real (kind=8), allocatable, dimension(:) :: fEdge, fVertex, h_s real (kind=8), allocatable, dimension(:,:,:) :: u, v, h, vh, circulation, vorticity, ke @@ -81,6 +82,7 @@ program hexagonal_periodic_grid allocate(xVertex(nVertices)) allocate(yVertex(nVertices)) allocate(zVertex(nVertices)) + allocate(meshDensity(nCells)) allocate(fEdge(nEdges)) allocate(fVertex(nVertices)) @@ -271,6 +273,7 @@ program hexagonal_periodic_grid end do end do + meshDensity(:) = 1.0 ! ! 
fill in initial conditions below @@ -329,6 +332,7 @@ program hexagonal_periodic_grid edgesOnVertex, & cellsOnVertex, & kiteAreasOnVertex, & + meshDensity, & fEdge, & fVertex, & h_s, & From 8d6573815196fd191779dfc0e079187a5943190d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 4 Jan 2019 23:47:26 +0100 Subject: [PATCH 069/180] Rename nVertLevels --> depth Transpose zMid to have the right dimension order (Time, nCells, depth) --- ocean/add_depth_coord/add_depth.py | 6 ++++-- ocean/add_depth_coord/add_zMid.py | 15 +++++++++------ ocean/add_depth_coord/write_time_varying_zMid.py | 13 ++++++++----- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py index 362ebc218..61c05440e 100755 --- a/ocean/add_depth_coord/add_depth.py +++ b/ocean/add_depth_coord/add_depth.py @@ -103,10 +103,12 @@ def main(): coordFileName = args.inputFileName dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) ds = xarray.open_dataset(args.inFileName) + ds = ds.rename({'nVertLevels': 'depth'}) - ds.coords['depth'] = ('nVertLevels', + ds.coords['depth'] = ('depth', compute_depth(dsCoord.refBottomDepth)) ds.depth.attrs['unit'] = 'meters' ds.depth.attrs['long_name'] = 'reference depth of the center of each ' \ @@ -114,7 +116,7 @@ def main(): for varName in ds.data_vars: var = ds[varName] - if 'nVertLevels' in var.dims: + if 'depth' in var.dims: var = var.assign_coords(depth=ds.depth) ds[varName] = var diff --git a/ocean/add_depth_coord/add_zMid.py b/ocean/add_depth_coord/add_zMid.py index 6f5b6a9ab..b08a8b84f 100755 --- a/ocean/add_depth_coord/add_zMid.py +++ b/ocean/add_depth_coord/add_zMid.py @@ -78,16 +78,16 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): # ------- # Xylar Asay-Davis - nVertLevels = layerThickness.sizes['nVertLevels'] + nDepth = layerThickness.sizes['depth'] vertIndex = \ - xarray.DataArray.from_dict({'dims': ('nVertLevels',), 
- 'data': numpy.arange(nVertLevels)}) + xarray.DataArray.from_dict({'dims': ('depth',), + 'data': numpy.arange(nDepth)}) layerThickness = layerThickness.where(vertIndex < maxLevelCell) - thicknessSum = layerThickness.sum(dim='nVertLevels') - thicknessCumSum = layerThickness.cumsum(dim='nVertLevels') + thicknessSum = layerThickness.sum(dim='depth') + thicknessCumSum = layerThickness.cumsum(dim='depth') zSurface = -bottomDepth+thicknessSum zLayerBot = zSurface - thicknessCumSum @@ -95,6 +95,7 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): zMid = zLayerBot + 0.5*layerThickness zMid = zMid.where(vertIndex < maxLevelCell) + zMid = zMid.transpose('Time', 'nCells', 'depth') return zMid @@ -122,8 +123,10 @@ def main(): coordFileName = args.inputFileName dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) ds = xarray.open_dataset(args.inFileName) + ds = ds.rename({'nVertLevels': 'depth'}) ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, dsCoord.layerThickness) @@ -131,7 +134,7 @@ def main(): for varName in ds.data_vars: var = ds[varName] - if 'nCells' in var.dims and 'nVertLevels' in var.dims: + if 'nCells' in var.dims and 'depth' in var.dims: var = var.assign_coords(zMid=ds.zMid) ds[varName] = var diff --git a/ocean/add_depth_coord/write_time_varying_zMid.py b/ocean/add_depth_coord/write_time_varying_zMid.py index 1b10a48bf..2894ed50e 100755 --- a/ocean/add_depth_coord/write_time_varying_zMid.py +++ b/ocean/add_depth_coord/write_time_varying_zMid.py @@ -79,16 +79,16 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): # ------- # Xylar Asay-Davis - nVertLevels = layerThickness.sizes['nVertLevels'] + nDepth = layerThickness.sizes['depth'] vertIndex = \ - xarray.DataArray.from_dict({'dims': ('nVertLevels',), - 'data': numpy.arange(nVertLevels)}) + xarray.DataArray.from_dict({'dims': ('depth',), + 'data': numpy.arange(nDepth)}) layerThickness = layerThickness.where(vertIndex < 
maxLevelCell) - thicknessSum = layerThickness.sum(dim='nVertLevels') - thicknessCumSum = layerThickness.cumsum(dim='nVertLevels') + thicknessSum = layerThickness.sum(dim='depth') + thicknessCumSum = layerThickness.cumsum(dim='depth') zSurface = -bottomDepth+thicknessSum zLayerBot = zSurface - thicknessCumSum @@ -96,6 +96,7 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): zMid = zLayerBot + 0.5*layerThickness zMid = zMid.where(vertIndex < maxLevelCell) + zMid = zMid.transpose('Time', 'nCells', 'depth') return zMid @@ -128,8 +129,10 @@ def main(): coordFileName = args.inputFileName dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) dsIn = xarray.open_dataset(args.inFileName) + dsIn = dsIn.rename({'nVertLevels': 'depth'}) inVarName = '{}layerThickness'.format(args.prefix) outVarName = '{}zMid'.format(args.prefix) layerThickness = dsIn[inVarName] From 33bf7f5a1fc6b1bcfdaa16548c4e927a4613425e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 7 Jan 2019 17:44:59 +0100 Subject: [PATCH 070/180] Update attributes for CF compliance --- ocean/add_depth_coord/add_depth.py | 5 ++++- ocean/add_depth_coord/add_zMid.py | 4 +++- ocean/add_depth_coord/write_time_varying_zMid.py | 4 +++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py index 61c05440e..11be1b908 100755 --- a/ocean/add_depth_coord/add_depth.py +++ b/ocean/add_depth_coord/add_depth.py @@ -110,7 +110,10 @@ def main(): ds.coords['depth'] = ('depth', compute_depth(dsCoord.refBottomDepth)) - ds.depth.attrs['unit'] = 'meters' + ds.depth.attrs['units'] = 'meters' + ds.depth.attrs['positive'] = 'down' + ds.depth.attrs['standard_name'] = 'depth' + ds.depth.attrs['long_name'] = 'reference depth of the center of each ' \ 'vertical level' diff --git a/ocean/add_depth_coord/add_zMid.py b/ocean/add_depth_coord/add_zMid.py index b08a8b84f..f8ab2914b 100755 --- 
a/ocean/add_depth_coord/add_zMid.py +++ b/ocean/add_depth_coord/add_zMid.py @@ -130,7 +130,9 @@ def main(): ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, dsCoord.layerThickness) - ds.zMid.attrs['unit'] = 'm' + ds.zMid.attrs['units'] = 'meters' + ds.zMid.attrs['positive'] = 'up' + ds.zMid.attrs['standard_name'] = 'depth' for varName in ds.data_vars: var = ds[varName] diff --git a/ocean/add_depth_coord/write_time_varying_zMid.py b/ocean/add_depth_coord/write_time_varying_zMid.py index 2894ed50e..ea992fc1b 100755 --- a/ocean/add_depth_coord/write_time_varying_zMid.py +++ b/ocean/add_depth_coord/write_time_varying_zMid.py @@ -142,7 +142,9 @@ def main(): dsOut = xarray.Dataset() dsOut[outVarName] = zMid - dsOut[outVarName].attrs['unit'] = 'm' + dsOut[outVarName].attrs['units'] = 'meters' + dsOut[outVarName].attrs['positive'] = 'up' + dsOut[outVarName].attrs['standard_name'] = 'depth' dsOut.attrs['history'] = ' '.join(sys.argv) From 4cbbd418154ec44337d5d0d4b2a251cf818d5bde Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 8 Jan 2019 10:36:15 +0100 Subject: [PATCH 071/180] Remove standard_name for zMid (*not* depth) --- ocean/add_depth_coord/add_zMid.py | 1 - ocean/add_depth_coord/write_time_varying_zMid.py | 1 - 2 files changed, 2 deletions(-) diff --git a/ocean/add_depth_coord/add_zMid.py b/ocean/add_depth_coord/add_zMid.py index f8ab2914b..89b4f4049 100755 --- a/ocean/add_depth_coord/add_zMid.py +++ b/ocean/add_depth_coord/add_zMid.py @@ -132,7 +132,6 @@ def main(): dsCoord.layerThickness) ds.zMid.attrs['units'] = 'meters' ds.zMid.attrs['positive'] = 'up' - ds.zMid.attrs['standard_name'] = 'depth' for varName in ds.data_vars: var = ds[varName] diff --git a/ocean/add_depth_coord/write_time_varying_zMid.py b/ocean/add_depth_coord/write_time_varying_zMid.py index ea992fc1b..f689e4da2 100755 --- a/ocean/add_depth_coord/write_time_varying_zMid.py +++ b/ocean/add_depth_coord/write_time_varying_zMid.py @@ -144,7 +144,6 @@ def main(): 
dsOut[outVarName] = zMid dsOut[outVarName].attrs['units'] = 'meters' dsOut[outVarName].attrs['positive'] = 'up' - dsOut[outVarName].attrs['standard_name'] = 'depth' dsOut.attrs['history'] = ' '.join(sys.argv) From 82d007dcdef70f0b857cb4ccb735fc20968dc1a2 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 12 Jan 2019 17:25:52 +0100 Subject: [PATCH 072/180] Handle datasets without vertical dimension --- ocean/add_depth_coord/add_depth.py | 39 +++++++++++++++--------------- ocean/add_depth_coord/add_zMid.py | 32 ++++++++++++------------ 2 files changed, 37 insertions(+), 34 deletions(-) diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py index 11be1b908..a7633c399 100755 --- a/ocean/add_depth_coord/add_depth.py +++ b/ocean/add_depth_coord/add_depth.py @@ -102,26 +102,27 @@ def main(): else: coordFileName = args.inputFileName - dsCoord = xarray.open_dataset(coordFileName) - dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) - ds = xarray.open_dataset(args.inFileName) - ds = ds.rename({'nVertLevels': 'depth'}) - - ds.coords['depth'] = ('depth', - compute_depth(dsCoord.refBottomDepth)) - ds.depth.attrs['units'] = 'meters' - ds.depth.attrs['positive'] = 'down' - ds.depth.attrs['standard_name'] = 'depth' - - ds.depth.attrs['long_name'] = 'reference depth of the center of each ' \ - 'vertical level' - - for varName in ds.data_vars: - var = ds[varName] - if 'depth' in var.dims: - var = var.assign_coords(depth=ds.depth) - ds[varName] = var + if 'nVertLevels' in ds.dims: + ds = ds.rename({'nVertLevels': 'depth'}) + + dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) + + ds.coords['depth'] = ('depth', + compute_depth(dsCoord.refBottomDepth)) + ds.depth.attrs['units'] = 'meters' + ds.depth.attrs['positive'] = 'down' + ds.depth.attrs['standard_name'] = 'depth' + + ds.depth.attrs['long_name'] = 'reference depth of the center of ' \ + 'each vertical level' + + for varName in ds.data_vars: + var 
= ds[varName] + if 'depth' in var.dims: + var = var.assign_coords(depth=ds.depth) + ds[varName] = var if 'history' in ds.attrs: ds.attrs['history'] = '{}\n{}'.format(' '.join(sys.argv), diff --git a/ocean/add_depth_coord/add_zMid.py b/ocean/add_depth_coord/add_zMid.py index 89b4f4049..a45011a6a 100755 --- a/ocean/add_depth_coord/add_zMid.py +++ b/ocean/add_depth_coord/add_zMid.py @@ -122,22 +122,24 @@ def main(): else: coordFileName = args.inputFileName - dsCoord = xarray.open_dataset(coordFileName) - dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) - ds = xarray.open_dataset(args.inFileName) - ds = ds.rename({'nVertLevels': 'depth'}) - - ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, - dsCoord.layerThickness) - ds.zMid.attrs['units'] = 'meters' - ds.zMid.attrs['positive'] = 'up' - - for varName in ds.data_vars: - var = ds[varName] - if 'nCells' in var.dims and 'depth' in var.dims: - var = var.assign_coords(zMid=ds.zMid) - ds[varName] = var + if 'nVertLevels' in ds.dims: + ds = ds.rename({'nVertLevels': 'depth'}) + + dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) + + ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, + dsCoord.maxLevelCell, + dsCoord.layerThickness) + ds.zMid.attrs['units'] = 'meters' + ds.zMid.attrs['positive'] = 'up' + + for varName in ds.data_vars: + var = ds[varName] + if 'nCells' in var.dims and 'depth' in var.dims: + var = var.assign_coords(zMid=ds.zMid) + ds[varName] = var if 'history' in ds.attrs: ds.attrs['history'] = '{}\n{}'.format(' '.join(sys.argv), From 70525e45ec0ce889ebdb99a2211d1be807763b0f Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 13 Jan 2019 11:14:13 +0100 Subject: [PATCH 073/180] Add date stamp to history; fix handling of _FillValue _FillValue is preserved for all existing fields (via mask_and_scale=False) and is added explicitly as an attribute for new coordinates/variables. 
Then, during saving, _FillValue is used for encoding (and removed as an attribute in the xarray DataArrays). --- ocean/add_depth_coord/add_depth.py | 37 +++++++++--------- ocean/add_depth_coord/add_zMid.py | 38 +++++++++++-------- .../write_time_varying_zMid.py | 33 +++++++++------- 3 files changed, 60 insertions(+), 48 deletions(-) diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py index a7633c399..511665acb 100755 --- a/ocean/add_depth_coord/add_depth.py +++ b/ocean/add_depth_coord/add_depth.py @@ -13,14 +13,16 @@ import xarray import numpy -import netCDF4 import argparse import sys +from datetime import datetime -def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): +def write_netcdf(ds, fileName): ''' - Write an xarray data set to a NetCDF file using finite fill values + Write an xarray data set to a NetCDF file making use of the _FillValue + attributes of each variable. This function should be used for data sets + opened with mask_and_scale=False. Parameters ---------- @@ -29,11 +31,6 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): fileName : str The fileName to write the data set to - - fillValues : dict - A dictionary of fill values for each supported data type. By default, - this is the dictionary used by the netCDF4 package. Key entries should - be of the form 'f8' (for float64), 'i4' (for int32), etc. 
''' # Authors # ------- @@ -42,12 +39,12 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): encodingDict = {} variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) for variableName in variableNames: - dtype = ds[variableName].dtype - for fillType in fillValues: - if dtype == numpy.dtype(fillType): - encodingDict[variableName] = \ - {'_FillValue': fillValues[fillType]} - break + if '_FillValue' in ds[variableName].attrs: + encodingDict[variableName] = \ + {'_FillValue': ds[variableName].attrs['_FillValue']} + del ds[variableName].attrs['_FillValue'] + else: + encodingDict[variableName] = {'_FillValue': None} ds.to_netcdf(fileName, encoding=encodingDict) @@ -102,11 +99,11 @@ def main(): else: coordFileName = args.inputFileName - ds = xarray.open_dataset(args.inFileName) + ds = xarray.open_dataset(args.inFileName, mask_and_scale=False) if 'nVertLevels' in ds.dims: ds = ds.rename({'nVertLevels': 'depth'}) - dsCoord = xarray.open_dataset(coordFileName) + dsCoord = xarray.open_dataset(coordFileName, mask_and_scale=False) dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) ds.coords['depth'] = ('depth', @@ -124,11 +121,15 @@ def main(): var = var.assign_coords(depth=ds.depth) ds[varName] = var + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + if 'history' in ds.attrs: - ds.attrs['history'] = '{}\n{}'.format(' '.join(sys.argv), + ds.attrs['history'] = '{}\n{}'.format(history, ds.attrs['history']) else: - ds.attrs['history'] = ' '.join(sys.argv) + ds.attrs['history'] = history write_netcdf(ds, args.outFileName) diff --git a/ocean/add_depth_coord/add_zMid.py b/ocean/add_depth_coord/add_zMid.py index a45011a6a..b1c8d7ee1 100755 --- a/ocean/add_depth_coord/add_zMid.py +++ b/ocean/add_depth_coord/add_zMid.py @@ -16,11 +16,14 @@ import netCDF4 import argparse import sys +from datetime import datetime -def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): +def write_netcdf(ds, 
fileName): ''' - Write an xarray data set to a NetCDF file using finite fill values + Write an xarray data set to a NetCDF file making use of the _FillValue + attributes of each variable. This function should be used for data sets + opened with mask_and_scale=False. Parameters ---------- @@ -29,11 +32,6 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): fileName : str The fileName to write the data set to - - fillValues : dict - A dictionary of fill values for each supported data type. By default, - this is the dictionary used by the netCDF4 package. Key entries should - be of the form 'f8' (for float64), 'i4' (for int32), etc. ''' # Authors # ------- @@ -42,12 +40,12 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): encodingDict = {} variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) for variableName in variableNames: - dtype = ds[variableName].dtype - for fillType in fillValues: - if dtype == numpy.dtype(fillType): - encodingDict[variableName] = \ - {'_FillValue': fillValues[fillType]} - break + if '_FillValue' in ds[variableName].attrs: + encodingDict[variableName] = \ + {'_FillValue': ds[variableName].attrs['_FillValue']} + del ds[variableName].attrs['_FillValue'] + else: + encodingDict[variableName] = {'_FillValue': None} ds.to_netcdf(fileName, encoding=encodingDict) @@ -122,18 +120,22 @@ def main(): else: coordFileName = args.inputFileName - ds = xarray.open_dataset(args.inFileName) + ds = xarray.open_dataset(args.inFileName, mask_and_scale=False) if 'nVertLevels' in ds.dims: ds = ds.rename({'nVertLevels': 'depth'}) + # dsCoord doesn't have masking disabled because we want it for zMid dsCoord = xarray.open_dataset(coordFileName) dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, dsCoord.layerThickness) + fillValue = netCDF4.default_fillvals['f8'] + ds.coords['zMid'] = ds.zMid.where(ds.zMid.notnull(), other=fillValue) 
ds.zMid.attrs['units'] = 'meters' ds.zMid.attrs['positive'] = 'up' + ds.zMid.attrs['_FillValue'] = fillValue for varName in ds.data_vars: var = ds[varName] @@ -141,11 +143,15 @@ def main(): var = var.assign_coords(zMid=ds.zMid) ds[varName] = var + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + if 'history' in ds.attrs: - ds.attrs['history'] = '{}\n{}'.format(' '.join(sys.argv), + ds.attrs['history'] = '{}\n{}'.format(history, ds.attrs['history']) else: - ds.attrs['history'] = ' '.join(sys.argv) + ds.attrs['history'] = history write_netcdf(ds, args.outFileName) diff --git a/ocean/add_depth_coord/write_time_varying_zMid.py b/ocean/add_depth_coord/write_time_varying_zMid.py index f689e4da2..777f5ea22 100755 --- a/ocean/add_depth_coord/write_time_varying_zMid.py +++ b/ocean/add_depth_coord/write_time_varying_zMid.py @@ -17,11 +17,14 @@ import netCDF4 import argparse import sys +from datetime import datetime -def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): +def write_netcdf(ds, fileName): ''' - Write an xarray data set to a NetCDF file using finite fill values + Write an xarray data set to a NetCDF file making use of the _FillValue + attributes of each variable. This function should be used for data sets + opened with mask_and_scale=False. Parameters ---------- @@ -30,11 +33,6 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): fileName : str The fileName to write the data set to - - fillValues : dict - A dictionary of fill values for each supported data type. By default, - this is the dictionary used by the netCDF4 package. Key entries should - be of the form 'f8' (for float64), 'i4' (for int32), etc. 
''' # Authors # ------- @@ -43,12 +41,12 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): encodingDict = {} variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) for variableName in variableNames: - dtype = ds[variableName].dtype - for fillType in fillValues: - if dtype == numpy.dtype(fillType): - encodingDict[variableName] = \ - {'_FillValue': fillValues[fillType]} - break + if '_FillValue' in ds[variableName].attrs: + encodingDict[variableName] = \ + {'_FillValue': ds[variableName].attrs['_FillValue']} + del ds[variableName].attrs['_FillValue'] + else: + encodingDict[variableName] = {'_FillValue': None} ds.to_netcdf(fileName, encoding=encodingDict) @@ -142,10 +140,17 @@ def main(): dsOut = xarray.Dataset() dsOut[outVarName] = zMid + fillValue = netCDF4.default_fillvals['f8'] + dsOut[outVarName] = dsOut[outVarName].where(dsOut[outVarName].notnull(), + other=fillValue) dsOut[outVarName].attrs['units'] = 'meters' dsOut[outVarName].attrs['positive'] = 'up' + dsOut[outVarName].attrs['_FillValue'] = fillValue + + time = datetime.now().strftime('%c') - dsOut.attrs['history'] = ' '.join(sys.argv) + history = '{}: {}'.format(time, ' '.join(sys.argv)) + dsOut.attrs['history'] = history write_netcdf(dsOut, args.outFileName) From aac48af18a58829c0b3c9a0740c23fb216e45fd6 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 16 Jan 2019 22:11:09 +0100 Subject: [PATCH 074/180] Support xtime=none command-line argument With this value, Time is still extracted but without xtime (so that extracted time values are just time indices). 
--- .../paraview_vtk_field_extractor.py | 46 ++++++++++--------- .../paraview_vtk_field_extractor/utils.py | 29 +++++++----- 2 files changed, 43 insertions(+), 32 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 6cf7a77ec..b09fc242f 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -243,22 +243,26 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, xtime = None years = float(time_index) else: - if xtimeName not in time_series_file.variables: - raise ValueError("xtime variable name {} not found in " - "{}".format(xtimeName, time_series_file)) - var = time_series_file.variables[xtimeName] - if len(var.shape) == 2: - xtime = var[local_time_indices[time_index], - :].tostring().decode('utf-8').strip() - date = datetime(int(xtime[0:4]), int(xtime[5:7]), - int(xtime[8:10]), int(xtime[11:13]), - int(xtime[14:16]), int(xtime[17:19])) - years = date2num(date, units='days since 0000-01-01', - calendar='noleap')/365. + if xtimeName == 'none': + xtime = '{}'.format(time_index) + years = float(time_index) else: - xtime = var[local_time_indices[time_index]] - years = xtime/365. - xtime = str(xtime) + if xtimeName not in time_series_file.variables: + raise ValueError("xtime variable name {} not found in " + "{}".format(xtimeName, time_series_file)) + var = time_series_file.variables[xtimeName] + if len(var.shape) == 2: + xtime = var[local_time_indices[time_index], + :].tostring().decode('utf-8').strip() + date = datetime(int(xtime[0:4]), int(xtime[5:7]), + int(xtime[8:10]), int(xtime[11:13]), + int(xtime[14:16]), int(xtime[17:19])) + years = date2num(date, units='days since 0000-01-01', + calendar='noleap')/365. + else: + xtime = var[local_time_indices[time_index]] + years = xtime/365. 
+ xtime = str(xtime) # write the header for the vtp file vtp_file_prefix = "time_series/{}.{:d}".format(out_prefix, @@ -412,21 +416,21 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, parser.add_argument("-o", "--out_dir", dest="out_dir", help="the output directory.", default='vtk_files', metavar="DIR") - parser.add_argument("-x", "--xtime", dest="xtime", - help="the name of the xtime variable", default='xtime', - metavar="XTIME") + parser.add_argument("-x", "--xtime", dest="xtime", default='xtime', + metavar="XTIME", + help="the name of the xtime variable or 'none' to " + "extract Time dim without xtime") parser.add_argument("-l", "--lonlat", dest="lonlat", help="If set, the resulting points are in lon-lat " "space, not Cartesian.", action="store_true") parser.add_argument("-t", "--time", dest="time", help="Indices for the time dimension", metavar="TIME", required=False) - parser.add_argument("--ignore_time", dest="ignore_time", + parser.add_argument("--ignore_time", dest="ignore_time", required=False, action="store_true", help="ignore the Time dimension if it exists " "for files with a Time dimension but no xtime" - "variable (e.g. mesh file)", - required=False) + "variable (e.g. 
mesh file)") parser.add_argument("--topo_dim", dest="topo_dim", required=False, help="Dimension and range for topography dimension") parser.add_argument("--topo_cell_index", dest="topo_cell_index", diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index 3e1364c90..e69813227 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -89,6 +89,7 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ print("Build time indices...") i_file = 0 + allTIndex = 0 for file_name in file_list: try: nc_file = open_netcdf(file_name) @@ -99,20 +100,26 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ if 'Time' not in nc_file.dimensions or xtimeName is None: local_times = ['0'] else: - if xtimeName not in nc_file.variables: - raise ValueError("xtime variable name {} not found in " - "{}".format(xtimeName, file_name)) local_times = [] - xtime = nc_file.variables[xtimeName] - if len(xtime.shape) == 2: - xtime = xtime[:, :] - for index in range(xtime.shape[0]): - local_times.append(xtime[index, :].tostring()) + if xtimeName == 'none': + # no xtime variable so just use integers converted to strings + for index in range(len(nc_file.dimensions['Time'])): + local_times.append(allTIndex) + allTIndex += 1 else: - local_times = xtime[:] + if xtimeName not in nc_file.variables: + raise ValueError("xtime variable name {} not found in " + "{}".format(xtimeName, file_name)) + xtime = nc_file.variables[xtimeName] + if len(xtime.shape) == 2: + xtime = xtime[:, :] + for index in range(xtime.shape[0]): + local_times.append(xtime[index, :].tostring()) + else: + local_times = xtime[:] - if(len(local_times) == 0): - local_times = ['0'] + if(len(local_times) == 0): + local_times = ['0'] nTime = len(local_times) From b90629bf4eaccd194a96e2f3e165411b8658045d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 18 Jan 2019 11:45:24 +0100 Subject: [PATCH 
075/180] Add depth_bnds and valid_min/max to add_depth script --- ocean/add_depth_coord/add_depth.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py index 511665acb..40f97369f 100755 --- a/ocean/add_depth_coord/add_depth.py +++ b/ocean/add_depth_coord/add_depth.py @@ -51,7 +51,7 @@ def write_netcdf(ds, fileName): def compute_depth(refBottomDepth): """ - Computes depth given refBottomDepth + Computes depth and depth bounds given refBottomDepth Parameters ---------- @@ -63,6 +63,8 @@ def compute_depth(refBottomDepth): ------- depth : ``xarray.DataArray`` the vertical coordinate defining the middle of each layer + depth_bnds : ``xarray.DataArray`` + the vertical coordinate defining the top and bottom of each layer """ # Authors # ------- @@ -70,12 +72,14 @@ def compute_depth(refBottomDepth): refBottomDepth = refBottomDepth.values - depth = numpy.zeros(refBottomDepth.shape) + depth_bnds = numpy.zeros((len(refBottomDepth), 2)) - depth[0] = 0.5*refBottomDepth[0] - depth[1:] = 0.5*(refBottomDepth[1:] + refBottomDepth[0:-1]) + depth_bnds[0, 0] = 0. 
+ depth_bnds[1:, 0] = refBottomDepth[0:-1] + depth_bnds[:, 1] = refBottomDepth + depth = 0.5*(depth_bnds[:, 0] + depth_bnds[:, 1]) - return depth + return depth, depth_bnds def main(): @@ -106,14 +110,19 @@ def main(): dsCoord = xarray.open_dataset(coordFileName, mask_and_scale=False) dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) - ds.coords['depth'] = ('depth', - compute_depth(dsCoord.refBottomDepth)) + depth, depth_bnds = compute_depth(dsCoord.refBottomDepth) + ds.coords['depth'] = ('depth', depth) + ds.depth.attrs['long_name'] = 'reference depth of the center of ' \ + 'each vertical level' + ds.depth.attrs['standard_name'] = 'depth' ds.depth.attrs['units'] = 'meters' ds.depth.attrs['positive'] = 'down' - ds.depth.attrs['standard_name'] = 'depth' + ds.depth.attrs['valid_min'] = depth_bnds[0, 0] + ds.depth.attrs['valid_max'] = depth_bnds[-1, 1] + ds.depth.attrs['bounds'] = 'depth_bnds' - ds.depth.attrs['long_name'] = 'reference depth of the center of ' \ - 'each vertical level' + ds.coords['depth_bnds'] = (('depth', 'nbnd'), depth_bnds) + ds.depth.attrs['long_name'] = 'Gridcell depth interfaces' for varName in ds.data_vars: var = ds[varName] From ba5e58e58e1a17e6caff2e010e35a1c3eaf0b5ab Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 18 Jan 2019 18:11:08 +0100 Subject: [PATCH 076/180] Add axis attribute to depth --- ocean/add_depth_coord/add_depth.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py index 40f97369f..1382ecce1 100755 --- a/ocean/add_depth_coord/add_depth.py +++ b/ocean/add_depth_coord/add_depth.py @@ -116,13 +116,14 @@ def main(): 'each vertical level' ds.depth.attrs['standard_name'] = 'depth' ds.depth.attrs['units'] = 'meters' + ds.depth.attrs['axis'] = 'Z' ds.depth.attrs['positive'] = 'down' ds.depth.attrs['valid_min'] = depth_bnds[0, 0] ds.depth.attrs['valid_max'] = depth_bnds[-1, 1] ds.depth.attrs['bounds'] = 'depth_bnds' 
ds.coords['depth_bnds'] = (('depth', 'nbnd'), depth_bnds) - ds.depth.attrs['long_name'] = 'Gridcell depth interfaces' + ds.depth_bnds.attrs['long_name'] = 'Gridcell depth interfaces' for varName in ds.data_vars: var = ds[varName] From 7cf6e3d777a931fbb87fdf83725db7dc3b95ef48 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 26 Jan 2019 12:58:51 +0100 Subject: [PATCH 077/180] Fix computation of nBlocks The value was incorrect when the size of the dimension was exactly equal to the blocking size. --- .../paraview_vtk_field_extractor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index b09fc242f..8189bceba 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -136,7 +136,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, cellData = True # Pre-compute the number of blocks - nBlocks = 1 + blockDim // blocking + nBlocks = int(np.ceil(blockDim / blocking)) nPolygons = len(offsets) nPoints = len(vertices[0]) From 2d7bafa70fbd436e709c2603f3374b53d6c9a98e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 26 Jan 2019 13:14:54 +0100 Subject: [PATCH 078/180] Strip is_periodic and on_a_sphere strings This is because some Fortran code, such as periodic_hex, writes them out with trailing spaces. 
--- visualization/paraview_vtk_field_extractor/utils.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/utils.py b/visualization/paraview_vtk_field_extractor/utils.py index e69813227..4c4f9f715 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/visualization/paraview_vtk_field_extractor/utils.py @@ -740,7 +740,8 @@ def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ validVerts, lonPolygon) - if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': if lonlat: xcoord = lonPolygon ycoord = latPolygon @@ -830,7 +831,8 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonCell) - if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': if lonlat: xcoord = lonCell ycoord = latCell @@ -876,7 +878,8 @@ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonVertex[valid_mask]) - if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': # all remaining entries in cellsOnVertex are valid validVertices = numpy.ones(cellsOnVertex.shape, bool) if lonlat: @@ -943,7 +946,8 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ vertsOnCell, validVerts, lonEdge[valid_mask]) - if nc_file.on_a_sphere == 'NO' and nc_file.is_periodic == 'YES': + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': if lonlat: xcoord = lonEdge[valid_mask] ycoord = latEdge[valid_mask] From 8cbf6b63f3102a85f6a54659c6dad1b3e6e6c4f1 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 28 Jan 2019 09:45:44 -0700 Subject: [PATCH 079/180] Fix bug/typo in print statement in create LI grid script --- 
.../create_landice_grid_from_generic_MPAS_grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py b/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py index c50c71b37..5ed987b0e 100755 --- a/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py +++ b/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py @@ -169,7 +169,7 @@ layerInterfaces[k] = 4.0/3.0 * (1.0 - ((k+1.0-1.0)/(nInterfaces-1.0) + 1.0)**-2) for k in range(nVertLevels): layerThicknessFractionsData[k] = layerInterfaces[k+1] - layerInterfaces[k] - print "Setting layerThicknessFractions to:", layerThicknessFractionData + print "Setting layerThicknessFractions to:", layerThicknessFractionsData else: sys.exit('Unknown method for vertical spacing method (--vert): '+options.vertMethod) From 61e64f37c1f2df553688061260851a058a458ece Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 28 Jan 2019 09:46:21 -0700 Subject: [PATCH 080/180] Fix some CISM-style variable names in interpolation script --- landice/landice_grid_tools/interpolate_to_mpasli_grid.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/landice/landice_grid_tools/interpolate_to_mpasli_grid.py b/landice/landice_grid_tools/interpolate_to_mpasli_grid.py index a3eb9997d..c16a9cbf5 100755 --- a/landice/landice_grid_tools/interpolate_to_mpasli_grid.py +++ b/landice/landice_grid_tools/interpolate_to_mpasli_grid.py @@ -525,7 +525,7 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): if not options.thicknessOnly: fieldInfo['bedTopography'] = {'InputName':'topg', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['sfcMassBal'] = {'InputName':'acab', 'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density - fieldInfo['floatingBasalMassBal'] = {'InputName':'bmb',
'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density + fieldInfo['floatingBasalMassBal'] = {'InputName':'subm', 'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density #fieldInfo['temperature'] = {'InputName':'temp', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':True} fieldInfo['temperature'] = {'InputName':'tempstag', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':True} # pick one or the other fieldInfo['basalHeatFlux'] = {'InputName':'bheatflx', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} @@ -535,10 +535,10 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): # fields for observed surface speed and associated error, observed thickness change fieldInfo['observedSurfaceVelocityX'] = {'InputName':'vx', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['observedSurfaceVelocityY'] = {'InputName':'vy', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['observedSurfaceVelocityUncertainty'] = {'InputName':'verr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['observedSurfaceVelocityUncertainty'] = {'InputName':'vErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['observedThicknessTendency'] = {'InputName':'dHdt', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['observedThicknessTendencyUncertainty'] = {'InputName':'dHdtErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['thicknessUncertainty'] = {'InputName':'thkerr', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['thicknessUncertainty'] = {'InputName':'thkErr', 'scalefactor':1.0, 
'offset':0.0, 'gridType':'x1', 'vertDim':False} elif filetype=='mpas': From 04cf55a44c71d3410643af50ea3a1f9c39f3a84d Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 28 Jan 2019 09:50:03 -0700 Subject: [PATCH 081/180] Add option to include effectivePressure in MALI input files This is used as the friction field in some sliding laws. --- .../create_landice_grid_from_generic_MPAS_grid.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py b/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py index 5ed987b0e..fdf854f39 100755 --- a/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py +++ b/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py @@ -18,6 +18,7 @@ parser.add_option("-l", "--level", dest="levels", help="Number of vertical levels to use in the output file. Defaults to the number in the input file", metavar="FILENAME") parser.add_option("-v", "--vert", dest="vertMethod", help="Method of vertical layer spacing: uniform, glimmer. Glimmer spacing follows Eq. 35 of Rutt, I. C., M. Hagdorn, N. R. J. Hulton, and A. J. Payne (2009), The Glimmer community ice sheet model, J. Geophys. 
Res., 114, F02004, doi:10.1029/2008JF001015", default='uniform', metavar="FILENAME") parser.add_option("--beta", dest="beta", action="store_true", help="Use this flag to include the field 'beta' in the resulting file.") +parser.add_option("--effecpress", dest="effecpress", action="store_true", help="Use this flag to include the field 'effectivePressure' in the resulting file.") parser.add_option("--diri", dest="dirichlet", action="store_true", help="Use this flag to include the fields 'dirichletVelocityMask', 'uReconstructX', 'uReconstructY' needed for specifying Dirichlet velocity boundary conditions in the resulting file.") parser.add_option("--thermal", dest="thermal", action="store_true", help="Use this flag to include the fields 'temperature', 'surfaceAirTemperature', 'basalHeatFlux' needed for specifying thermal initial conditions in the resulting file.") parser.add_option("--hydro", dest="hydro", action="store_true", help="Use this flag to include the fields 'waterThickness', 'tillWaterThickness', 'basalMeltInput', 'externalWaterInput', 'frictionAngle', 'waterPressure', 'waterFluxMask' needed for specifying hydro initial conditions in the resulting file.") @@ -198,6 +199,11 @@ newvar[:] = 1.0e8 # Give a default beta that won't have much sliding. print 'Added optional variable: beta' +if options.effecpress: + newvar = fileout.createVariable('effectivePressure', datatype, ('Time', 'nCells')) + newvar[:] = 1.0e8 # Give a default effective pressure that won't have much sliding. + print 'Added optional variable: effectivePressure' + if options.dirichlet: newvar = fileout.createVariable('dirichletVelocityMask', datatypeInt, ('Time', 'nCells', 'nVertInterfaces')) newvar[:] = 0 # default: no Dirichlet b.c. 
From 921b952e19b994ab487e6caff5e0cedea35ffdd0 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 28 Jan 2019 10:05:15 -0700 Subject: [PATCH 082/180] Reorganize LI tools directory MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The new dir structure is like: landice ├── mesh_tools_li │   └── misc └── output_processing_li The _li suffix is meant to avoid potential confusion with the general directories with the same names one level up. The old dir structure was flat and confusingly named: landice └── landice_grid_tools --- .../conversion_exodus_init_to_mpasli_mesh.py | 0 .../create_landice_grid_from_generic_MPAS_grid.py | 0 landice/{landice_grid_tools => mesh_tools_li}/define_cullMask.py | 0 .../interpolate_to_mpasli_grid.py | 0 .../mark_domain_boundaries_dirichlet.py | 0 .../misc}/README_grid_generation_workflow.txt | 0 .../{landice_grid_tools => mesh_tools_li/misc}/calibrate_beta.py | 0 .../misc}/copy_etopo_to_MPAS_sphere_grid.py | 0 .../misc}/mpas_mesh_to_landice_ic_batch.sh | 0 .../misc}/prepare_pattyn_temperature_field_for_interpolation.m | 0 .../convert_landice_bitmasks.py | 0 .../plot_globalStats.py | 0 .../plot_mass_balance.py | 0 13 files changed, 0 insertions(+), 0 deletions(-) rename landice/{landice_grid_tools => mesh_tools_li}/conversion_exodus_init_to_mpasli_mesh.py (100%) rename landice/{landice_grid_tools => mesh_tools_li}/create_landice_grid_from_generic_MPAS_grid.py (100%) rename landice/{landice_grid_tools => mesh_tools_li}/define_cullMask.py (100%) rename landice/{landice_grid_tools => mesh_tools_li}/interpolate_to_mpasli_grid.py (100%) rename landice/{landice_grid_tools => mesh_tools_li}/mark_domain_boundaries_dirichlet.py (100%) rename landice/{landice_grid_tools => mesh_tools_li/misc}/README_grid_generation_workflow.txt (100%) rename landice/{landice_grid_tools => mesh_tools_li/misc}/calibrate_beta.py (100%) rename landice/{landice_grid_tools => 
mesh_tools_li/misc}/copy_etopo_to_MPAS_sphere_grid.py (100%) rename landice/{landice_grid_tools => mesh_tools_li/misc}/mpas_mesh_to_landice_ic_batch.sh (100%) rename landice/{landice_grid_tools => mesh_tools_li/misc}/prepare_pattyn_temperature_field_for_interpolation.m (100%) rename landice/{landice_grid_tools => output_processing_li}/convert_landice_bitmasks.py (100%) rename landice/{landice_grid_tools => output_processing_li}/plot_globalStats.py (100%) rename landice/{landice_grid_tools => output_processing_li}/plot_mass_balance.py (100%) diff --git a/landice/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py b/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py similarity index 100% rename from landice/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py rename to landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py diff --git a/landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py similarity index 100% rename from landice/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py rename to landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py diff --git a/landice/landice_grid_tools/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py similarity index 100% rename from landice/landice_grid_tools/define_cullMask.py rename to landice/mesh_tools_li/define_cullMask.py diff --git a/landice/landice_grid_tools/interpolate_to_mpasli_grid.py b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py similarity index 100% rename from landice/landice_grid_tools/interpolate_to_mpasli_grid.py rename to landice/mesh_tools_li/interpolate_to_mpasli_grid.py diff --git a/landice/landice_grid_tools/mark_domain_boundaries_dirichlet.py b/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py similarity index 100% rename from landice/landice_grid_tools/mark_domain_boundaries_dirichlet.py rename to 
landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py diff --git a/landice/landice_grid_tools/README_grid_generation_workflow.txt b/landice/mesh_tools_li/misc/README_grid_generation_workflow.txt similarity index 100% rename from landice/landice_grid_tools/README_grid_generation_workflow.txt rename to landice/mesh_tools_li/misc/README_grid_generation_workflow.txt diff --git a/landice/landice_grid_tools/calibrate_beta.py b/landice/mesh_tools_li/misc/calibrate_beta.py similarity index 100% rename from landice/landice_grid_tools/calibrate_beta.py rename to landice/mesh_tools_li/misc/calibrate_beta.py diff --git a/landice/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py b/landice/mesh_tools_li/misc/copy_etopo_to_MPAS_sphere_grid.py similarity index 100% rename from landice/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py rename to landice/mesh_tools_li/misc/copy_etopo_to_MPAS_sphere_grid.py diff --git a/landice/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh b/landice/mesh_tools_li/misc/mpas_mesh_to_landice_ic_batch.sh similarity index 100% rename from landice/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh rename to landice/mesh_tools_li/misc/mpas_mesh_to_landice_ic_batch.sh diff --git a/landice/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m b/landice/mesh_tools_li/misc/prepare_pattyn_temperature_field_for_interpolation.m similarity index 100% rename from landice/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m rename to landice/mesh_tools_li/misc/prepare_pattyn_temperature_field_for_interpolation.m diff --git a/landice/landice_grid_tools/convert_landice_bitmasks.py b/landice/output_processing_li/convert_landice_bitmasks.py similarity index 100% rename from landice/landice_grid_tools/convert_landice_bitmasks.py rename to landice/output_processing_li/convert_landice_bitmasks.py diff --git a/landice/landice_grid_tools/plot_globalStats.py b/landice/output_processing_li/plot_globalStats.py similarity index 
100% rename from landice/landice_grid_tools/plot_globalStats.py rename to landice/output_processing_li/plot_globalStats.py diff --git a/landice/landice_grid_tools/plot_mass_balance.py b/landice/output_processing_li/plot_mass_balance.py similarity index 100% rename from landice/landice_grid_tools/plot_mass_balance.py rename to landice/output_processing_li/plot_mass_balance.py From 55a0fc8ea754547434c847751d30a8bbf555572d Mon Sep 17 00:00:00 2001 From: Michael Duda Date: Wed, 30 Jan 2019 15:39:02 -0700 Subject: [PATCH 083/180] Remove shallow-water IC fields from periodic hex meshes The periodic_hex mesh generator previously added fields to the grid.nc file to serve as placeholders for ICs for the shallow-water model. However, these fields are not part of the MPAS mesh specification, and have been removed. Specifically, the following fields are deleted in this commit: fEdge fVertex h_s u uBC v h vh circulation vorticity ke tracers Additionally, the dimensions nVertLevels and nTracers have also been removed. 
--- mesh_tools/periodic_hex/module_write_netcdf.F | 172 +----------------- mesh_tools/periodic_hex/periodic_grid.F | 84 +-------- 2 files changed, 5 insertions(+), 251 deletions(-) diff --git a/mesh_tools/periodic_hex/module_write_netcdf.F b/mesh_tools/periodic_hex/module_write_netcdf.F index 0fcdd5194..debbb8ac5 100644 --- a/mesh_tools/periodic_hex/module_write_netcdf.F +++ b/mesh_tools/periodic_hex/module_write_netcdf.F @@ -9,8 +9,6 @@ module write_netcdf integer :: wrDimIDmaxEdges2 integer :: wrDimIDTWO integer :: wrDimIDvertexDegree - integer :: wrDimIDnVertLevels - integer :: wrDimIDnTracers integer :: wrVarIDlatCell integer :: wrVarIDlonCell integer :: wrVarIDxCell @@ -47,25 +45,11 @@ module write_netcdf integer :: wrVarIDcellsOnVertex integer :: wrVarIDkiteAreasOnVertex integer :: wrVarIDmeshDensity - integer :: wrVarIDfEdge - integer :: wrVarIDfVertex - integer :: wrVarIDh_s - integer :: wrVarIDu - integer :: wrVarIDuBC - integer :: wrVarIDv - integer :: wrVarIDh - integer :: wrVarIDvh - integer :: wrVarIDcirculation - integer :: wrVarIDvorticity - integer :: wrVarIDke - integer :: wrVarIDtracers integer :: wrLocalnCells integer :: wrLocalnEdges integer :: wrLocalnVertices integer :: wrLocalmaxEdges - integer :: wrLocalnVertLevels - integer :: wrLocalnTracers contains @@ -74,8 +58,6 @@ subroutine write_netcdf_init( & nEdges, & nVertices, & maxEdges, & - nVertLevels, & - nTracers, & vertexDegree, & dc, & nx, & @@ -90,8 +72,6 @@ subroutine write_netcdf_init( & integer, intent(in) :: nEdges integer, intent(in) :: nVertices integer, intent(in) :: maxEdges - integer, intent(in) :: nVertLevels - integer, intent(in) :: nTracers integer, intent(in) :: vertexDegree real (kind=8), intent(in) :: dc integer, intent(in) :: nx @@ -109,8 +89,6 @@ subroutine write_netcdf_init( & wrLocalnEdges = nEdges wrLocalnVertices = nVertices wrLocalmaxEdges = maxEdges - wrLocalnVertLevels = nVertLevels - wrLocalnTracers = nTracers on_a_sphere = 'NO' is_periodic = 'YES' @@ -130,8 
+108,6 @@ subroutine write_netcdf_init( & nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) nferr = nf_def_dim(wr_ncid, 'vertexDegree', vertexDegree, wrDimIDvertexDegree) - nferr = nf_def_dim(wr_ncid, 'nVertLevels', nVertLevels, wrDimIDnVertLevels) - nferr = nf_def_dim(wr_ncid, 'nTracers', nTracers, wrDimIDnTracers) nferr = nf_def_dim(wr_ncid, 'Time', NF_UNLIMITED, wrDimIDTime) @@ -230,48 +206,6 @@ subroutine write_netcdf_init( & nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) dimlist( 1) = wrDimIDnCells nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'fVertex', NF_DOUBLE, 1, dimlist, wrVarIDfVertex) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'h_s', NF_DOUBLE, 1, dimlist, wrVarIDh_s) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'u', NF_DOUBLE, 3, dimlist, wrVarIDu) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'uBC', NF_INT, 2, dimlist, wrVarIDuBC) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'v', NF_DOUBLE, 3, dimlist, wrVarIDv) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'h', NF_DOUBLE, 3, dimlist, wrVarIDh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vh', NF_DOUBLE, 3, dimlist, wrVarIDvh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'circulation', NF_DOUBLE, 3, dimlist, 
wrVarIDcirculation) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vorticity', NF_DOUBLE, 3, dimlist, wrVarIDvorticity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'ke', NF_DOUBLE, 3, dimlist, wrVarIDke) - dimlist( 1) = wrDimIDnTracers - dimlist( 2) = wrDimIDnVertLevels - dimlist( 3) = wrDimIDnCells - dimlist( 4) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'tracers', NF_DOUBLE, 4, dimlist, wrVarIDtracers) nferr = nf_enddef(wr_ncid) @@ -279,7 +213,6 @@ end subroutine write_netcdf_init subroutine write_netcdf_fields( & - time, & latCell, & lonCell, & xCell, & @@ -315,26 +248,13 @@ subroutine write_netcdf_fields( & edgesOnVertex, & cellsOnVertex, & kiteAreasOnVertex, & - meshDensity, & - fEdge, & - fVertex, & - h_s, & - uBC, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & + meshDensity & ) implicit none include 'netcdf.inc' - integer, intent(in) :: time real (kind=8), dimension(:), intent(in) :: latCell real (kind=8), dimension(:), intent(in) :: lonCell real (kind=8), dimension(:), intent(in) :: xCell @@ -371,39 +291,16 @@ subroutine write_netcdf_fields( & integer, dimension(:,:), intent(in) :: cellsOnVertex real (kind=8), dimension(:,:), intent(in) :: kiteAreasOnVertex real (kind=8), dimension(:), intent(in) :: meshDensity - real (kind=8), dimension(:), intent(in) :: fEdge - real (kind=8), dimension(:), intent(in) :: fVertex - real (kind=8), dimension(:), intent(in) :: h_s - integer, dimension(:,:), intent(in) :: uBC - real (kind=8), dimension(:,:,:), intent(in) :: u - real (kind=8), dimension(:,:,:), intent(in) :: v - real (kind=8), dimension(:,:,:), intent(in) :: h - real (kind=8), dimension(:,:,:), intent(in) :: vh - real (kind=8), dimension(:,:,:), intent(in) :: circulation - real (kind=8), dimension(:,:,:), intent(in) :: vorticity - real (kind=8), dimension(:,:,:), 
intent(in) :: ke - real (kind=8), dimension(:,:,:,:), intent(in) :: tracers integer :: nferr integer, dimension(1) :: start1, count1 integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 start1(1) = 1 start2(1) = 1 start2(2) = 1 - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - start1(1) = 1 count1( 1) = wrLocalnCells nferr = nf_put_vara_double(wr_ncid, wrVarIDlatCell, start1, count1, latCell) @@ -558,73 +455,6 @@ subroutine write_netcdf_fields( & count1( 1) = wrLocalnCells nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, meshDensity) - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDh_s, start1, count1, h_s) - - start2(2) = 1 - count2( 1) = wrLocalnVertLevels - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDuBC, start2, count2, u) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDu, start3, count3, u) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDv, start3, count3, v) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDh, start3, count3, h) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvh, start3, count3, vh) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr 
= nf_put_vara_double(wr_ncid, wrVarIDcirculation, start3, count3, circulation) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvorticity, start3, count3, vorticity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDke, start3, count3, ke) - - start4(4) = time - count4( 1) = wrLocalnTracers - count4( 2) = wrLocalnVertLevels - count4( 3) = wrLocalnCells - count4( 4) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDtracers, start4, count4, tracers) - - end subroutine write_netcdf_fields diff --git a/mesh_tools/periodic_hex/periodic_grid.F b/mesh_tools/periodic_hex/periodic_grid.F index 04ef7658f..f6e78b125 100644 --- a/mesh_tools/periodic_hex/periodic_grid.F +++ b/mesh_tools/periodic_hex/periodic_grid.F @@ -16,7 +16,7 @@ program hexagonal_periodic_grid integer, allocatable, dimension(:) :: nEdgesOnCell, nEdgesOnEdge integer, allocatable, dimension(:,:) :: cellsOnCell, edgesOnCell, verticesOnCell integer, allocatable, dimension(:,:) :: cellsOnEdge, edgesOnEdge, verticesOnEdge - integer, allocatable, dimension(:,:) :: edgesOnVertex, cellsOnVertex, uBC + integer, allocatable, dimension(:,:) :: edgesOnVertex, cellsOnVertex real (kind=8), allocatable, dimension(:) :: areaTriangle, areaCell, angleEdge real (kind=8), allocatable, dimension(:) :: dcEdge, dvEdge real (kind=8), allocatable, dimension(:) :: latCell, lonCell, xCell, yCell, zCell @@ -24,15 +24,11 @@ program hexagonal_periodic_grid real (kind=8), allocatable, dimension(:) :: latVertex, lonVertex, xVertex, yVertex, zVertex real (kind=8), allocatable, dimension(:) :: meshDensity real (kind=8), allocatable, dimension(:,:) :: weightsOnEdge, kiteAreasOnVertex - real (kind=8), allocatable, dimension(:) :: fEdge, fVertex, h_s - real (kind=8), allocatable, dimension(:,:,:) :: u, v, h, vh, circulation, vorticity, ke - real 
(kind=8), allocatable, dimension(:,:,:,:) :: tracers integer :: i, j, np, iCell integer :: nCells, nEdges, nVertices integer :: iRow, iCol, ii, jj integer :: nprocx, nprocy - real (kind=8) :: r character (len=32) :: decomp_fname call cell_indexing_read_nl() @@ -84,19 +80,6 @@ program hexagonal_periodic_grid allocate(zVertex(nVertices)) allocate(meshDensity(nCells)) - allocate(fEdge(nEdges)) - allocate(fVertex(nVertices)) - allocate(h_s(nCells)) - allocate(uBC(nVertLevels, nEdges)) - - allocate(u(nVertLevels,nEdges,1)) - allocate(v(nVertLevels,nEdges,1)) - allocate(vh(nVertLevels,nEdges,1)) - allocate(h(nVertLevels,nCells,1)) - allocate(circulation(nVertLevels,nVertices,1)) - allocate(vorticity(nVertLevels,nVertices,1)) - allocate(ke(nVertLevels,nCells,1)) - allocate(tracers(nTracers,nVertLevels,nCells,1)) do iRow = 1, ny do iCol = 1, nx @@ -275,44 +258,12 @@ program hexagonal_periodic_grid meshDensity(:) = 1.0 - ! - ! fill in initial conditions below - ! NOTE: these initial conditions will likely be removed - ! from the grid.nc files at some point (soon). - ! Initialize fields in grid - ! - - fEdge(:) = 1.0e-4 - fVertex(:) = 1.0e-4 - - h_s(:) = 0.0 - u(:,:,:) = 0.0 - v(:,:,:) = 0.0 - vh(:,:,:) = 0.0 - circulation(:,:,:) = 0.0 - vorticity(:,:,:) = 0.0 - ke(:,:,:) = 0.0 - tracers(:,:,:,:) = 0.0 - h(:,:,:) = 1.0 - - do i=1,nCells - r = sqrt((xCell(i) - (nx/2)*(10.0*dc))**2.0 + (yCell(i) - (ny/2)*(10.0*dc))**2.0) - if (r < 10.0*10.0*dc) then - tracers(1,1,i,1) = (20.0 / 2.0) * (1.0 + cos(pi*r/(10.0*10.0*dc))) + 0.0 - h(1,i,1) = 1.0 + 0.1*cos(pi*r/(20.0*10.0*dc)) - else - tracers(1,1,i,1) = 0.0 - h(1,i,1) = 1.0 - end if - end do - ! ! Write grid to grid.nc file ! 
- call write_netcdf_init( nCells, nEdges, nVertices, maxEdges, nVertLevels, nTracers, vertexDegree, dc, nx, ny ) + call write_netcdf_init( nCells, nEdges, nVertices, maxEdges, vertexDegree, dc, nx, ny ) - call write_netcdf_fields( 1, & - latCell, lonCell, xCell, yCell, zCell, indexToCellID, & + call write_netcdf_fields( latCell, lonCell, xCell, yCell, zCell, indexToCellID, & latEdge, lonEdge, xEdge, yEdge, zEdge, indexToEdgeID, & latVertex, lonVertex, xVertex, yVertex, zVertex, indexToVertexID, & cellsOnEdge, & @@ -332,19 +283,7 @@ program hexagonal_periodic_grid edgesOnVertex, & cellsOnVertex, & kiteAreasOnVertex, & - meshDensity, & - fEdge, & - fVertex, & - h_s, & - uBC, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & + meshDensity & ) call write_netcdf_finalize() @@ -384,21 +323,6 @@ program hexagonal_periodic_grid end program hexagonal_periodic_grid -subroutine enforce_uBC(u, uBC, xCell, yCell, zCell, nCells, nEdges, nVertLevels, dc) -! this suboutine provides a hook into uBC. the uBC field is read into the ocean -! model and used to enforce boundary conditions on the velocity field. -! uBC is written to the grid.nc file, even if the forward model does not use it. - -real (kind=8), intent(in) :: dc -real (kind=8), intent(inout), dimension(nVertLevels, nEdges, 1) :: u -real (kind=8), intent(in), dimension(nCells) :: xCell, yCell, zCell -integer, intent(inout), dimension(nVertLevels, nEdges) :: uBC - -uBC = -10 - -end subroutine enforce_uBC - - subroutine decompose_nproc(nproc, nprocx, nprocy) implicit none From 88a4f4b3836c662cc9f8e2a7f90ebbe67e0faffe Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 11 Feb 2019 08:27:05 -0700 Subject: [PATCH 084/180] Add tool to extract MPAS fields from E3SM coupler history files This tool was written a couple years ago by Jeremy Fyke. 
It takes a coupler history file as input, as well as optional MPAS component files containing a grid specification for that component, and outputs the coupler fields on the MPAS mesh. --- .../extract_mpas_cpl_fields.sh | 197 ++++++++++++++++++ 1 file changed, 197 insertions(+) create mode 100644 visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh diff --git a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh new file mode 100644 index 000000000..a853d2902 --- /dev/null +++ b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh @@ -0,0 +1,197 @@ +#!/bin/bash + +usage() +{ +cat< Date: Mon, 11 Feb 2019 08:49:44 -0700 Subject: [PATCH 085/180] Minor cleanup/updating to e3sm cpl extraction script --- .../extract_mpas_cpl_fields.sh | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) mode change 100644 => 100755 visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh diff --git a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh old mode 100644 new mode 100755 index a853d2902..ef5c8dc42 --- a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh +++ b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh @@ -4,33 +4,24 @@ usage() { cat< Date: Tue, 19 Feb 2019 14:21:35 -0700 Subject: [PATCH 086/180] Add a script that performs vector reconstruction The script requires that `coeffs_reconstruct` has been stored in a NetCDF file. This is most easily accomplished by adding a stream to the run (in froward mode): ```xml ``` and running for one time step. 
--- framework/vector_reconstruction.py | 201 +++++++++++++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100755 framework/vector_reconstruction.py diff --git a/framework/vector_reconstruction.py b/framework/vector_reconstruction.py new file mode 100755 index 000000000..c1334c4d9 --- /dev/null +++ b/framework/vector_reconstruction.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python + +""" +Extract Cartesian (X, Y, Z), zonal and meridional components of an MPAS vector +field, given the field on edge normals. + +This tool requires that the field 'coeffs_reconstruct' has been saved to a +NetCDF file. The simplest way to do this is to include the following stream +in a forward run: + + + + + + +and run the model for one time step. + +""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import netCDF4 +import argparse +import sys +from datetime import datetime +from dask.diagnostics import ProgressBar + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + + delayed_obj = ds.to_netcdf(fileName, encoding=encodingDict, compute=False) + + print('Writing {}'.format(fileName)) + with ProgressBar(): + delayed_obj.compute() + + +def reconstruct_variable(outVarName, variableOnEdges, dsMesh, + coeffs_reconstruct, dsOut, chunkSize=32768): + nCells = dsMesh.sizes['nCells'] + # nEdgesOnCell = dsMesh.nEdgesOnCell.values + edgesOnCell = dsMesh.edgesOnCell - 1 + + variableOnEdges.load() + edgesOnCell.load() + coeffs_reconstruct.load() + + dims = [] + sizes = [] + varIndices = {} + for dim in variableOnEdges.dims: + size = 
variableOnEdges.sizes[dim] + varIndices[dim] = numpy.arange(size) + if dim == 'nEdges': + dim = 'nCells' + size = nCells + varIndices['nEdges'] = edgesOnCell + dims.append(dim) + sizes.append(size) + + coeffs_reconstruct = coeffs_reconstruct.chunk({'nCells': chunkSize}) + + variable = variableOnEdges[varIndices].chunk({'nCells': chunkSize}) + print('Computing {} at edgesOnCell:'.format(outVarName)) + with ProgressBar(): + variable.compute() + + varCart = [] + + print('Computing Cartesian conponents:') + for index, component in enumerate(['X', 'Y', 'Z']): + var = (coeffs_reconstruct.isel(R3=index)*variable).sum( + dim='maxEdges').transpose(*dims) + outName = '{}{}'.format(outVarName, component) + print(outName) + with ProgressBar(): + var.compute() + dsOut[outName] = var + varCart.append(var) + + latCell = dsMesh.latCell + lonCell = dsMesh.lonCell + latCell.load() + lonCell.load() + + clat = numpy.cos(latCell) + slat = numpy.sin(latCell) + clon = numpy.cos(lonCell) + slon = numpy.sin(lonCell) + + print('Computing zonal and meridional components:') + + outName = '{}Zonal'.format(outVarName) + zonal = -varCart[0]*slon + varCart[1]*clon + print(outName) + with ProgressBar(): + zonal.compute() + dsOut[outName] = zonal + + outName = '{}Meridional'.format(outVarName) + merid = -(varCart[0]*clon + varCart[1]*slon)*slat + varCart[2]*clat + print(outName) + with ProgressBar(): + merid.compute() + dsOut[outName] = merid + + +def main(): + + # client = Client(n_workers=1, threads_per_worker=4, memory_limit='10GB') + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-m", "--meshFileName", dest="meshFileName", + type=str, required=False, + help="An MPAS file with mesh data (edgesOnCell, etc.)") + parser.add_argument("-w", "--weightsFileName", dest="weightsFileName", + type=str, required=False, + help="An MPAS file with coeffs_reconstruct ") + parser.add_argument("-i", "--inFileName", dest="inFileName", 
type=str, + required=True, + help="An MPAS file with one or more fields on edges " + "to be reconstructed at cell centers. Used for " + "mesh data and/or weights if a separate files " + "are not provided.") + parser.add_argument("-v", "--variables", dest="variables", type=str, + required=True, + help="A comma-separated list of variables on edges to " + "reconstruct") + parser.add_argument("--outVariables", dest="outVariables", type=str, + required=False, + help="A comma-separated list of prefixes for output " + "variable names") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS file with the reconstructed " + "X, Y, Z, zonal and meridional fields") + args = parser.parse_args() + + if args.meshFileName: + meshFileName = args.meshFileName + else: + meshFileName = args.inFileName + + if args.weightsFileName: + weightsFileName = args.weightsFileName + else: + weightsFileName = args.inFileName + + variables = args.variables.split(',') + if args.outVariables: + outVariables = args.outVariables.split(',') + else: + outVariables = variables + + dsIn = xarray.open_dataset(args.inFileName, mask_and_scale=False) + dsMesh = xarray.open_dataset(meshFileName) + dsWeights = xarray.open_dataset(weightsFileName) + coeffs_reconstruct = dsWeights.coeffs_reconstruct + dsOut = xarray.Dataset() + + for inVarName, outVarName in zip(variables, outVariables): + reconstruct_variable(outVarName, dsIn[inVarName], dsMesh, + coeffs_reconstruct, dsOut) + + for attrName in dsIn.attrs: + dsOut.attrs[attrName] = dsIn.attrs[attrName] + + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + + if 'history' in dsOut.attrs: + dsOut.attrs['history'] = '{}\n{}'.format(history, + dsOut.attrs['history']) + else: + dsOut.attrs['history'] = history + + write_netcdf(dsOut, args.outFileName) + + +if __name__ == '__main__': + main() From f34f83373dae7727f16803106a4d2bbc090847c6 Mon Sep 17 00:00:00 2001 
From: Matthew Hoffman Date: Wed, 20 Feb 2019 10:56:32 -0700 Subject: [PATCH 087/180] Fix bug in MPASO o2x field extraction --- .../e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh index ef5c8dc42..3abd17b3f 100755 --- a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh +++ b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh @@ -109,7 +109,7 @@ if [ "$MpasOGridFile" ]; then #Extract o2x coupler history fields to temporary file. ncks -O -v o2x_+ $CPLFile o2xfile1.nc - ncrename -O -d .x2oacc_ox_ny,nCells o2xfile1.nc o2xfile1.nc + ncrename -O -d .o2x_nx,nCells o2xfile1.nc o2xfile1.nc ncrename -O -d time,Time o2xfile1.nc o2xfile1.nc ncwa -O -a o2x_ny o2xfile1.nc o2xfile1.nc From f6a57736d95335b718eb0af6d0540bd50c379164 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Wed, 20 Feb 2019 13:30:33 -0700 Subject: [PATCH 088/180] Fix bug in x2oacc coupler field conversion --- .../e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh index 3abd17b3f..a9758592a 100755 --- a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh +++ b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh @@ -122,7 +122,7 @@ if [ "$MpasOGridFile" ]; then #Extract x2oacc coupler history fields to temporary file. 
ncks -O -v x2oacc+ $CPLFile x2ofile.nc ncks -O -x -v x2oacc_ox_cnt x2ofile.nc x2ofile.nc #Prune counter variable - ncrename -O -d .x2oacc_ox_nx,nCells x2ofile.nc x2ofile.nc + ncrename -O -d .x2oacc_nx,nCells x2ofile.nc x2ofile.nc ncrename -O -d time,Time x2ofile.nc x2ofile.nc ncwa -O -a x2oacc_ny x2ofile.nc x2ofile.nc From f1ec117df8699b81fca6cc57c20e80442808bac9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 00:14:14 -0700 Subject: [PATCH 089/180] Add python code for computing planar hex meshes Currently produces bit-for-bit identical results to `periodic_hex/periodic_grid` but could be extended in the future to produce non-periodic meshes. --- mesh_tools/planar_hex/planar_hex.py | 394 ++++++++++++++++++++++++++++ 1 file changed, 394 insertions(+) create mode 100755 mesh_tools/planar_hex/planar_hex.py diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py new file mode 100755 index 000000000..282d1fb2f --- /dev/null +++ b/mesh_tools/planar_hex/planar_hex.py @@ -0,0 +1,394 @@ +#!/usr/bin/env python + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy +import xarray +import argparse +import netCDF4 + + +def parse_args(mesh): + ''' + Parse the command-line arguments and put them into the mesh as dimensions + or attributes. 
+ ''' + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('--nx', dest='nx', type=int, required=True, + help='Cells in x direction') + parser.add_argument('--ny', dest='ny', type=int, required=True, + help='Cells in y direction') + parser.add_argument('--dc', dest='dc', type=float, required=True, + help='Distance between cell centers in meters') + parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, + required=False, default='grid.nc', + help='The name of the output file') + +# parser.add_argument('--periodicX', dest='periodicX', action='store_true', +# help='Make the mesh periodic in x') +# parser.add_argument('--periodicY', dest='periodicY', action='store_true', +# help='Make the mesh periodic in y') + + args = parser.parse_args() + + nx = args.nx + ny = args.ny + dc = args.dc + + # non-periodic meshes aren't yet supported +# if args.periodicX: +# mesh.attrs['periodic_x'] = 'YES' +# else: +# mesh.attrs['periodic_x'] = 'NO' +# if args.periodicY: +# mesh.attrs['periodic_y'] = 'YES' +# else: +# mesh.attrs['periodic_y'] = 'NO' +# if args.periodicX or args.periodicY: +# mesh.attrs['is_periodic'] = 'YES' +# else: +# mesh.attrs['is_periodic'] = 'NO' + + mesh.attrs['is_periodic'] = 'YES' + mesh.attrs['x_period'] = nx*dc + mesh.attrs['y_period'] = ny*dc*numpy.sqrt(3.)/2. + + mesh.attrs['dc'] = dc + + mesh.attrs['on_a_sphere'] = 'NO' + mesh.attrs['sphere_radius'] = 1. 
+ + nCells = nx * ny + nEdges = 3 * nCells + nVertices = 2 * nCells + vertexDegree = 3 + maxEdges = 6 + + # add some basic arrays to get all the dimensions in place + indexToCellID = numpy.arange(nCells, dtype='i4') + indexToEdgeID = numpy.arange(nEdges, dtype='i4') + indexToVertexID = numpy.arange(nVertices, dtype='i4') + + cellIdx = indexToCellID.reshape(ny, nx) + cellCol, cellRow = numpy.meshgrid(numpy.arange(nx, dtype='i4'), + numpy.arange(ny, dtype='i4')) + + mesh['cellIdx'] = (('ny', 'nx'), cellIdx) + mesh['cellRow'] = (('nCells'), cellRow.ravel()) + mesh['cellCol'] = (('nCells'), cellCol.ravel()) + + mesh['indexToCellID'] = (('nCells'), indexToCellID) + mesh['indexToEdgeID'] = (('nEdges'), indexToEdgeID) + mesh['indexToVertexID'] = (('nVertices'), indexToVertexID) + + mesh['nEdgesOnCell'] = (('nCells',), 6*numpy.ones((nCells,), 'i4')) + mesh['cellsOnCell'] = (('nCells', 'maxEdges'), + numpy.zeros((nCells, maxEdges), 'i4')) + mesh['edgesOnCell'] = (('nCells', 'maxEdges'), + numpy.zeros((nCells, maxEdges), 'i4')) + mesh['verticesOnCell'] = (('nCells', 'maxEdges'), + numpy.zeros((nCells, maxEdges), 'i4')) + + mesh['nEdgesOnEdge'] = (('nEdges',), 10*numpy.ones((nEdges,), 'i4')) + mesh['cellsOnEdge'] = (('nEdges', 'TWO'), + numpy.zeros((nEdges, 2), 'i4')) + mesh['edgesOnEdge'] = (('nEdges', 'maxEdges2'), + -1*numpy.ones((nEdges, 2*maxEdges), 'i4')) + mesh['verticesOnEdge'] = (('nEdges', 'TWO'), + numpy.zeros((nEdges, 2), 'i4')) + + mesh['cellsOnVertex'] = (('nVertices', 'vertexDegree'), + numpy.zeros((nVertices, vertexDegree), 'i4')) + mesh['edgesOnVertex'] = (('nVertices', 'vertexDegree'), + numpy.zeros((nVertices, vertexDegree), 'i4')) + + return args.outFileName + + +def compute_indices_on_cell(mesh): + + cellIdx = mesh.cellIdx + cellRow = mesh.cellRow + cellCol = mesh.cellCol + + indexToCellID = mesh.indexToCellID + + nx = mesh.sizes['nx'] + ny = mesh.sizes['ny'] + + mx = numpy.mod(cellCol - 1, nx) + my = numpy.mod(cellRow - 1, ny) + px = numpy.mod(cellCol + 
1, nx) + py = numpy.mod(cellRow + 1, ny) + + mask = numpy.mod(cellRow, 2) == 0 + + cellsOnCell = mesh.cellsOnCell + cellsOnCell[:, 0] = cellIdx[cellRow, mx] + cellsOnCell[:, 1] = cellIdx[my, mx].where(mask, cellIdx[my, cellCol]) + cellsOnCell[:, 2] = cellIdx[my, cellCol].where(mask, cellIdx[my, px]) + cellsOnCell[:, 3] = cellIdx[cellRow, px] + cellsOnCell[:, 4] = cellIdx[py, cellCol].where(mask, cellIdx[py, px]) + cellsOnCell[:, 5] = cellIdx[py, mx].where(mask, cellIdx[py, cellCol]) + + edgesOnCell = mesh.edgesOnCell + edgesOnCell[:, 0] = 3*indexToCellID + edgesOnCell[:, 1] = 3*indexToCellID + 1 + edgesOnCell[:, 2] = 3*indexToCellID + 2 + edgesOnCell[:, 3] = 3*cellsOnCell[:, 3] + edgesOnCell[:, 4] = 3*cellsOnCell[:, 4] + 1 + edgesOnCell[:, 5] = 3*cellsOnCell[:, 5] + 2 + + verticesOnCell = mesh.verticesOnCell + verticesOnCell[:, 0] = 2*indexToCellID + verticesOnCell[:, 1] = 2*indexToCellID + 1 + verticesOnCell[:, 2] = 2*cellsOnCell[:, 2] + verticesOnCell[:, 3] = 2*cellsOnCell[:, 3] + 1 + verticesOnCell[:, 4] = 2*cellsOnCell[:, 3] + verticesOnCell[:, 5] = 2*cellsOnCell[:, 4] + 1 + + +def compute_indices_on_edge(mesh): + edgesOnCell = mesh.edgesOnCell + verticesOnCell = mesh.verticesOnCell + indexToCellID = mesh.indexToCellID + + cellsOnEdge = mesh.cellsOnEdge + for j in range(3): + cellsOnEdge[edgesOnCell[:, j], 1] = indexToCellID + for j in range(3, 6): + cellsOnEdge[edgesOnCell[:, j], 0] = indexToCellID + + verticesOnEdge = mesh.verticesOnEdge + verticesOnEdge[edgesOnCell[:, 0], 0] = verticesOnCell[:, 1] + verticesOnEdge[edgesOnCell[:, 0], 1] = verticesOnCell[:, 0] + verticesOnEdge[edgesOnCell[:, 1], 0] = verticesOnCell[:, 2] + verticesOnEdge[edgesOnCell[:, 1], 1] = verticesOnCell[:, 1] + verticesOnEdge[edgesOnCell[:, 2], 0] = verticesOnCell[:, 3] + verticesOnEdge[edgesOnCell[:, 2], 1] = verticesOnCell[:, 2] + + edgesOnEdge = mesh.edgesOnEdge + edgesOnEdge[edgesOnCell[:, 3], 0] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 3], 1] = edgesOnCell[:, 5] + 
edgesOnEdge[edgesOnCell[:, 3], 2] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 3], 3] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 3], 4] = edgesOnCell[:, 2] + + edgesOnEdge[edgesOnCell[:, 4], 0] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 4], 1] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 4], 2] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 4], 3] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 4], 4] = edgesOnCell[:, 3] + + edgesOnEdge[edgesOnCell[:, 5], 0] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 5], 1] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 5], 2] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 5], 3] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 5], 4] = edgesOnCell[:, 4] + + edgesOnEdge[edgesOnCell[:, 0], 5] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 0], 6] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 0], 7] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 0], 8] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 0], 9] = edgesOnCell[:, 5] + + edgesOnEdge[edgesOnCell[:, 1], 5] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 1], 6] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 1], 7] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 1], 8] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 1], 9] = edgesOnCell[:, 0] + + edgesOnEdge[edgesOnCell[:, 2], 5] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 2], 6] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 2], 7] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 2], 8] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 2], 9] = edgesOnCell[:, 1] + + +def compute_indices_on_vertex(mesh): + edgesOnCell = mesh.edgesOnCell + verticesOnCell = mesh.verticesOnCell + indexToCellID = mesh.indexToCellID + + cellsOnVertex = mesh.cellsOnVertex + cellsOnVertex[verticesOnCell[:, 1], 2] = indexToCellID + cellsOnVertex[verticesOnCell[:, 3], 0] = indexToCellID + cellsOnVertex[verticesOnCell[:, 5], 1] = indexToCellID + cellsOnVertex[verticesOnCell[:, 0], 0] = indexToCellID + 
cellsOnVertex[verticesOnCell[:, 2], 1] = indexToCellID + cellsOnVertex[verticesOnCell[:, 4], 2] = indexToCellID + + edgesOnVertex = mesh.edgesOnVertex + edgesOnVertex[verticesOnCell[:, 0], 0] = edgesOnCell[:, 0] + edgesOnVertex[verticesOnCell[:, 1], 0] = edgesOnCell[:, 0] + edgesOnVertex[verticesOnCell[:, 2], 2] = edgesOnCell[:, 1] + edgesOnVertex[verticesOnCell[:, 1], 2] = edgesOnCell[:, 1] + edgesOnVertex[verticesOnCell[:, 2], 1] = edgesOnCell[:, 2] + edgesOnVertex[verticesOnCell[:, 3], 1] = edgesOnCell[:, 2] + + +def compute_weights_on_edge(mesh): + edgesOnCell = mesh.edgesOnCell + + nEdges = mesh.sizes['nEdges'] + maxEdges2 = mesh.sizes['maxEdges2'] + mesh['weightsOnEdge'] = (('nEdges', 'maxEdges2'), + numpy.zeros((nEdges, maxEdges2), 'f8')) + weightsOnEdge = mesh.weightsOnEdge + + weights = (1./numpy.sqrt(3.))*numpy.array( + [[1./3., 1./6., 0., 1./6., 1./3.], + [1./3., -1./6., 0., 1./6., -1./3.], + [-1./3., -1./6., 0., -1./6., -1./3.]]) + for i in range(3): + for j in range(5): + weightsOnEdge[edgesOnCell[:, i+3], j] = weights[i, j] + for i in range(3): + for j in range(5): + weightsOnEdge[edgesOnCell[:, i], j+5] = weights[i, j] + + +def compute_coordinates(mesh): + + dc = mesh.attrs['dc'] + edgesOnCell = mesh.edgesOnCell + verticesOnCell = mesh.verticesOnCell + + nCells = mesh.sizes['nCells'] + nEdges = mesh.sizes['nEdges'] + nVertices = mesh.sizes['nVertices'] + vertexDegree = mesh.sizes['vertexDegree'] + + mesh['latCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) + mesh['lonCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) + + mesh['latEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh['lonEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + + mesh['latVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + mesh['lonVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + + cellRow = mesh.cellRow + cellCol = mesh.cellCol + mask = numpy.mod(cellRow, 2) == 0 + + mesh['xCell'] = (dc*(cellCol + 0.5)).where(mask, dc*(cellCol + 1)) + 
mesh['yCell'] = dc*(cellRow + 1)*numpy.sqrt(3.)/2. + mesh['zCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) + + mesh['xEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh['yEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh['zEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + + mesh.xEdge[edgesOnCell[:, 0]] = mesh.xCell - 0.5*dc + mesh.yEdge[edgesOnCell[:, 0]] = mesh.yCell + + mesh.xEdge[edgesOnCell[:, 1]] = mesh.xCell - 0.5*dc*numpy.cos(numpy.pi/3.) + mesh.yEdge[edgesOnCell[:, 1]] = mesh.yCell - 0.5*dc*numpy.sin(numpy.pi/3.) + + mesh.xEdge[edgesOnCell[:, 2]] = mesh.xCell + 0.5*dc*numpy.cos(numpy.pi/3.) + mesh.yEdge[edgesOnCell[:, 2]] = mesh.yCell - 0.5*dc*numpy.sin(numpy.pi/3.) + + mesh['xVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + mesh['yVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + mesh['zVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + + mesh.xVertex[verticesOnCell[:, 0]] = mesh.xCell - 0.5*dc + mesh.yVertex[verticesOnCell[:, 0]] = mesh.yCell + dc*numpy.sqrt(3.)/6. + + mesh.xVertex[verticesOnCell[:, 1]] = mesh.xCell - 0.5*dc + mesh.yVertex[verticesOnCell[:, 1]] = mesh.yCell - dc*numpy.sqrt(3.)/6. + + mesh['angleEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh.angleEdge[edgesOnCell[:, 1]] = numpy.pi/3. + mesh.angleEdge[edgesOnCell[:, 2]] = 2.*numpy.pi/3. + + mesh['dcEdge'] = (('nEdges'), dc*numpy.ones((nEdges,), 'f8')) + mesh['dvEdge'] = mesh.dcEdge*numpy.sqrt(3.)/3. 
+ + mesh['areaCell'] = \ + (('nCells'), dc**2*numpy.sqrt(3.)/2.*numpy.ones((nCells,), 'f8')) + + mesh['areaTriangle'] = \ + (('nVertices'), dc**2*numpy.sqrt(3.)/4.*numpy.ones((nVertices,), 'f8')) + + mesh['kiteAreasOnVertex'] = \ + (('nVertices', 'vertexDegree'), + dc**2*numpy.sqrt(3.)/12.*numpy.ones((nVertices, vertexDegree), 'f8')) + + mesh['meshDensity'] = (('nCells',), numpy.ones((nCells,), 'f8')) + + +def add_one_to_indices(mesh): + '''Neede to adhere to Fortran indexing''' + indexVars = ['indexToCellID', 'indexToEdgeID', 'indexToVertexID', + 'cellsOnCell', 'edgesOnCell', 'verticesOnCell', + 'cellsOnEdge', 'edgesOnEdge', 'verticesOnEdge', + 'cellsOnVertex', 'edgesOnVertex'] + for var in indexVars: + mesh[var] = mesh[var] + 1 + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def make_diff(mesh, refMeshFileName, diffFileName): + + refMesh = xarray.open_dataset(refMeshFileName) + diff = xarray.Dataset() + for variable in mesh.data_vars: + if variable in refMesh: + diff[variable] = mesh[variable] - refMesh[variable] + print(diff[variable].name, float(numpy.abs(diff[variable]).max())) + else: + print('mesh has extra variable {}'.format(mesh[variable].name)) + + for variable in refMesh.data_vars: + if variable not in mesh: + print('mesh mising variable {}'.format(refMesh[variable].name)) + + for attr in refMesh.attrs: + if attr not in mesh.attrs: + print('mesh mising attribute {}'.format(attr)) + + for attr in mesh.attrs: + if attr not in refMesh.attrs: + print('mesh has extra attribute {}'.format(attr)) + + write_netcdf(diff, diffFileName) + + +if __name__ == '__main__': + + mesh = 
xarray.Dataset() + outFileName = parse_args(mesh) + compute_indices_on_cell(mesh) + compute_indices_on_edge(mesh) + compute_indices_on_vertex(mesh) + compute_weights_on_edge(mesh) + compute_coordinates(mesh) + add_one_to_indices(mesh) + + # drop some arrays that aren't stantard for MPAS but were used to compute + # the hex mesh + mesh = mesh.drop(['cellIdx', 'cellRow', 'cellCol']) + mesh.attrs.pop('dc') + + write_netcdf(mesh, outFileName) + + # used to make sure results are exactly identical to periodic_hex + # make_diff(mesh, '../periodic_hex/grid.nc', 'diff.nc') From e0e4dcb9315b64ef2cc01f6bc003a48176325d4e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 15 Feb 2019 14:30:39 -0700 Subject: [PATCH 090/180] Add conda recipe for mesh conversion tools --- .../mesh_conversion_tools/conda/build.sh | 8 + .../mesh_conversion_tools/conda/meta.yaml | 64 + .../test/Arctic_Ocean.geojson | 7638 +++++++++++++++++ .../test/land_mask_final.nc | Bin 0 -> 11732 bytes .../test/mesh.QU.1920km.151026.nc | Bin 0 -> 178192 bytes 5 files changed, 7710 insertions(+) create mode 100644 mesh_tools/mesh_conversion_tools/conda/build.sh create mode 100644 mesh_tools/mesh_conversion_tools/conda/meta.yaml create mode 100644 mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson create mode 100644 mesh_tools/mesh_conversion_tools/test/land_mask_final.nc create mode 100644 mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc diff --git a/mesh_tools/mesh_conversion_tools/conda/build.sh b/mesh_tools/mesh_conversion_tools/conda/build.sh new file mode 100644 index 000000000..3b60f3029 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/conda/build.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -x +set -e + +export NETCDF=$NETCDF_DIR +cd mesh_tools/mesh_conversion_tools +make diff --git a/mesh_tools/mesh_conversion_tools/conda/meta.yaml b/mesh_tools/mesh_conversion_tools/conda/meta.yaml new file mode 100644 index 000000000..a1644d2f0 --- /dev/null +++ 
b/mesh_tools/mesh_conversion_tools/conda/meta.yaml @@ -0,0 +1,64 @@ +{% set name = "mpas_mesh_conversion_tools" %} +{% set version = "0.0.1" %} + +package: + name: '{{ name|lower }}' + version: '{{ version }}' + +source: + git_url: https://github.com/MPAS-Dev/MPAS-Tools.git + git_rev: eb772e863dd65dbff1a5cad948211dabd240285d + +build: + number: 0 + +requirements: + build: + - {{ compiler('c') }} + - {{ compiler('cxx') }} + host: + - netcdf4 =1.4.2 + - hdf5 =1.10.3 + - libnetcdf =4.6.1 + run: + - netcdf4 =1.4.2 + - hdf5 =1.10.3 + - libnetcdf =4.6.1 + +test: + commands: + - cd mesh_tools/mesh_conversion_tools/test + - MpasMeshConverter.x mesh.QU.1920km.151026.nc mesh.nc + - MpasCellCuller.x mesh.nc culled_mesh.nc -m land_mask_final.nc + - MpasMaskCreator.x mesh.nc arctic_mask.nc -f Arctic_Ocean.geojson + +about: + home: https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools/mesh_conversion_tools + license: BSD-3-Clause + license_family: BSD + license_file: '' + summary: Mesh conversion tools for Model for Prediction Across Scales (MPAS) + description: | + MpasMeshConverter.x is a piece of software designed create an MPAS mesh. + As input, this software takes the locations of MPAS cell centers, and cell + vertices, along with the connectivity array cellsOnVertex. If provided, it + will also migrate data from the meshDensity field, if it is not present it + will write 1.0 for every cell. + + MpasCellCuller.x is a piece of software designed remove + cells/edge/vertices from an MPAS mesh. As input, this software takes a + valid MPAS mesh with one additional field "cullCell". This new field should + be nCells integers. A 1 means the cell should be kept, and a 0 means the + cell should be removed. + + MpasMaskCreator.x is a piece of software designed to create cell masks + from region definitions. 
Region definitions are defined in geojson files, + and can be created using the tools contained within the repository located + at: + https://github.com/MPAS-Dev/geometric_features + Masks have a value of 0 or 1, and are integers. + doc_url: 'https://github.com/MPAS-Dev/MPAS-Tools/blob/master/mesh_tools/mesh_conversion_tools/README' + dev_url: 'https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools/mesh_conversion_tools' + +extra: + recipe-maintainers: 'xylar' diff --git a/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson b/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson new file mode 100644 index 000000000..d02d1cb6a --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson @@ -0,0 +1,7638 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "name": "Arctic Ocean", + "tags": "Arctic_Ocean;Arctic_Basin", + "object": "region", + "component": "ocean", + "author": "http://www.marineregions.org/downloads.php#iho" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -114.848345, + 77.854709 + ], + [ + -114.730291, + 77.818882 + ], + [ + -114.709164, + 77.813600 + ], + [ + -114.662509, + 77.803864 + ], + [ + -114.653505, + 77.801521 + ], + [ + -115.950298, + 77.452424 + ], + [ + -116.082091, + 77.488309 + ], + [ + -116.203336, + 77.519991 + ], + [ + -116.351100, + 77.539155 + ], + [ + -116.487782, + 77.550264 + ], + [ + -116.536118, + 77.544436 + ], + [ + -116.585827, + 77.540545 + ], + [ + -116.647509, + 77.537764 + ], + [ + -116.754182, + 77.534427 + ], + [ + -116.833073, + 77.533600 + ], + [ + -116.875000, + 77.534991 + ], + [ + -116.907527, + 77.527827 + ], + [ + -116.879173, + 77.517764 + ], + [ + -116.851100, + 77.516664 + ], + [ + -116.763336, + 77.514018 + ], + [ + -116.785282, + 77.499145 + ], + [ + -116.894445, + 77.473309 + ], + [ + -116.919727, + 77.470536 + ], + [ + -116.991673, + 77.466664 + ], + [ + -117.038336, + 77.471000 + ], + [ + -117.073545, 
+ 77.475464 + ], + [ + -117.148755, + 77.455273 + ], + [ + -117.002918, + 77.396664 + ], + [ + -116.975827, + 77.393327 + ], + [ + -116.899173, + 77.399427 + ], + [ + -116.870827, + 77.400818 + ], + [ + -116.741382, + 77.395264 + ], + [ + -116.653618, + 77.385200 + ], + [ + -116.787355, + 77.318318 + ], + [ + -116.848345, + 77.315809 + ], + [ + -116.876391, + 77.318055 + ], + [ + -116.946664, + 77.329436 + ], + [ + -117.007500, + 77.343036 + ], + [ + -117.060818, + 77.353318 + ], + [ + -117.119455, + 77.359982 + ], + [ + -117.150836, + 77.360118 + ], + [ + -117.177564, + 77.343664 + ], + [ + -117.154164, + 77.332491 + ], + [ + -117.117982, + 77.338391 + ], + [ + -117.060818, + 77.326664 + ], + [ + -117.020618, + 77.299491 + ], + [ + -117.276400, + 77.289155 + ], + [ + -117.450845, + 77.312191 + ], + [ + -117.611936, + 77.327773 + ], + [ + -117.731518, + 77.339991 + ], + [ + -117.753900, + 77.349155 + ], + [ + -117.774591, + 77.360400 + ], + [ + -117.858891, + 77.386509 + ], + [ + -117.911118, + 77.386936 + ], + [ + -118.097227, + 77.378591 + ], + [ + -118.130336, + 77.366091 + ], + [ + -118.165836, + 77.355255 + ], + [ + -118.197218, + 77.354982 + ], + [ + -118.224718, + 77.356091 + ], + [ + -118.449718, + 77.358873 + ], + [ + -118.651109, + 77.360536 + ], + [ + -118.732500, + 77.355545 + ], + [ + -118.757236, + 77.352482 + ], + [ + -118.870000, + 77.333882 + ], + [ + -118.893891, + 77.327482 + ], + [ + -118.916945, + 77.322491 + ], + [ + -118.941382, + 77.319718 + ], + [ + -119.001109, + 77.321109 + ], + [ + -119.086673, + 77.326664 + ], + [ + -119.114436, + 77.327482 + ], + [ + -119.153345, + 77.325818 + ], + [ + -119.201109, + 77.313036 + ], + [ + -119.222227, + 77.306364 + ], + [ + -119.260700, + 77.292209 + ], + [ + -119.295836, + 77.276655 + ], + [ + -119.315827, + 77.258045 + ], + [ + -119.339864, + 77.233873 + ], + [ + -119.355755, + 77.209291 + ], + [ + -119.389182, + 77.184418 + ], + [ + -119.410827, + 77.178591 + ], + [ + -119.433318, + 77.173600 + ], + 
[ + -119.600564, + 77.145827 + ], + [ + -119.776400, + 77.106091 + ], + [ + -119.818891, + 77.093873 + ], + [ + -119.835418, + 77.068664 + ], + [ + -119.920273, + 77.023609 + ], + [ + -119.955555, + 77.011245 + ], + [ + -119.976936, + 77.013318 + ], + [ + -119.997218, + 77.016391 + ], + [ + -120.030827, + 77.014300 + ], + [ + -120.069164, + 77.008045 + ], + [ + -120.091382, + 77.003055 + ], + [ + -120.365282, + 76.836109 + ], + [ + -120.374436, + 76.807964 + ], + [ + -120.401673, + 76.797209 + ], + [ + -120.581127, + 76.749418 + ], + [ + -120.604173, + 76.746373 + ], + [ + -120.633327, + 76.747482 + ], + [ + -120.669155, + 76.751100 + ], + [ + -120.766109, + 76.743591 + ], + [ + -120.812209, + 76.737200 + ], + [ + -120.840136, + 76.728036 + ], + [ + -120.861936, + 76.711927 + ], + [ + -120.892227, + 76.696091 + ], + [ + -120.923318, + 76.689973 + ], + [ + -121.056664, + 76.671373 + ], + [ + -121.079455, + 76.668318 + ], + [ + -121.109845, + 76.670818 + ], + [ + -121.212509, + 76.649718 + ], + [ + -121.311527, + 76.591791 + ], + [ + -121.306955, + 76.578327 + ], + [ + -121.312209, + 76.572491 + ], + [ + -121.421945, + 76.493591 + ], + [ + -121.523755, + 76.440664 + ], + [ + -121.549991, + 76.434709 + ], + [ + -121.738055, + 76.421100 + ], + [ + -121.781955, + 76.420255 + ], + [ + -121.826682, + 76.422764 + ], + [ + -122.014718, + 76.432482 + ], + [ + -122.309436, + 76.408873 + ], + [ + -122.398900, + 76.396945 + ], + [ + -122.578609, + 76.353591 + ], + [ + -122.608327, + 76.345273 + ], + [ + -122.629436, + 76.333182 + ], + [ + -122.641400, + 76.293873 + ], + [ + -122.633900, + 76.267627 + ], + [ + -122.721118, + 76.231373 + ], + [ + -122.848618, + 76.208882 + ], + [ + -122.979173, + 76.125809 + ], + [ + -123.037782, + 76.084718 + ], + [ + -124.054720, + 75.960278 + ], + [ + -125.071657, + 75.834747 + ], + [ + -126.088595, + 75.708118 + ], + [ + -127.105533, + 75.580380 + ], + [ + -128.122471, + 75.451524 + ], + [ + -129.139408, + 75.321543 + ], + [ + -130.156346, + 
75.190425 + ], + [ + -131.173284, + 75.058163 + ], + [ + -132.190221, + 74.924747 + ], + [ + -133.207159, + 74.790168 + ], + [ + -134.224097, + 74.654415 + ], + [ + -135.241035, + 74.517481 + ], + [ + -136.257972, + 74.379354 + ], + [ + -137.274910, + 74.240027 + ], + [ + -138.291848, + 74.099488 + ], + [ + -139.308786, + 73.957729 + ], + [ + -140.325723, + 73.814739 + ], + [ + -141.342661, + 73.670509 + ], + [ + -142.359599, + 73.525029 + ], + [ + -143.376537, + 73.378290 + ], + [ + -144.393474, + 73.230280 + ], + [ + -145.410412, + 73.080991 + ], + [ + -146.427350, + 72.930412 + ], + [ + -147.444288, + 72.778533 + ], + [ + -148.461225, + 72.625344 + ], + [ + -149.478163, + 72.470835 + ], + [ + -150.495101, + 72.314995 + ], + [ + -151.512039, + 72.157815 + ], + [ + -152.528976, + 71.999283 + ], + [ + -153.545914, + 71.839390 + ], + [ + -154.562852, + 71.678125 + ], + [ + -155.579790, + 71.515477 + ], + [ + -156.596727, + 71.351436 + ], + [ + -157.617060, + 71.362620 + ], + [ + -158.513610, + 71.372447 + ], + [ + -158.637393, + 71.373804 + ], + [ + -159.657726, + 71.384988 + ], + [ + -160.678060, + 71.396172 + ], + [ + -161.698393, + 71.407356 + ], + [ + -162.718726, + 71.418540 + ], + [ + -163.739059, + 71.429724 + ], + [ + -164.759392, + 71.440907 + ], + [ + -165.779725, + 71.452091 + ], + [ + -166.800058, + 71.463275 + ], + [ + -167.820391, + 71.474459 + ], + [ + -168.840724, + 71.485643 + ], + [ + -169.861057, + 71.496827 + ], + [ + -170.881390, + 71.508011 + ], + [ + -171.901723, + 71.519195 + ], + [ + -172.922056, + 71.530379 + ], + [ + -173.942389, + 71.541562 + ], + [ + -174.962722, + 71.552746 + ], + [ + -175.983055, + 71.563930 + ], + [ + -177.003388, + 71.575114 + ], + [ + -178.023721, + 71.586298 + ], + [ + -179.044055, + 71.597482 + ], + [ + -179.195555, + 71.581382 + ], + [ + -179.222500, + 71.564709 + ], + [ + -179.305300, + 71.551364 + ], + [ + -179.502255, + 71.566373 + ], + [ + -179.628600, + 71.577191 + ], + [ + -179.900773, + 71.548791 + ], + [ 
+ -179.927245, + 71.535536 + ], + [ + -180.000000, + 71.535845 + ], + [ + -180.000000, + 90.000000 + ], + [ + 180.000000, + 90.000000 + ], + [ + 180.000000, + 71.535855 + ], + [ + 179.981627, + 71.536100 + ], + [ + 178.970764, + 71.821787 + ], + [ + 177.959900, + 72.103201 + ], + [ + 176.949036, + 72.380399 + ], + [ + 175.938173, + 72.653438 + ], + [ + 174.927309, + 72.922374 + ], + [ + 173.916445, + 73.187262 + ], + [ + 172.905582, + 73.448159 + ], + [ + 171.894718, + 73.705119 + ], + [ + 170.883855, + 73.958196 + ], + [ + 169.872991, + 74.207444 + ], + [ + 168.862127, + 74.452917 + ], + [ + 167.851264, + 74.694667 + ], + [ + 166.840400, + 74.932746 + ], + [ + 165.829536, + 75.167206 + ], + [ + 164.818673, + 75.398099 + ], + [ + 163.807809, + 75.625474 + ], + [ + 162.796945, + 75.849382 + ], + [ + 161.786082, + 76.069873 + ], + [ + 160.775218, + 76.286994 + ], + [ + 159.764355, + 76.500796 + ], + [ + 158.753491, + 76.711325 + ], + [ + 157.742627, + 76.918629 + ], + [ + 156.731764, + 77.122755 + ], + [ + 156.722745, + 77.132200 + ], + [ + 156.700809, + 77.136936 + ], + [ + 156.677464, + 77.140545 + ], + [ + 156.653045, + 77.143055 + ], + [ + 156.603027, + 77.146945 + ], + [ + 156.555545, + 77.148036 + ], + [ + 156.496855, + 77.147064 + ], + [ + 156.450255, + 77.140273 + ], + [ + 156.436355, + 77.132064 + ], + [ + 155.417109, + 77.080709 + ], + [ + 154.397864, + 77.029154 + ], + [ + 153.378618, + 76.977396 + ], + [ + 152.359373, + 76.925435 + ], + [ + 151.340127, + 76.873270 + ], + [ + 150.320882, + 76.820901 + ], + [ + 149.301636, + 76.768327 + ], + [ + 149.113864, + 76.755827 + ], + [ + 148.982455, + 76.752209 + ], + [ + 148.746609, + 76.745818 + ], + [ + 147.645006, + 76.688409 + ], + [ + 146.543403, + 76.630756 + ], + [ + 145.441800, + 76.572857 + ], + [ + 144.340197, + 76.514713 + ], + [ + 143.238594, + 76.456321 + ], + [ + 142.136991, + 76.397681 + ], + [ + 141.035388, + 76.338793 + ], + [ + 139.933785, + 76.279654 + ], + [ + 138.832182, + 76.220264 + ], + [ + 
137.831149, + 76.365653 + ], + [ + 136.830116, + 76.509537 + ], + [ + 135.829083, + 76.651931 + ], + [ + 134.828050, + 76.792849 + ], + [ + 133.827017, + 76.932305 + ], + [ + 132.825984, + 77.070314 + ], + [ + 131.824951, + 77.206890 + ], + [ + 130.823918, + 77.342047 + ], + [ + 129.822885, + 77.475799 + ], + [ + 128.821852, + 77.608160 + ], + [ + 127.820819, + 77.739145 + ], + [ + 126.819786, + 77.868765 + ], + [ + 125.818753, + 77.997036 + ], + [ + 124.817720, + 78.123970 + ], + [ + 123.816687, + 78.249580 + ], + [ + 122.815654, + 78.373881 + ], + [ + 121.814621, + 78.496885 + ], + [ + 120.813588, + 78.618604 + ], + [ + 119.812555, + 78.739053 + ], + [ + 118.811522, + 78.858243 + ], + [ + 117.810489, + 78.976187 + ], + [ + 116.809456, + 79.092898 + ], + [ + 115.808423, + 79.208388 + ], + [ + 114.807390, + 79.322670 + ], + [ + 113.806357, + 79.435755 + ], + [ + 112.805324, + 79.547656 + ], + [ + 111.804291, + 79.658385 + ], + [ + 110.803258, + 79.767954 + ], + [ + 109.802225, + 79.876373 + ], + [ + 108.801192, + 79.983656 + ], + [ + 107.800159, + 80.089814 + ], + [ + 106.799126, + 80.194857 + ], + [ + 105.798093, + 80.298798 + ], + [ + 104.797060, + 80.401647 + ], + [ + 103.796027, + 80.503416 + ], + [ + 102.794995, + 80.604116 + ], + [ + 101.793962, + 80.703758 + ], + [ + 100.792929, + 80.802352 + ], + [ + 99.791896, + 80.899909 + ], + [ + 98.790863, + 80.996440 + ], + [ + 97.789830, + 81.091956 + ], + [ + 96.788797, + 81.186466 + ], + [ + 95.787764, + 81.279982 + ], + [ + 95.732755, + 81.287200 + ], + [ + 95.699418, + 81.290264 + ], + [ + 95.653045, + 81.290545 + ], + [ + 95.527482, + 81.289427 + ], + [ + 94.515779, + 81.284637 + ], + [ + 93.504076, + 81.279843 + ], + [ + 92.492374, + 81.275047 + ], + [ + 91.621942, + 81.270919 + ], + [ + 91.480671, + 81.270249 + ], + [ + 90.468968, + 81.265448 + ], + [ + 89.457266, + 81.260644 + ], + [ + 88.445563, + 81.255838 + ], + [ + 87.433860, + 81.251029 + ], + [ + 86.422158, + 81.246217 + ], + [ + 85.410455, + 81.241403 
+ ], + [ + 84.398752, + 81.236586 + ], + [ + 83.387049, + 81.231766 + ], + [ + 82.375347, + 81.226944 + ], + [ + 81.363644, + 81.222119 + ], + [ + 80.351941, + 81.217292 + ], + [ + 79.340239, + 81.212462 + ], + [ + 78.328536, + 81.207629 + ], + [ + 77.316833, + 81.202794 + ], + [ + 76.305131, + 81.197956 + ], + [ + 75.293428, + 81.193115 + ], + [ + 74.281725, + 81.188272 + ], + [ + 73.270023, + 81.183426 + ], + [ + 72.258320, + 81.178578 + ], + [ + 71.246617, + 81.173727 + ], + [ + 70.234914, + 81.168873 + ], + [ + 69.223212, + 81.164016 + ], + [ + 68.211509, + 81.159157 + ], + [ + 67.199806, + 81.154295 + ], + [ + 66.188104, + 81.149431 + ], + [ + 65.176401, + 81.144564 + ], + [ + 63.791664, + 81.664155 + ], + [ + 63.782491, + 81.669436 + ], + [ + 63.749436, + 81.679427 + ], + [ + 63.701664, + 81.688873 + ], + [ + 63.639436, + 81.697755 + ], + [ + 63.600827, + 81.701936 + ], + [ + 63.463055, + 81.713882 + ], + [ + 63.336382, + 81.719145 + ], + [ + 63.296109, + 81.719982 + ], + [ + 63.217209, + 81.720264 + ], + [ + 63.105827, + 81.717209 + ], + [ + 62.965545, + 81.708600 + ], + [ + 62.764718, + 81.703045 + ], + [ + 62.793327, + 81.708600 + ], + [ + 62.805545, + 81.714709 + ], + [ + 62.788609, + 81.719709 + ], + [ + 62.755273, + 81.720827 + ], + [ + 62.716936, + 81.720264 + ], + [ + 62.680545, + 81.718873 + ], + [ + 62.388882, + 81.707491 + ], + [ + 62.247491, + 81.699709 + ], + [ + 60.834577, + 81.763704 + ], + [ + 59.421664, + 81.827209 + ], + [ + 59.256664, + 81.846936 + ], + [ + 59.208327, + 81.850536 + ], + [ + 59.164991, + 81.851927 + ], + [ + 59.087491, + 81.850818 + ], + [ + 58.700000, + 81.844145 + ], + [ + 58.627491, + 81.840818 + ], + [ + 58.115827, + 81.816936 + ], + [ + 58.043609, + 81.813309 + ], + [ + 57.976382, + 81.807755 + ], + [ + 57.947773, + 81.803036 + ], + [ + 56.908305, + 81.754937 + ], + [ + 55.868837, + 81.706558 + ], + [ + 54.829370, + 81.657896 + ], + [ + 53.789902, + 81.608951 + ], + [ + 52.750434, + 81.559721 + ], + [ + 51.710967, + 
81.510205 + ], + [ + 50.671499, + 81.460400 + ], + [ + 49.632031, + 81.410305 + ], + [ + 48.566984, + 81.137230 + ], + [ + 47.501936, + 80.855545 + ], + [ + 47.464718, + 80.854709 + ], + [ + 47.198600, + 80.840273 + ], + [ + 47.112773, + 80.830555 + ], + [ + 47.091664, + 80.825000 + ], + [ + 47.101664, + 80.819718 + ], + [ + 47.130273, + 80.815809 + ], + [ + 47.170827, + 80.812764 + ], + [ + 47.193318, + 80.808591 + ], + [ + 47.203045, + 80.803036 + ], + [ + 47.194709, + 80.796100 + ], + [ + 47.026655, + 80.756945 + ], + [ + 46.999436, + 80.753327 + ], + [ + 46.833882, + 80.744136 + ], + [ + 46.800273, + 80.742482 + ], + [ + 46.763327, + 80.741927 + ], + [ + 46.685545, + 80.746645 + ], + [ + 46.537773, + 80.743864 + ], + [ + 46.436936, + 80.739155 + ], + [ + 46.402491, + 80.734155 + ], + [ + 46.429436, + 80.729427 + ], + [ + 46.517218, + 80.721927 + ], + [ + 46.385827, + 80.700545 + ], + [ + 46.358891, + 80.696927 + ], + [ + 46.298609, + 80.691655 + ], + [ + 45.987500, + 80.668045 + ], + [ + 45.954164, + 80.666382 + ], + [ + 45.733882, + 80.664427 + ], + [ + 45.534436, + 80.665545 + ], + [ + 45.464718, + 80.663036 + ], + [ + 45.227482, + 80.651091 + ], + [ + 45.161100, + 80.647491 + ], + [ + 45.000000, + 80.632673 + ], + [ + 44.880545, + 80.621091 + ], + [ + 44.860000, + 80.613455 + ], + [ + 43.839773, + 80.577079 + ], + [ + 42.819545, + 80.540702 + ], + [ + 41.799318, + 80.504326 + ], + [ + 40.779091, + 80.467950 + ], + [ + 39.758864, + 80.431574 + ], + [ + 38.738636, + 80.395198 + ], + [ + 37.718409, + 80.358822 + ], + [ + 36.698182, + 80.322446 + ], + [ + 35.677955, + 80.286070 + ], + [ + 34.657727, + 80.249694 + ], + [ + 33.637500, + 80.213318 + ], + [ + 33.624991, + 80.217755 + ], + [ + 33.602218, + 80.221373 + ], + [ + 33.523882, + 80.231091 + ], + [ + 33.424436, + 80.238873 + ], + [ + 33.386382, + 80.241091 + ], + [ + 33.317218, + 80.243045 + ], + [ + 33.286664, + 80.242482 + ], + [ + 33.261109, + 80.240264 + ], + [ + 33.030818, + 80.214427 + ], + [ + 
32.893327, + 80.196364 + ], + [ + 32.751109, + 80.190536 + ], + [ + 32.591100, + 80.179982 + ], + [ + 32.330827, + 80.160536 + ], + [ + 32.121655, + 80.145264 + ], + [ + 31.791945, + 80.128036 + ], + [ + 31.493609, + 80.110809 + ], + [ + 31.476664, + 80.105818 + ], + [ + 31.449436, + 80.085818 + ], + [ + 30.395964, + 80.043113 + ], + [ + 29.342491, + 80.000225 + ], + [ + 28.289018, + 79.957154 + ], + [ + 27.235545, + 79.913900 + ], + [ + 27.165555, + 79.939700 + ], + [ + 27.121382, + 79.958327 + ], + [ + 27.101109, + 79.967482 + ], + [ + 27.145000, + 80.004436 + ], + [ + 27.167773, + 80.021927 + ], + [ + 27.229300, + 80.096518 + ], + [ + 27.182218, + 80.106936 + ], + [ + 27.146109, + 80.107755 + ], + [ + 26.977491, + 80.122755 + ], + [ + 26.908473, + 80.146800 + ], + [ + 26.800000, + 80.172209 + ], + [ + 26.637218, + 80.183318 + ], + [ + 26.598055, + 80.184709 + ], + [ + 26.242491, + 80.186373 + ], + [ + 26.081664, + 80.185809 + ], + [ + 25.893891, + 80.172764 + ], + [ + 25.863882, + 80.171645 + ], + [ + 25.725455, + 80.176227 + ], + [ + 25.698882, + 80.215818 + ], + [ + 25.543609, + 80.234418 + ], + [ + 25.508891, + 80.236373 + ], + [ + 25.468464, + 80.233600 + ], + [ + 25.450273, + 80.224991 + ], + [ + 25.262082, + 80.225682 + ], + [ + 25.228609, + 80.250545 + ], + [ + 25.255273, + 80.255827 + ], + [ + 25.309164, + 80.259718 + ], + [ + 25.345000, + 80.270200 + ], + [ + 25.289436, + 80.274991 + ], + [ + 25.183327, + 80.268600 + ], + [ + 25.105827, + 80.262209 + ], + [ + 24.982218, + 80.254991 + ], + [ + 24.810000, + 80.247755 + ], + [ + 24.779718, + 80.246645 + ], + [ + 24.801109, + 80.258609 + ], + [ + 24.848055, + 80.276927 + ], + [ + 24.888327, + 80.320973 + ], + [ + 24.860000, + 80.338318 + ], + [ + 24.836382, + 80.350818 + ], + [ + 24.814164, + 80.349427 + ], + [ + 24.786664, + 80.343318 + ], + [ + 24.765973, + 80.332073 + ], + [ + 24.750555, + 80.308591 + ], + [ + 24.732082, + 80.291645 + ], + [ + 24.555827, + 80.256382 + ], + [ + 24.532500, + 80.253600 + ], 
+ [ + 24.500555, + 80.254718 + ], + [ + 24.467218, + 80.264018 + ], + [ + 24.476664, + 80.296100 + ], + [ + 24.551936, + 80.306636 + ], + [ + 24.600000, + 80.312900 + ], + [ + 24.560273, + 80.333600 + ], + [ + 24.527218, + 80.341373 + ], + [ + 24.506109, + 80.344982 + ], + [ + 24.347773, + 80.367755 + ], + [ + 24.307500, + 80.368864 + ], + [ + 24.273609, + 80.368591 + ], + [ + 24.213055, + 80.366091 + ], + [ + 24.189436, + 80.363309 + ], + [ + 24.149164, + 80.352764 + ], + [ + 24.182218, + 80.336382 + ], + [ + 24.203327, + 80.333055 + ], + [ + 24.229436, + 80.330273 + ], + [ + 24.290836, + 80.325545 + ], + [ + 24.361109, + 80.325545 + ], + [ + 24.397909, + 80.317209 + ], + [ + 24.365000, + 80.289982 + ], + [ + 24.338055, + 80.283873 + ], + [ + 24.305827, + 80.284718 + ], + [ + 24.216936, + 80.294436 + ], + [ + 23.979436, + 80.308318 + ], + [ + 23.939436, + 80.309418 + ], + [ + 23.906664, + 80.304700 + ], + [ + 23.951664, + 80.293045 + ], + [ + 23.984991, + 80.285264 + ], + [ + 24.022564, + 80.271518 + ], + [ + 23.993882, + 80.269718 + ], + [ + 23.866664, + 80.278045 + ], + [ + 23.844164, + 80.280273 + ], + [ + 23.815555, + 80.288591 + ], + [ + 23.792500, + 80.300818 + ], + [ + 23.761391, + 80.304700 + ], + [ + 23.738055, + 80.297764 + ], + [ + 23.718055, + 80.256655 + ], + [ + 23.728745, + 80.229982 + ], + [ + 23.751391, + 80.207218 + ], + [ + 23.619718, + 80.142773 + ], + [ + 23.596664, + 80.135818 + ], + [ + 23.548609, + 80.129836 + ], + [ + 23.489164, + 80.154709 + ], + [ + 23.467355, + 80.172345 + ], + [ + 23.492073, + 80.190945 + ], + [ + 23.472909, + 80.206236 + ], + [ + 23.429436, + 80.207764 + ], + [ + 23.248055, + 80.192473 + ], + [ + 23.225273, + 80.189700 + ], + [ + 23.090273, + 80.164564 + ], + [ + 23.118327, + 80.153045 + ], + [ + 23.175827, + 80.136382 + ], + [ + 23.220482, + 80.117482 + ], + [ + 23.183055, + 80.113309 + ], + [ + 23.152773, + 80.115264 + ], + [ + 23.101391, + 80.120818 + ], + [ + 23.071664, + 80.126091 + ], + [ + 23.004164, + 
80.156791 + ], + [ + 23.047500, + 80.244982 + ], + [ + 23.086936, + 80.252491 + ], + [ + 23.139718, + 80.256655 + ], + [ + 23.169718, + 80.257764 + ], + [ + 23.196109, + 80.259991 + ], + [ + 23.242491, + 80.265545 + ], + [ + 23.301800, + 80.280682 + ], + [ + 23.339718, + 80.342418 + ], + [ + 23.308891, + 80.349155 + ], + [ + 23.268609, + 80.350264 + ], + [ + 23.208045, + 80.360809 + ], + [ + 23.127082, + 80.382764 + ], + [ + 23.144718, + 80.392491 + ], + [ + 23.173045, + 80.398327 + ], + [ + 23.203327, + 80.399718 + ], + [ + 23.243882, + 80.398609 + ], + [ + 23.277500, + 80.399155 + ], + [ + 23.309445, + 80.404155 + ], + [ + 23.355827, + 80.426518 + ], + [ + 23.315000, + 80.444982 + ], + [ + 23.284718, + 80.450273 + ], + [ + 23.258055, + 80.453045 + ], + [ + 23.127773, + 80.461382 + ], + [ + 22.946936, + 80.476100 + ], + [ + 22.886936, + 80.490264 + ], + [ + 22.833882, + 80.436918 + ], + [ + 22.832500, + 80.407900 + ], + [ + 22.749164, + 80.324155 + ], + [ + 22.716936, + 80.325000 + ], + [ + 22.695000, + 80.328327 + ], + [ + 22.619236, + 80.348591 + ], + [ + 22.632773, + 80.369982 + ], + [ + 22.658055, + 80.384564 + ], + [ + 22.678400, + 80.412073 + ], + [ + 22.612218, + 80.426655 + ], + [ + 22.498055, + 80.429155 + ], + [ + 22.408327, + 80.426655 + ], + [ + 22.384991, + 80.423600 + ], + [ + 22.361109, + 80.410264 + ], + [ + 22.335764, + 80.358736 + ], + [ + 22.386664, + 80.328873 + ], + [ + 22.408609, + 80.325545 + ], + [ + 22.503191, + 80.319909 + ], + [ + 22.541391, + 80.314991 + ], + [ + 22.568536, + 80.296373 + ], + [ + 22.500000, + 80.275164 + ], + [ + 22.452355, + 80.261518 + ], + [ + 22.419164, + 80.169709 + ], + [ + 22.361664, + 80.037491 + ], + [ + 22.328473, + 80.033736 + ], + [ + 22.363882, + 80.001391 + ], + [ + 22.355000, + 79.995255 + ], + [ + 22.287500, + 79.981091 + ], + [ + 22.257500, + 79.978318 + ], + [ + 22.226382, + 79.979155 + ], + [ + 22.198882, + 79.984700 + ], + [ + 22.192636, + 80.019009 + ], + [ + 22.128055, + 80.075545 + ], + [ + 
22.054164, + 80.108600 + ], + [ + 21.856109, + 80.143600 + ], + [ + 21.829718, + 80.146100 + ], + [ + 21.798055, + 80.146945 + ], + [ + 21.736664, + 80.140682 + ], + [ + 21.666936, + 80.112491 + ], + [ + 21.637500, + 80.111100 + ], + [ + 21.607982, + 80.121300 + ], + [ + 21.658882, + 80.144436 + ], + [ + 21.686664, + 80.154164 + ], + [ + 21.729718, + 80.168591 + ], + [ + 21.761109, + 80.177473 + ], + [ + 21.884300, + 80.202764 + ], + [ + 21.898327, + 80.217755 + ], + [ + 21.870409, + 80.258464 + ], + [ + 21.838327, + 80.271100 + ], + [ + 21.802773, + 80.272491 + ], + [ + 21.697218, + 80.273609 + ], + [ + 21.483327, + 80.266391 + ], + [ + 21.300555, + 80.239973 + ], + [ + 21.125273, + 80.216664 + ], + [ + 21.095827, + 80.215273 + ], + [ + 20.943327, + 80.212200 + ], + [ + 20.881936, + 80.211382 + ], + [ + 20.853327, + 80.211109 + ], + [ + 20.812427, + 80.219700 + ], + [ + 20.745691, + 80.267773 + ], + [ + 20.783336, + 80.286927 + ], + [ + 20.835136, + 80.306018 + ], + [ + 20.812773, + 80.311918 + ], + [ + 20.779445, + 80.311373 + ], + [ + 20.727218, + 80.306927 + ], + [ + 20.674436, + 80.298455 + ], + [ + 20.647773, + 80.292482 + ], + [ + 20.615555, + 80.293045 + ], + [ + 20.572500, + 80.302200 + ], + [ + 20.527773, + 80.320827 + ], + [ + 20.432027, + 80.397309 + ], + [ + 20.415827, + 80.413882 + ], + [ + 20.365827, + 80.419709 + ], + [ + 20.329445, + 80.421100 + ], + [ + 20.237773, + 80.419709 + ], + [ + 20.211382, + 80.417482 + ], + [ + 20.179436, + 80.412618 + ], + [ + 20.100555, + 80.405818 + ], + [ + 20.035136, + 80.463600 + ], + [ + 20.007500, + 80.469436 + ], + [ + 19.701109, + 80.499709 + ], + [ + 19.668882, + 80.501664 + ], + [ + 19.642491, + 80.499418 + ], + [ + 19.481936, + 80.462345 + ], + [ + 19.463327, + 80.454709 + ], + [ + 19.458745, + 80.420682 + ], + [ + 19.477355, + 80.394991 + ], + [ + 19.512218, + 80.387209 + ], + [ + 19.548609, + 80.386109 + ], + [ + 19.581936, + 80.386936 + ], + [ + 19.689991, + 80.395264 + ], + [ + 19.723327, + 80.396382 + ], 
+ [ + 19.913745, + 80.376645 + ], + [ + 19.858609, + 80.339709 + ], + [ + 19.812773, + 80.275818 + ], + [ + 19.807218, + 80.231091 + ], + [ + 19.840827, + 80.220964 + ], + [ + 19.808609, + 80.212491 + ], + [ + 19.687500, + 80.213045 + ], + [ + 19.655273, + 80.213609 + ], + [ + 19.623745, + 80.218600 + ], + [ + 19.600827, + 80.224700 + ], + [ + 19.433609, + 80.286373 + ], + [ + 19.416518, + 80.299709 + ], + [ + 19.385555, + 80.314145 + ], + [ + 19.330273, + 80.325273 + ], + [ + 19.261664, + 80.334718 + ], + [ + 19.211382, + 80.340273 + ], + [ + 19.156664, + 80.345264 + ], + [ + 19.097218, + 80.349427 + ], + [ + 19.060827, + 80.350536 + ], + [ + 19.023891, + 80.350536 + ], + [ + 18.980000, + 80.336655 + ], + [ + 19.099164, + 80.258045 + ], + [ + 19.214582, + 80.196918 + ], + [ + 19.310555, + 80.174427 + ], + [ + 19.368882, + 80.169982 + ], + [ + 19.404991, + 80.170255 + ], + [ + 19.480545, + 80.168318 + ], + [ + 19.552082, + 80.162618 + ], + [ + 19.579855, + 80.149291 + ], + [ + 19.387500, + 80.107209 + ], + [ + 19.340555, + 80.086382 + ], + [ + 19.318882, + 80.083327 + ], + [ + 19.290000, + 80.081664 + ], + [ + 19.263336, + 80.084155 + ], + [ + 19.234164, + 80.089573 + ], + [ + 19.212636, + 80.102200 + ], + [ + 19.191245, + 80.114982 + ], + [ + 18.904164, + 80.187618 + ], + [ + 18.760973, + 80.187764 + ], + [ + 18.717355, + 80.159291 + ], + [ + 18.674436, + 80.158036 + ], + [ + 18.634027, + 80.161800 + ], + [ + 18.610555, + 80.167755 + ], + [ + 18.594164, + 80.184709 + ], + [ + 18.443464, + 80.181091 + ], + [ + 18.415555, + 80.172482 + ], + [ + 18.254718, + 80.173036 + ], + [ + 18.223327, + 80.174991 + ], + [ + 18.203327, + 80.184709 + ], + [ + 18.167500, + 80.185809 + ], + [ + 18.038327, + 80.185255 + ], + [ + 17.877909, + 80.155536 + ], + [ + 17.790827, + 80.127064 + ], + [ + 17.938327, + 80.125536 + ], + [ + 18.007500, + 80.128036 + ], + [ + 18.062773, + 80.126082 + ], + [ + 18.214822, + 80.100459 + ], + [ + 16.547642, + 80.034096 + ], + [ + 16.531109, + 
80.042345 + ], + [ + 16.335000, + 80.060527 + ], + [ + 16.307773, + 80.062764 + ], + [ + 14.412899, + 80.208520 + ], + [ + 13.411241, + 80.285569 + ], + [ + 12.409584, + 80.362618 + ], + [ + 11.407926, + 80.439666 + ], + [ + 10.406269, + 80.516715 + ], + [ + 9.404611, + 80.593764 + ], + [ + 8.402954, + 80.670813 + ], + [ + 7.401296, + 80.747862 + ], + [ + 6.399639, + 80.824910 + ], + [ + 5.397981, + 80.901959 + ], + [ + 4.396324, + 80.979008 + ], + [ + 3.394666, + 81.056057 + ], + [ + 2.393009, + 81.133106 + ], + [ + 1.391352, + 81.210155 + ], + [ + 0.389694, + 81.287203 + ], + [ + -0.611963, + 81.364252 + ], + [ + -1.613621, + 81.441301 + ], + [ + -2.615278, + 81.518350 + ], + [ + -3.616936, + 81.595399 + ], + [ + -4.618593, + 81.672447 + ], + [ + -5.620251, + 81.749496 + ], + [ + -6.621908, + 81.826545 + ], + [ + -7.623566, + 81.903594 + ], + [ + -8.625223, + 81.980643 + ], + [ + -9.626881, + 82.057692 + ], + [ + -10.628538, + 82.134740 + ], + [ + -11.630196, + 82.211789 + ], + [ + -12.631853, + 82.288838 + ], + [ + -13.633510, + 82.365887 + ], + [ + -14.635168, + 82.442936 + ], + [ + -15.636825, + 82.519985 + ], + [ + -16.638483, + 82.597033 + ], + [ + -17.640140, + 82.674082 + ], + [ + -18.641798, + 82.751131 + ], + [ + -19.643455, + 82.828180 + ], + [ + -20.645113, + 82.905229 + ], + [ + -21.646770, + 82.982277 + ], + [ + -22.648428, + 83.059326 + ], + [ + -23.650085, + 83.136375 + ], + [ + -24.651743, + 83.213424 + ], + [ + -25.653400, + 83.290473 + ], + [ + -25.670000, + 83.298873 + ], + [ + -25.685000, + 83.303591 + ], + [ + -25.775555, + 83.325000 + ], + [ + -25.800282, + 83.330555 + ], + [ + -26.100000, + 83.369709 + ], + [ + -26.199718, + 83.379155 + ], + [ + -26.257782, + 83.383882 + ], + [ + -26.340555, + 83.388045 + ], + [ + -26.751664, + 83.421100 + ], + [ + -27.176664, + 83.450000 + ], + [ + -27.432827, + 83.466618 + ], + [ + -27.751109, + 83.477764 + ], + [ + -27.860836, + 83.481091 + ], + [ + -27.913891, + 83.481655 + ], + [ + -27.961945, + 
83.479709 + ], + [ + -28.004445, + 83.474700 + ], + [ + -28.050555, + 83.471645 + ], + [ + -28.095000, + 83.469982 + ], + [ + -28.197218, + 83.466936 + ], + [ + -28.391945, + 83.462773 + ], + [ + -28.440282, + 83.456509 + ], + [ + -28.415836, + 83.448318 + ], + [ + -28.367218, + 83.443036 + ], + [ + -28.204445, + 83.434418 + ], + [ + -28.149727, + 83.431509 + ], + [ + -28.317500, + 83.419436 + ], + [ + -28.559718, + 83.416091 + ], + [ + -28.602500, + 83.416091 + ], + [ + -28.664718, + 83.423873 + ], + [ + -28.719164, + 83.431655 + ], + [ + -28.896109, + 83.462200 + ], + [ + -28.904682, + 83.471036 + ], + [ + -28.870000, + 83.477200 + ], + [ + -28.705273, + 83.474991 + ], + [ + -28.653891, + 83.475264 + ], + [ + -28.560000, + 83.478045 + ], + [ + -28.529164, + 83.482209 + ], + [ + -28.513336, + 83.490955 + ], + [ + -28.524173, + 83.499709 + ], + [ + -28.543336, + 83.504991 + ], + [ + -28.567500, + 83.508327 + ], + [ + -28.613055, + 83.511655 + ], + [ + -28.773336, + 83.513609 + ], + [ + -28.826945, + 83.514164 + ], + [ + -28.876391, + 83.512773 + ], + [ + -29.021664, + 83.507218 + ], + [ + -29.057500, + 83.503327 + ], + [ + -29.026945, + 83.488309 + ], + [ + -29.056664, + 83.481655 + ], + [ + -29.100000, + 83.477482 + ], + [ + -29.157218, + 83.479982 + ], + [ + -29.179445, + 83.482209 + ], + [ + -29.235555, + 83.492482 + ], + [ + -29.257227, + 83.500955 + ], + [ + -29.245555, + 83.509991 + ], + [ + -29.214582, + 83.518191 + ], + [ + -29.233327, + 83.525273 + ], + [ + -29.254718, + 83.529164 + ], + [ + -29.415555, + 83.541091 + ], + [ + -29.699445, + 83.566936 + ], + [ + -29.796673, + 83.575000 + ], + [ + -29.856945, + 83.578600 + ], + [ + -30.215000, + 83.596100 + ], + [ + -30.331945, + 83.600818 + ], + [ + -30.388336, + 83.602200 + ], + [ + -30.442773, + 83.602482 + ], + [ + -30.597218, + 83.600264 + ], + [ + -30.636391, + 83.598036 + ], + [ + -30.677773, + 83.592755 + ], + [ + -30.691945, + 83.587491 + ], + [ + -30.708336, + 83.583055 + ], + [ + -30.751391, + 
83.578873 + ], + [ + -30.844445, + 83.573045 + ], + [ + -30.946945, + 83.569445 + ], + [ + -31.154164, + 83.567218 + ], + [ + -31.261673, + 83.569445 + ], + [ + -31.435273, + 83.575273 + ], + [ + -31.488055, + 83.578600 + ], + [ + -31.653327, + 83.591373 + ], + [ + -31.700555, + 83.595827 + ], + [ + -31.871945, + 83.596373 + ], + [ + -31.960827, + 83.591091 + ], + [ + -32.164445, + 83.578327 + ], + [ + -32.187500, + 83.575000 + ], + [ + -32.229718, + 83.570545 + ], + [ + -32.276945, + 83.568055 + ], + [ + -32.300282, + 83.570264 + ], + [ + -32.301391, + 83.589709 + ], + [ + -32.274718, + 83.596645 + ], + [ + -32.229718, + 83.599991 + ], + [ + -32.206945, + 83.606791 + ], + [ + -32.233609, + 83.611927 + ], + [ + -32.295282, + 83.614991 + ], + [ + -32.522227, + 83.622482 + ], + [ + -32.579173, + 83.623600 + ], + [ + -32.908055, + 83.620255 + ], + [ + -33.146109, + 83.616655 + ], + [ + -33.436664, + 83.610809 + ], + [ + -33.688609, + 83.604155 + ], + [ + -33.784445, + 83.599718 + ], + [ + -33.899727, + 83.592482 + ], + [ + -33.943327, + 83.588882 + ], + [ + -34.028609, + 83.579436 + ], + [ + -34.070000, + 83.570264 + ], + [ + -34.081527, + 83.564009 + ], + [ + -34.077227, + 83.550809 + ], + [ + -34.035282, + 83.534718 + ], + [ + -34.014164, + 83.528873 + ], + [ + -33.940555, + 83.513882 + ], + [ + -33.815555, + 83.501391 + ], + [ + -33.784173, + 83.496373 + ], + [ + -33.760836, + 83.491364 + ], + [ + -33.747082, + 83.483182 + ], + [ + -33.760282, + 83.475264 + ], + [ + -33.785282, + 83.468318 + ], + [ + -33.837218, + 83.456645 + ], + [ + -33.858891, + 83.453600 + ], + [ + -33.882773, + 83.452482 + ], + [ + -33.917218, + 83.455264 + ], + [ + -34.195000, + 83.521927 + ], + [ + -34.282500, + 83.548873 + ], + [ + -34.298336, + 83.554427 + ], + [ + -34.305827, + 83.566373 + ], + [ + -34.307155, + 83.584018 + ], + [ + -34.326391, + 83.593318 + ], + [ + -34.342500, + 83.595536 + ], + [ + -34.364164, + 83.596936 + ], + [ + -34.410555, + 83.594145 + ], + [ + -34.440555, + 
83.587491 + ], + [ + -34.455836, + 83.580418 + ], + [ + -34.457500, + 83.567218 + ], + [ + -34.461945, + 83.551227 + ], + [ + -34.483745, + 83.541791 + ], + [ + -34.512218, + 83.539982 + ], + [ + -34.541673, + 83.542482 + ], + [ + -34.572918, + 83.549855 + ], + [ + -34.583609, + 83.565264 + ], + [ + -34.636118, + 83.589427 + ], + [ + -34.649991, + 83.593873 + ], + [ + -34.682500, + 83.598873 + ], + [ + -34.726100, + 83.601382 + ], + [ + -35.754793, + 83.587455 + ], + [ + -36.783487, + 83.573498 + ], + [ + -37.812180, + 83.559510 + ], + [ + -38.840873, + 83.545492 + ], + [ + -39.869567, + 83.531444 + ], + [ + -40.898260, + 83.517366 + ], + [ + -41.926953, + 83.503257 + ], + [ + -42.955647, + 83.489117 + ], + [ + -43.984340, + 83.474947 + ], + [ + -45.013033, + 83.460746 + ], + [ + -46.041726, + 83.446514 + ], + [ + -47.070420, + 83.432251 + ], + [ + -48.099113, + 83.417958 + ], + [ + -49.127806, + 83.403633 + ], + [ + -50.156500, + 83.389278 + ], + [ + -51.185193, + 83.374891 + ], + [ + -52.213886, + 83.360473 + ], + [ + -53.242580, + 83.346024 + ], + [ + -54.271273, + 83.331544 + ], + [ + -55.299966, + 83.317032 + ], + [ + -56.328660, + 83.302489 + ], + [ + -57.357353, + 83.287914 + ], + [ + -58.386046, + 83.273308 + ], + [ + -59.414740, + 83.258670 + ], + [ + -60.443433, + 83.244001 + ], + [ + -61.472126, + 83.229299 + ], + [ + -62.500820, + 83.214566 + ], + [ + -63.529513, + 83.199801 + ], + [ + -64.558206, + 83.185004 + ], + [ + -65.586899, + 83.170175 + ], + [ + -66.615593, + 83.155313 + ], + [ + -67.644286, + 83.140420 + ], + [ + -68.672979, + 83.125494 + ], + [ + -69.701673, + 83.110536 + ], + [ + -69.748891, + 83.111927 + ], + [ + -69.812209, + 83.112200 + ], + [ + -70.001400, + 83.107755 + ], + [ + -70.111936, + 83.109418 + ], + [ + -70.160000, + 83.111373 + ], + [ + -70.260009, + 83.113873 + ], + [ + -70.373891, + 83.113309 + ], + [ + -70.470000, + 83.107482 + ], + [ + -70.585282, + 83.103318 + ], + [ + -70.694155, + 83.103591 + ], + [ + -70.887218, + 
83.098036 + ], + [ + -71.125273, + 83.087491 + ], + [ + -71.425000, + 83.029436 + ], + [ + -71.481282, + 83.006864 + ], + [ + -71.306382, + 82.982209 + ], + [ + -71.080836, + 82.937482 + ], + [ + -70.961945, + 82.918591 + ], + [ + -70.904173, + 82.908036 + ], + [ + -70.842782, + 82.888318 + ], + [ + -70.871382, + 82.881091 + ], + [ + -70.952227, + 82.883609 + ], + [ + -71.018336, + 82.891936 + ], + [ + -71.084164, + 82.900545 + ], + [ + -71.144164, + 82.908327 + ], + [ + -71.219727, + 82.914991 + ], + [ + -71.336673, + 82.914700 + ], + [ + -71.493609, + 82.932209 + ], + [ + -71.567227, + 82.941082 + ], + [ + -71.789718, + 83.010827 + ], + [ + -71.775009, + 83.032209 + ], + [ + -71.750000, + 83.043045 + ], + [ + -71.696382, + 83.057755 + ], + [ + -71.654445, + 83.068882 + ], + [ + -71.589318, + 83.088182 + ], + [ + -71.611664, + 83.096100 + ], + [ + -71.712782, + 83.098873 + ], + [ + -71.831682, + 83.097764 + ], + [ + -72.005573, + 83.099155 + ], + [ + -72.111936, + 83.101091 + ], + [ + -72.226945, + 83.101382 + ], + [ + -72.336400, + 83.097764 + ], + [ + -72.365827, + 83.094145 + ], + [ + -72.400700, + 83.086518 + ], + [ + -72.424164, + 83.079164 + ], + [ + -72.477491, + 83.076664 + ], + [ + -72.523900, + 83.076936 + ], + [ + -72.566464, + 83.088245 + ], + [ + -72.599727, + 83.096936 + ], + [ + -72.650555, + 83.096373 + ], + [ + -72.927491, + 83.067491 + ], + [ + -72.948609, + 83.055255 + ], + [ + -73.033891, + 83.036655 + ], + [ + -73.261945, + 83.007764 + ], + [ + -73.626800, + 82.938864 + ], + [ + -73.640345, + 82.923800 + ], + [ + -73.607500, + 82.913036 + ], + [ + -73.577227, + 82.908036 + ], + [ + -73.495000, + 82.902482 + ], + [ + -73.460827, + 82.898609 + ], + [ + -73.425418, + 82.892073 + ], + [ + -73.401400, + 82.874982 + ], + [ + -73.257509, + 82.825818 + ], + [ + -73.211400, + 82.813873 + ], + [ + -73.027218, + 82.786927 + ], + [ + -72.983891, + 82.783873 + ], + [ + -72.912218, + 82.776655 + ], + [ + -72.716664, + 82.755555 + ], + [ + -72.648900, + 
82.746645 + ], + [ + -72.500691, + 82.721373 + ], + [ + -72.599027, + 82.696645 + ], + [ + -72.633900, + 82.694427 + ], + [ + -72.672227, + 82.698591 + ], + [ + -72.700836, + 82.703327 + ], + [ + -72.750000, + 82.714709 + ], + [ + -72.835827, + 82.728591 + ], + [ + -72.906664, + 82.735809 + ], + [ + -72.949718, + 82.738873 + ], + [ + -73.075009, + 82.745818 + ], + [ + -73.160282, + 82.751391 + ], + [ + -73.247218, + 82.761655 + ], + [ + -73.281955, + 82.766391 + ], + [ + -73.548336, + 82.806091 + ], + [ + -73.607773, + 82.815809 + ], + [ + -73.817782, + 82.852764 + ], + [ + -73.851673, + 82.866655 + ], + [ + -73.879436, + 82.897218 + ], + [ + -74.018064, + 82.956936 + ], + [ + -74.084164, + 82.972491 + ], + [ + -74.172773, + 82.991091 + ], + [ + -74.279173, + 83.009991 + ], + [ + -74.408055, + 83.024700 + ], + [ + -74.435818, + 83.027209 + ], + [ + -74.706664, + 83.041091 + ], + [ + -74.797500, + 83.043591 + ], + [ + -74.956391, + 83.045536 + ], + [ + -75.000000, + 83.043882 + ], + [ + -75.046955, + 83.041655 + ], + [ + -75.313327, + 83.027482 + ], + [ + -75.580836, + 83.038036 + ], + [ + -75.948609, + 83.051927 + ], + [ + -75.979718, + 83.053036 + ], + [ + -76.028609, + 83.054427 + ], + [ + -76.079182, + 83.053591 + ], + [ + -76.113327, + 83.050536 + ], + [ + -76.206664, + 83.036655 + ], + [ + -76.266664, + 83.029164 + ], + [ + -76.360273, + 83.021382 + ], + [ + -76.559436, + 83.011936 + ], + [ + -76.863055, + 83.010818 + ], + [ + -77.135564, + 83.011382 + ], + [ + -77.170555, + 83.015545 + ], + [ + -77.135982, + 83.030400 + ], + [ + -77.183882, + 83.033873 + ], + [ + -77.222782, + 83.030545 + ], + [ + -77.252227, + 83.025273 + ], + [ + -77.276109, + 83.020264 + ], + [ + -77.341945, + 83.005555 + ], + [ + -77.379100, + 82.990127 + ], + [ + -77.344727, + 82.972491 + ], + [ + -77.131673, + 82.939973 + ], + [ + -77.066391, + 82.930818 + ], + [ + -77.025836, + 82.927764 + ], + [ + -76.881945, + 82.913609 + ], + [ + -76.844164, + 82.909145 + ], + [ + -76.752791, + 
82.894991 + ], + [ + -76.710827, + 82.885818 + ], + [ + -76.666655, + 82.872482 + ], + [ + -76.629164, + 82.859709 + ], + [ + -76.586400, + 82.838591 + ], + [ + -76.545273, + 82.821109 + ], + [ + -76.525282, + 82.813873 + ], + [ + -76.501682, + 82.807755 + ], + [ + -76.447491, + 82.797482 + ], + [ + -76.375273, + 82.789155 + ], + [ + -76.288609, + 82.784718 + ], + [ + -76.241382, + 82.783600 + ], + [ + -76.186391, + 82.783873 + ], + [ + -75.990555, + 82.784918 + ], + [ + -76.014727, + 82.775818 + ], + [ + -76.056945, + 82.771655 + ], + [ + -76.176391, + 82.767209 + ], + [ + -76.226391, + 82.764436 + ], + [ + -76.269455, + 82.760818 + ], + [ + -76.303945, + 82.744500 + ], + [ + -76.275555, + 82.724427 + ], + [ + -76.256391, + 82.717209 + ], + [ + -76.235827, + 82.712200 + ], + [ + -76.103055, + 82.686100 + ], + [ + -75.807500, + 82.654709 + ], + [ + -75.670545, + 82.642764 + ], + [ + -75.625545, + 82.633045 + ], + [ + -75.557500, + 82.628582 + ], + [ + -75.503618, + 82.628864 + ], + [ + -75.468891, + 82.627764 + ], + [ + -75.434718, + 82.623873 + ], + [ + -75.402927, + 82.616918 + ], + [ + -75.420273, + 82.606936 + ], + [ + -75.451673, + 82.603318 + ], + [ + -75.500836, + 82.600264 + ], + [ + -75.606382, + 82.595827 + ], + [ + -75.648055, + 82.591664 + ], + [ + -75.671391, + 82.586927 + ], + [ + -75.773900, + 82.557209 + ], + [ + -75.802782, + 82.546373 + ], + [ + -75.887218, + 82.522218 + ], + [ + -75.975009, + 82.499709 + ], + [ + -76.037782, + 82.484418 + ], + [ + -76.102782, + 82.470536 + ], + [ + -76.184155, + 82.453873 + ], + [ + -76.233891, + 82.444973 + ], + [ + -76.258136, + 82.469236 + ], + [ + -76.203673, + 82.507282 + ], + [ + -76.038891, + 82.557209 + ], + [ + -75.972782, + 82.571382 + ], + [ + -75.938327, + 82.575818 + ], + [ + -75.918609, + 82.579991 + ], + [ + -75.894309, + 82.590127 + ], + [ + -75.913891, + 82.597491 + ], + [ + -76.058882, + 82.616927 + ], + [ + -76.093336, + 82.620818 + ], + [ + -76.387218, + 82.651382 + ], + [ + -76.538327, + 
82.664155 + ], + [ + -76.570555, + 82.666655 + ], + [ + -76.605282, + 82.692482 + ], + [ + -76.644164, + 82.709155 + ], + [ + -76.674436, + 82.721373 + ], + [ + -76.708618, + 82.733045 + ], + [ + -76.766664, + 82.750827 + ], + [ + -76.789173, + 82.756382 + ], + [ + -76.815555, + 82.761109 + ], + [ + -76.851109, + 82.765000 + ], + [ + -76.898345, + 82.766100 + ], + [ + -76.950418, + 82.771236 + ], + [ + -76.966664, + 82.804700 + ], + [ + -77.098609, + 82.855955 + ], + [ + -77.128327, + 82.863309 + ], + [ + -77.319455, + 82.873309 + ], + [ + -77.405273, + 82.878864 + ], + [ + -77.467227, + 82.883882 + ], + [ + -77.528064, + 82.891100 + ], + [ + -77.616655, + 82.902773 + ], + [ + -77.698618, + 82.914291 + ], + [ + -77.768336, + 82.922482 + ], + [ + -77.813045, + 82.924427 + ], + [ + -77.863327, + 82.921373 + ], + [ + -77.950000, + 82.914155 + ], + [ + -77.986664, + 82.909991 + ], + [ + -78.080291, + 82.898327 + ], + [ + -78.108336, + 82.893327 + ], + [ + -78.231873, + 82.856573 + ], + [ + -78.194445, + 82.845827 + ], + [ + -78.128873, + 82.836655 + ], + [ + -78.107909, + 82.828609 + ], + [ + -78.144164, + 82.823318 + ], + [ + -78.175555, + 82.827209 + ], + [ + -78.341673, + 82.850536 + ], + [ + -78.500564, + 82.845536 + ], + [ + -78.550827, + 82.855609 + ], + [ + -78.538609, + 82.876645 + ], + [ + -78.521945, + 82.889164 + ], + [ + -78.507782, + 82.910809 + ], + [ + -78.546109, + 82.926655 + ], + [ + -78.631945, + 82.941364 + ], + [ + -78.671109, + 82.945527 + ], + [ + -78.719727, + 82.946636 + ], + [ + -78.756118, + 82.942473 + ], + [ + -78.780291, + 82.938036 + ], + [ + -78.825291, + 82.928036 + ], + [ + -78.928055, + 82.898609 + ], + [ + -79.069300, + 82.897282 + ], + [ + -79.177491, + 82.951936 + ], + [ + -79.370545, + 82.974155 + ], + [ + -79.414445, + 82.975264 + ], + [ + -79.458345, + 82.974155 + ], + [ + -79.793336, + 82.957491 + ], + [ + -79.904727, + 82.951100 + ], + [ + -80.095836, + 82.937191 + ], + [ + -80.398055, + 82.899718 + ], + [ + -80.430000, + 
82.890827 + ], + [ + -80.393064, + 82.875536 + ], + [ + -80.277218, + 82.850818 + ], + [ + -80.219727, + 82.841664 + ], + [ + -80.194155, + 82.838318 + ], + [ + -80.158336, + 82.835536 + ], + [ + -80.110000, + 82.834718 + ], + [ + -80.006673, + 82.834427 + ], + [ + -79.896118, + 82.835818 + ], + [ + -79.847782, + 82.834991 + ], + [ + -79.677773, + 82.821518 + ], + [ + -79.942491, + 82.811373 + ], + [ + -79.975827, + 82.808591 + ], + [ + -79.996945, + 82.803318 + ], + [ + -79.936391, + 82.772218 + ], + [ + -79.913327, + 82.765273 + ], + [ + -79.886945, + 82.759427 + ], + [ + -79.836945, + 82.750545 + ], + [ + -79.623045, + 82.727764 + ], + [ + -79.403064, + 82.706373 + ], + [ + -79.331682, + 82.699709 + ], + [ + -79.243055, + 82.695255 + ], + [ + -78.931945, + 82.681655 + ], + [ + -78.895009, + 82.680264 + ], + [ + -78.840836, + 82.680818 + ], + [ + -78.576673, + 82.686918 + ], + [ + -78.531955, + 82.684418 + ], + [ + -78.511955, + 82.679009 + ], + [ + -78.565555, + 82.674700 + ], + [ + -78.843609, + 82.664991 + ], + [ + -79.149991, + 82.667755 + ], + [ + -79.384736, + 82.672764 + ], + [ + -79.468336, + 82.677473 + ], + [ + -79.617491, + 82.693036 + ], + [ + -79.684155, + 82.699709 + ], + [ + -79.747500, + 82.704991 + ], + [ + -79.787509, + 82.707764 + ], + [ + -79.829727, + 82.708882 + ], + [ + -79.885836, + 82.708600 + ], + [ + -79.928609, + 82.705555 + ], + [ + -79.972291, + 82.692618 + ], + [ + -79.848618, + 82.663882 + ], + [ + -79.816255, + 82.652073 + ], + [ + -79.861664, + 82.644145 + ], + [ + -79.941664, + 82.649427 + ], + [ + -80.003064, + 82.656373 + ], + [ + -80.070845, + 82.665545 + ], + [ + -80.160282, + 82.681364 + ], + [ + -80.180127, + 82.695609 + ], + [ + -80.139173, + 82.717900 + ], + [ + -80.158336, + 82.727764 + ], + [ + -80.293336, + 82.774427 + ], + [ + -80.318618, + 82.779982 + ], + [ + -80.381100, + 82.788882 + ], + [ + -80.418336, + 82.792209 + ], + [ + -80.500564, + 82.797482 + ], + [ + -80.801936, + 82.812482 + ], + [ + -80.977218, + 
82.820264 + ], + [ + -81.022227, + 82.821927 + ], + [ + -81.359727, + 82.827773 + ], + [ + -81.411391, + 82.827773 + ], + [ + -81.473055, + 82.825000 + ], + [ + -81.514173, + 82.821109 + ], + [ + -81.536391, + 82.816664 + ], + [ + -81.564164, + 82.808873 + ], + [ + -81.579173, + 82.792973 + ], + [ + -81.508618, + 82.764709 + ], + [ + -81.450000, + 82.755555 + ], + [ + -81.305827, + 82.733873 + ], + [ + -81.223618, + 82.715818 + ], + [ + -81.124709, + 82.686918 + ], + [ + -81.097500, + 82.672482 + ], + [ + -81.077227, + 82.666927 + ], + [ + -81.049991, + 82.660809 + ], + [ + -80.994445, + 82.650273 + ], + [ + -80.873891, + 82.629700 + ], + [ + -80.599164, + 82.554427 + ], + [ + -80.579873, + 82.544573 + ], + [ + -80.891955, + 82.532764 + ], + [ + -80.949718, + 82.538036 + ], + [ + -80.989436, + 82.547209 + ], + [ + -81.136127, + 82.578045 + ], + [ + -81.300827, + 82.611100 + ], + [ + -81.359727, + 82.620818 + ], + [ + -81.432500, + 82.629155 + ], + [ + -81.543336, + 82.637209 + ], + [ + -81.931382, + 82.663882 + ], + [ + -81.972227, + 82.666382 + ], + [ + -82.060273, + 82.669709 + ], + [ + -82.155000, + 82.671100 + ], + [ + -82.215291, + 82.668591 + ], + [ + -82.255009, + 82.664427 + ], + [ + -82.288055, + 82.659991 + ], + [ + -82.344864, + 82.648036 + ], + [ + -82.376518, + 82.637218 + ], + [ + -82.391882, + 82.616018 + ], + [ + -82.343891, + 82.595264 + ], + [ + -82.321118, + 82.589155 + ], + [ + -82.263900, + 82.576664 + ], + [ + -81.966400, + 82.528873 + ], + [ + -81.927491, + 82.522764 + ], + [ + -81.880555, + 82.517764 + ], + [ + -81.847227, + 82.515545 + ], + [ + -81.751400, + 82.516936 + ], + [ + -81.713336, + 82.515273 + ], + [ + -81.541945, + 82.500545 + ], + [ + -81.670000, + 82.492482 + ], + [ + -82.091673, + 82.501391 + ], + [ + -82.316955, + 82.506945 + ], + [ + -82.406391, + 82.509155 + ], + [ + -82.458891, + 82.508327 + ], + [ + -82.498336, + 82.506382 + ], + [ + -82.530000, + 82.499845 + ], + [ + -82.704309, + 82.422209 + ], + [ + -82.728673, + 
82.398391 + ], + [ + -82.711673, + 82.382473 + ], + [ + -82.679991, + 82.370818 + ], + [ + -82.625545, + 82.359145 + ], + [ + -82.513064, + 82.337773 + ], + [ + -82.454727, + 82.328045 + ], + [ + -82.170545, + 82.286655 + ], + [ + -81.887791, + 82.238036 + ], + [ + -81.825564, + 82.226655 + ], + [ + -81.799727, + 82.222764 + ], + [ + -81.423327, + 82.176927 + ], + [ + -81.324718, + 82.164991 + ], + [ + -81.253345, + 82.159718 + ], + [ + -81.171109, + 82.156373 + ], + [ + -81.051391, + 82.154709 + ], + [ + -80.909164, + 82.156645 + ], + [ + -80.871245, + 82.153036 + ], + [ + -80.899736, + 82.146382 + ], + [ + -80.931382, + 82.142209 + ], + [ + -80.956664, + 82.137209 + ], + [ + -80.975555, + 82.125682 + ], + [ + -80.955700, + 82.113318 + ], + [ + -80.922227, + 82.103591 + ], + [ + -80.878327, + 82.094145 + ], + [ + -80.822236, + 82.083882 + ], + [ + -80.791109, + 82.079436 + ], + [ + -80.725555, + 82.071655 + ], + [ + -80.657227, + 82.064700 + ], + [ + -80.624436, + 82.061918 + ], + [ + -80.368609, + 82.041091 + ], + [ + -80.331682, + 82.038591 + ], + [ + -80.213900, + 82.032209 + ], + [ + -79.916400, + 82.023882 + ], + [ + -79.880827, + 82.021927 + ], + [ + -79.853336, + 82.018873 + ], + [ + -79.835009, + 82.010555 + ], + [ + -79.844455, + 81.971373 + ], + [ + -79.670836, + 81.927473 + ], + [ + -79.579727, + 81.913609 + ], + [ + -79.521118, + 81.905545 + ], + [ + -79.489991, + 81.900273 + ], + [ + -79.452227, + 81.889982 + ], + [ + -79.236809, + 81.816082 + ], + [ + -79.492218, + 81.819718 + ], + [ + -79.534436, + 81.820827 + ], + [ + -79.570691, + 81.827073 + ], + [ + -79.588755, + 81.841236 + ], + [ + -79.610000, + 81.851091 + ], + [ + -79.883055, + 81.924700 + ], + [ + -80.035282, + 81.963045 + ], + [ + -80.085009, + 81.973600 + ], + [ + -80.153609, + 81.981373 + ], + [ + -80.225827, + 81.986100 + ], + [ + -80.432500, + 81.997482 + ], + [ + -80.640291, + 82.018327 + ], + [ + -80.868327, + 82.031373 + ], + [ + -81.091109, + 82.059418 + ], + [ + -81.150282, + 
82.068882 + ], + [ + -81.249164, + 82.081373 + ], + [ + -81.353055, + 82.091664 + ], + [ + -81.425282, + 82.097764 + ], + [ + -81.608891, + 82.118591 + ], + [ + -81.918064, + 82.154982 + ], + [ + -82.011127, + 82.168591 + ], + [ + -82.101945, + 82.183045 + ], + [ + -82.160282, + 82.193318 + ], + [ + -82.211118, + 82.204709 + ], + [ + -82.263064, + 82.222218 + ], + [ + -82.286664, + 82.229155 + ], + [ + -82.452791, + 82.249418 + ], + [ + -82.508900, + 82.258045 + ], + [ + -82.621655, + 82.278045 + ], + [ + -82.654445, + 82.282209 + ], + [ + -82.693600, + 82.284718 + ], + [ + -82.735545, + 82.286100 + ], + [ + -82.990827, + 82.292482 + ], + [ + -83.025618, + 82.278464 + ], + [ + -83.027791, + 82.235264 + ], + [ + -83.011400, + 82.221645 + ], + [ + -82.987500, + 82.215000 + ], + [ + -82.940282, + 82.203600 + ], + [ + -82.886945, + 82.193864 + ], + [ + -82.860273, + 82.187764 + ], + [ + -82.772227, + 82.163318 + ], + [ + -82.724309, + 82.146382 + ], + [ + -82.693045, + 82.128718 + ], + [ + -82.680409, + 82.113036 + ], + [ + -82.651945, + 82.100264 + ], + [ + -82.619718, + 82.096100 + ], + [ + -82.584445, + 82.092755 + ], + [ + -82.546391, + 82.090273 + ], + [ + -82.417500, + 82.087200 + ], + [ + -82.243055, + 82.084991 + ], + [ + -82.102491, + 82.085536 + ], + [ + -82.058609, + 82.084718 + ], + [ + -82.020845, + 82.082218 + ], + [ + -81.966109, + 82.071109 + ], + [ + -81.924164, + 82.058873 + ], + [ + -81.885700, + 82.036855 + ], + [ + -81.926100, + 82.034718 + ], + [ + -81.963618, + 82.037200 + ], + [ + -82.055555, + 82.050809 + ], + [ + -82.122218, + 82.058591 + ], + [ + -82.199436, + 82.064145 + ], + [ + -82.284164, + 82.066373 + ], + [ + -82.421664, + 82.066936 + ], + [ + -82.636400, + 82.070545 + ], + [ + -82.674436, + 82.073045 + ], + [ + -82.758055, + 82.076936 + ], + [ + -82.797500, + 82.077773 + ], + [ + -82.888336, + 82.072491 + ], + [ + -82.974164, + 82.064991 + ], + [ + -83.076400, + 82.061918 + ], + [ + -83.123045, + 82.069364 + ], + [ + -83.062500, + 
82.080273 + ], + [ + -83.001955, + 82.089155 + ], + [ + -82.965418, + 82.101509 + ], + [ + -82.953064, + 82.119982 + ], + [ + -82.976673, + 82.138318 + ], + [ + -83.000000, + 82.151091 + ], + [ + -83.022782, + 82.159427 + ], + [ + -83.083891, + 82.175809 + ], + [ + -83.130555, + 82.184982 + ], + [ + -83.184155, + 82.194700 + ], + [ + -83.242218, + 82.204164 + ], + [ + -83.308336, + 82.218318 + ], + [ + -83.344455, + 82.227200 + ], + [ + -83.368255, + 82.249782 + ], + [ + -83.364300, + 82.272909 + ], + [ + -83.384736, + 82.282209 + ], + [ + -83.516400, + 82.316936 + ], + [ + -83.606382, + 82.331373 + ], + [ + -83.767500, + 82.353045 + ], + [ + -83.841945, + 82.361373 + ], + [ + -83.876936, + 82.364155 + ], + [ + -83.961400, + 82.368591 + ], + [ + -84.047227, + 82.371373 + ], + [ + -84.095555, + 82.371091 + ], + [ + -84.146955, + 82.369709 + ], + [ + -84.180555, + 82.368045 + ], + [ + -84.228882, + 82.363873 + ], + [ + -84.303327, + 82.355818 + ], + [ + -84.344455, + 82.352764 + ], + [ + -84.384509, + 82.363936 + ], + [ + -84.418336, + 82.381091 + ], + [ + -84.450000, + 82.386109 + ], + [ + -84.482500, + 82.389436 + ], + [ + -84.559718, + 82.394991 + ], + [ + -84.714718, + 82.405818 + ], + [ + -84.888609, + 82.416927 + ], + [ + -84.916655, + 82.420536 + ], + [ + -84.942218, + 82.428873 + ], + [ + -84.895282, + 82.433591 + ], + [ + -84.787782, + 82.434982 + ], + [ + -84.620418, + 82.452618 + ], + [ + -84.641682, + 82.465545 + ], + [ + -84.662782, + 82.468600 + ], + [ + -84.693882, + 82.471373 + ], + [ + -85.003064, + 82.480818 + ], + [ + -85.046955, + 82.481936 + ], + [ + -85.298618, + 82.478045 + ], + [ + -85.502500, + 82.471100 + ], + [ + -85.708618, + 82.463609 + ], + [ + -85.746945, + 82.461382 + ], + [ + -85.794727, + 82.458600 + ], + [ + -85.819736, + 82.454436 + ], + [ + -85.910755, + 82.428936 + ], + [ + -85.866945, + 82.421918 + ], + [ + -85.669445, + 82.409427 + ], + [ + -85.524727, + 82.405400 + ], + [ + -85.501545, + 82.396236 + ], + [ + -85.531682, + 
82.369709 + ], + [ + -85.515018, + 82.343318 + ], + [ + -85.485836, + 82.316655 + ], + [ + -85.457227, + 82.307482 + ], + [ + -85.396391, + 82.296936 + ], + [ + -85.364509, + 82.284045 + ], + [ + -85.413891, + 82.276091 + ], + [ + -85.508345, + 82.273036 + ], + [ + -85.557773, + 82.269436 + ], + [ + -85.580564, + 82.264436 + ], + [ + -85.601391, + 82.251791 + ], + [ + -85.620000, + 82.243591 + ], + [ + -85.662218, + 82.239700 + ], + [ + -85.706118, + 82.238036 + ], + [ + -85.753891, + 82.237491 + ], + [ + -85.798891, + 82.237764 + ], + [ + -85.841382, + 82.239155 + ], + [ + -85.934155, + 82.238873 + ], + [ + -85.984436, + 82.237491 + ], + [ + -86.137791, + 82.226927 + ], + [ + -86.181109, + 82.225264 + ], + [ + -86.228882, + 82.224700 + ], + [ + -86.316664, + 82.224700 + ], + [ + -86.520009, + 82.229709 + ], + [ + -86.571673, + 82.230273 + ], + [ + -86.619445, + 82.229709 + ], + [ + -86.669445, + 82.228318 + ], + [ + -86.764173, + 82.221645 + ], + [ + -86.843609, + 82.212491 + ], + [ + -86.868464, + 82.197482 + ], + [ + -86.752227, + 82.141100 + ], + [ + -86.731382, + 82.136382 + ], + [ + -86.706118, + 82.131927 + ], + [ + -86.637509, + 82.124418 + ], + [ + -86.565555, + 82.118864 + ], + [ + -86.485000, + 82.114155 + ], + [ + -86.278882, + 82.107209 + ], + [ + -86.091109, + 82.104427 + ], + [ + -86.062209, + 82.103864 + ], + [ + -85.999727, + 82.094145 + ], + [ + -85.915836, + 82.077482 + ], + [ + -85.851673, + 82.067218 + ], + [ + -85.755845, + 82.058873 + ], + [ + -85.678327, + 82.054427 + ], + [ + -85.405836, + 82.042209 + ], + [ + -85.116945, + 82.033055 + ], + [ + -85.039991, + 82.028591 + ], + [ + -84.932500, + 82.019436 + ], + [ + -84.899736, + 82.015273 + ], + [ + -84.840836, + 82.006100 + ], + [ + -84.815282, + 82.000827 + ], + [ + -84.751682, + 81.984709 + ], + [ + -84.722218, + 81.973591 + ], + [ + -84.613464, + 81.888455 + ], + [ + -84.635282, + 81.886109 + ], + [ + -84.656109, + 81.887773 + ], + [ + -84.688600, + 81.891936 + ], + [ + -84.746855, + 
81.909455 + ], + [ + -84.793436, + 81.927782 + ], + [ + -84.817082, + 81.961864 + ], + [ + -84.831682, + 81.979427 + ], + [ + -84.858891, + 81.985264 + ], + [ + -84.889173, + 81.990264 + ], + [ + -84.929173, + 81.993045 + ], + [ + -85.001109, + 81.994136 + ], + [ + -85.056518, + 81.989982 + ], + [ + -85.043055, + 81.970682 + ], + [ + -85.025282, + 81.960818 + ], + [ + -84.994718, + 81.948591 + ], + [ + -84.915009, + 81.918045 + ], + [ + -84.863891, + 81.900273 + ], + [ + -84.836682, + 81.889755 + ], + [ + -84.879436, + 81.887500 + ], + [ + -84.984164, + 81.911100 + ], + [ + -85.018891, + 81.919436 + ], + [ + -85.096391, + 81.945818 + ], + [ + -85.140564, + 81.966091 + ], + [ + -85.162918, + 81.982482 + ], + [ + -85.188327, + 81.992755 + ], + [ + -85.217500, + 81.995527 + ], + [ + -85.258618, + 81.996936 + ], + [ + -85.559436, + 82.001664 + ], + [ + -85.650555, + 81.998318 + ], + [ + -85.693882, + 81.994982 + ], + [ + -85.730209, + 81.986164 + ], + [ + -85.654727, + 81.950818 + ], + [ + -85.566100, + 81.924991 + ], + [ + -85.469455, + 81.899718 + ], + [ + -85.441936, + 81.893873 + ], + [ + -85.391945, + 81.878036 + ], + [ + -85.372773, + 81.861791 + ], + [ + -85.379436, + 81.856936 + ], + [ + -85.422500, + 81.857482 + ], + [ + -85.467227, + 81.867200 + ], + [ + -85.628873, + 81.916091 + ], + [ + -85.731382, + 81.950000 + ], + [ + -85.767500, + 81.961927 + ], + [ + -85.815000, + 81.973873 + ], + [ + -85.914445, + 81.997482 + ], + [ + -85.960555, + 82.007491 + ], + [ + -86.016109, + 82.016664 + ], + [ + -86.169155, + 82.041655 + ], + [ + -86.202791, + 82.045536 + ], + [ + -86.239164, + 82.048600 + ], + [ + -86.278064, + 82.050809 + ], + [ + -86.356382, + 82.053591 + ], + [ + -86.583618, + 82.053864 + ], + [ + -86.791945, + 82.058027 + ], + [ + -86.843336, + 82.057209 + ], + [ + -86.892500, + 82.054155 + ], + [ + -86.931673, + 82.049427 + ], + [ + -86.994309, + 82.038036 + ], + [ + -87.128745, + 81.966100 + ], + [ + -87.098055, + 81.958327 + ], + [ + -87.066100, + 
81.954991 + ], + [ + -86.919445, + 81.942745 + ], + [ + -86.863618, + 81.933591 + ], + [ + -86.834727, + 81.927764 + ], + [ + -86.733400, + 81.899645 + ], + [ + -86.768336, + 81.890273 + ], + [ + -86.804173, + 81.893055 + ], + [ + -86.828891, + 81.897491 + ], + [ + -86.877209, + 81.909427 + ], + [ + -86.939436, + 81.918873 + ], + [ + -87.063327, + 81.934418 + ], + [ + -87.101673, + 81.937764 + ], + [ + -87.169155, + 81.945527 + ], + [ + -87.265836, + 81.958882 + ], + [ + -87.305964, + 81.974918 + ], + [ + -87.258055, + 81.989427 + ], + [ + -87.232773, + 81.993318 + ], + [ + -87.185373, + 82.016691 + ], + [ + -87.230564, + 82.036927 + ], + [ + -87.271664, + 82.047764 + ], + [ + -87.343200, + 82.065264 + ], + [ + -87.402218, + 82.073882 + ], + [ + -87.501400, + 82.084155 + ], + [ + -87.599727, + 82.089155 + ], + [ + -87.641955, + 82.090273 + ], + [ + -87.666400, + 82.089427 + ], + [ + -87.710282, + 82.085400 + ], + [ + -87.911945, + 82.090818 + ], + [ + -88.038327, + 82.103864 + ], + [ + -88.088891, + 82.098945 + ], + [ + -88.113618, + 82.090545 + ], + [ + -88.145009, + 82.086927 + ], + [ + -88.250000, + 82.080827 + ], + [ + -88.296664, + 82.080273 + ], + [ + -88.443055, + 82.075000 + ], + [ + -88.543064, + 82.070545 + ], + [ + -88.589718, + 82.066664 + ], + [ + -88.625545, + 82.062764 + ], + [ + -88.773055, + 82.039427 + ], + [ + -88.963900, + 82.008045 + ], + [ + -89.021118, + 81.998027 + ], + [ + -89.048055, + 81.984155 + ], + [ + -89.011945, + 81.958600 + ], + [ + -88.989582, + 81.948182 + ], + [ + -88.998473, + 81.918318 + ], + [ + -89.033327, + 81.912200 + ], + [ + -89.074718, + 81.911655 + ], + [ + -89.149800, + 81.923364 + ], + [ + -89.249727, + 81.941082 + ], + [ + -89.288891, + 81.943036 + ], + [ + -89.338900, + 81.940264 + ], + [ + -89.371655, + 81.935809 + ], + [ + -89.413264, + 81.921927 + ], + [ + -89.397509, + 81.909427 + ], + [ + -89.367218, + 81.905545 + ], + [ + -89.328064, + 81.902209 + ], + [ + -89.203682, + 81.883282 + ], + [ + -89.235973, + 
81.849291 + ], + [ + -89.356382, + 81.811100 + ], + [ + -89.425000, + 81.815264 + ], + [ + -89.461400, + 81.818055 + ], + [ + -89.629991, + 81.856373 + ], + [ + -89.649445, + 81.863309 + ], + [ + -89.679855, + 81.900818 + ], + [ + -89.700836, + 81.915545 + ], + [ + -89.735827, + 81.917482 + ], + [ + -89.783327, + 81.917209 + ], + [ + -89.990827, + 81.905545 + ], + [ + -90.154445, + 81.896655 + ], + [ + -90.245273, + 81.896100 + ], + [ + -90.338055, + 81.893055 + ], + [ + -90.436664, + 81.887500 + ], + [ + -90.565282, + 81.878036 + ], + [ + -90.610000, + 81.873873 + ], + [ + -90.635009, + 81.868864 + ], + [ + -90.689709, + 81.851191 + ], + [ + -90.727491, + 81.841091 + ], + [ + -90.852218, + 81.842482 + ], + [ + -91.001109, + 81.832764 + ], + [ + -91.051664, + 81.828873 + ], + [ + -91.101109, + 81.818882 + ], + [ + -91.144445, + 81.800609 + ], + [ + -91.117764, + 81.784427 + ], + [ + -91.090564, + 81.777209 + ], + [ + -91.050545, + 81.768245 + ], + [ + -91.212509, + 81.759427 + ], + [ + -91.255009, + 81.759155 + ], + [ + -91.287509, + 81.761936 + ], + [ + -91.351391, + 81.770264 + ], + [ + -91.386127, + 81.773882 + ], + [ + -91.485545, + 81.769991 + ], + [ + -91.724727, + 81.714145 + ], + [ + -91.737209, + 81.686918 + ], + [ + -91.770845, + 81.663318 + ], + [ + -91.801100, + 81.658600 + ], + [ + -91.838900, + 81.658600 + ], + [ + -91.867764, + 81.663318 + ], + [ + -91.902218, + 81.666927 + ], + [ + -91.926936, + 81.664991 + ], + [ + -91.953045, + 81.660400 + ], + [ + -91.943600, + 81.628309 + ], + [ + -91.912664, + 81.620181 + ], + [ + -93.517500, + 81.384991 + ], + [ + -93.559091, + 81.373309 + ], + [ + -93.533327, + 81.348600 + ], + [ + -93.487427, + 81.322555 + ], + [ + -93.515291, + 81.310527 + ], + [ + -93.553327, + 81.305545 + ], + [ + -93.602918, + 81.313455 + ], + [ + -93.630418, + 81.324436 + ], + [ + -93.665836, + 81.332764 + ], + [ + -93.694445, + 81.337491 + ], + [ + -93.755009, + 81.344709 + ], + [ + -93.789445, + 81.348036 + ], + [ + -94.035282, + 
81.363309 + ], + [ + -94.068073, + 81.363309 + ], + [ + -94.153882, + 81.359709 + ], + [ + -94.200564, + 81.355545 + ], + [ + -94.240827, + 81.350818 + ], + [ + -94.273609, + 81.344018 + ], + [ + -94.378200, + 81.278736 + ], + [ + -94.385700, + 81.254436 + ], + [ + -94.302491, + 81.234982 + ], + [ + -94.282227, + 81.231091 + ], + [ + -94.200564, + 81.221100 + ], + [ + -94.166400, + 81.218045 + ], + [ + -94.031391, + 81.208882 + ], + [ + -93.928882, + 81.203873 + ], + [ + -93.852218, + 81.203045 + ], + [ + -93.728336, + 81.207218 + ], + [ + -93.687500, + 81.210264 + ], + [ + -93.514727, + 81.217755 + ], + [ + -93.419445, + 81.219982 + ], + [ + -93.259736, + 81.212200 + ], + [ + -93.121109, + 81.182755 + ], + [ + -93.094655, + 81.158391 + ], + [ + -93.123318, + 81.115264 + ], + [ + -93.157636, + 81.093318 + ], + [ + -93.255845, + 81.082764 + ], + [ + -93.299991, + 81.079709 + ], + [ + -93.517227, + 81.084427 + ], + [ + -93.689436, + 81.093045 + ], + [ + -93.795273, + 81.099427 + ], + [ + -93.866391, + 81.103045 + ], + [ + -93.907227, + 81.101655 + ], + [ + -93.935273, + 81.098327 + ], + [ + -93.960827, + 81.094145 + ], + [ + -93.989718, + 81.092482 + ], + [ + -94.130282, + 81.092755 + ], + [ + -94.154727, + 81.093873 + ], + [ + -94.205273, + 81.103318 + ], + [ + -94.230836, + 81.110536 + ], + [ + -94.255282, + 81.115536 + ], + [ + -94.278336, + 81.117200 + ], + [ + -94.313045, + 81.115536 + ], + [ + -94.354709, + 81.102827 + ], + [ + -94.328609, + 81.089427 + ], + [ + -94.182500, + 81.068055 + ], + [ + -94.042218, + 81.055545 + ], + [ + -94.013336, + 81.053591 + ], + [ + -93.907218, + 81.039982 + ], + [ + -94.071673, + 81.024991 + ], + [ + -94.143618, + 81.015827 + ], + [ + -94.337364, + 80.976927 + ], + [ + -94.365555, + 80.968873 + ], + [ + -94.408618, + 80.965545 + ], + [ + -94.434155, + 80.965545 + ], + [ + -94.472782, + 80.969145 + ], + [ + -94.499545, + 80.988800 + ], + [ + -94.493882, + 81.017491 + ], + [ + -94.546109, + 81.033327 + ], + [ + -94.572782, + 
81.038882 + ], + [ + -94.663055, + 81.048600 + ], + [ + -94.814164, + 81.054155 + ], + [ + -94.943327, + 81.048873 + ], + [ + -95.183064, + 81.019718 + ], + [ + -95.220836, + 81.011382 + ], + [ + -95.248055, + 81.001373 + ], + [ + -95.260982, + 80.974845 + ], + [ + -95.283618, + 80.950000 + ], + [ + -95.311936, + 80.939145 + ], + [ + -95.334164, + 80.934709 + ], + [ + -95.422773, + 80.920818 + ], + [ + -95.472009, + 80.896136 + ], + [ + -95.413327, + 80.885273 + ], + [ + -95.300827, + 80.885273 + ], + [ + -95.170545, + 80.884718 + ], + [ + -95.148755, + 80.882482 + ], + [ + -95.170836, + 80.875809 + ], + [ + -95.212782, + 80.868318 + ], + [ + -95.371382, + 80.853318 + ], + [ + -95.440282, + 80.846100 + ], + [ + -95.500836, + 80.838318 + ], + [ + -95.527218, + 80.819291 + ], + [ + -95.501109, + 80.806927 + ], + [ + -95.475555, + 80.803036 + ], + [ + -95.442764, + 80.799709 + ], + [ + -95.334164, + 80.788882 + ], + [ + -95.282500, + 80.786100 + ], + [ + -95.243055, + 80.787764 + ], + [ + -95.025282, + 80.801655 + ], + [ + -95.036045, + 80.773464 + ], + [ + -94.895555, + 80.747755 + ], + [ + -94.722782, + 80.728591 + ], + [ + -94.694718, + 80.726655 + ], + [ + -94.659727, + 80.725264 + ], + [ + -94.549436, + 80.724991 + ], + [ + -94.491109, + 80.726927 + ], + [ + -94.449155, + 80.730273 + ], + [ + -94.423045, + 80.734982 + ], + [ + -94.304445, + 80.733873 + ], + [ + -94.140291, + 80.721927 + ], + [ + -94.108336, + 80.718873 + ], + [ + -94.081809, + 80.707491 + ], + [ + -94.117491, + 80.698591 + ], + [ + -94.199718, + 80.693036 + ], + [ + -94.231673, + 80.692200 + ], + [ + -94.331118, + 80.693864 + ], + [ + -94.439164, + 80.697482 + ], + [ + -94.514727, + 80.696364 + ], + [ + -94.553609, + 80.694973 + ], + [ + -94.596955, + 80.690536 + ], + [ + -94.628327, + 80.685809 + ], + [ + -94.664373, + 80.663555 + ], + [ + -94.552636, + 80.602764 + ], + [ + -94.524173, + 80.598327 + ], + [ + -94.484727, + 80.598327 + ], + [ + -94.457509, + 80.600264 + ], + [ + -94.437773, + 
80.605545 + ], + [ + -94.308336, + 80.606373 + ], + [ + -94.093609, + 80.593318 + ], + [ + -94.005009, + 80.585264 + ], + [ + -93.973618, + 80.581936 + ], + [ + -93.949155, + 80.578045 + ], + [ + -93.894727, + 80.565809 + ], + [ + -93.810545, + 80.541364 + ], + [ + -93.786664, + 80.528800 + ], + [ + -93.839445, + 80.518600 + ], + [ + -93.866945, + 80.518327 + ], + [ + -93.899173, + 80.519145 + ], + [ + -93.965555, + 80.533664 + ], + [ + -94.010555, + 80.549427 + ], + [ + -94.230836, + 80.556364 + ], + [ + -94.375000, + 80.557209 + ], + [ + -94.554991, + 80.554427 + ], + [ + -94.658618, + 80.555818 + ], + [ + -94.696655, + 80.556927 + ], + [ + -94.762364, + 80.560673 + ], + [ + -94.823627, + 80.569718 + ], + [ + -94.846955, + 80.574709 + ], + [ + -94.902500, + 80.586655 + ], + [ + -94.962782, + 80.599718 + ], + [ + -94.995000, + 80.603045 + ], + [ + -95.030836, + 80.603318 + ], + [ + -95.067227, + 80.601382 + ], + [ + -95.132218, + 80.593873 + ], + [ + -95.172227, + 80.591373 + ], + [ + -95.246655, + 80.589982 + ], + [ + -95.318345, + 80.590818 + ], + [ + -95.423618, + 80.593600 + ], + [ + -95.498045, + 80.592482 + ], + [ + -95.536391, + 80.590818 + ], + [ + -95.671664, + 80.584718 + ], + [ + -95.941664, + 80.586382 + ], + [ + -95.980000, + 80.584718 + ], + [ + -97.228982, + 80.442143 + ], + [ + -98.477964, + 80.297429 + ], + [ + -99.726945, + 80.150545 + ], + [ + -99.759173, + 80.149718 + ], + [ + -99.795273, + 80.147764 + ], + [ + -99.827227, + 80.143600 + ], + [ + -100.023618, + 80.099718 + ], + [ + -100.065555, + 80.089982 + ], + [ + -100.081682, + 80.084427 + ], + [ + -100.193327, + 80.033873 + ], + [ + -101.236385, + 79.896661 + ], + [ + -102.279444, + 79.757580 + ], + [ + -103.322502, + 79.616604 + ], + [ + -104.365560, + 79.473711 + ], + [ + -105.408618, + 79.328873 + ], + [ + -105.439991, + 79.329164 + ], + [ + -106.434104, + 79.217227 + ], + [ + -107.428216, + 79.104131 + ], + [ + -108.422329, + 78.989862 + ], + [ + -109.416442, + 78.874411 + ], + [ + 
-110.410555, + 78.757764 + ], + [ + -110.430555, + 78.758609 + ], + [ + -110.460282, + 78.757491 + ], + [ + -110.637509, + 78.748600 + ], + [ + -110.791109, + 78.735264 + ], + [ + -110.956127, + 78.718318 + ], + [ + -111.160555, + 78.691655 + ], + [ + -111.363055, + 78.642764 + ], + [ + -111.385618, + 78.616091 + ], + [ + -111.455564, + 78.592755 + ], + [ + -111.572245, + 78.588591 + ], + [ + -111.600827, + 78.585264 + ], + [ + -111.641955, + 78.574155 + ], + [ + -111.677782, + 78.563036 + ], + [ + -111.752500, + 78.550536 + ], + [ + -111.809718, + 78.545255 + ], + [ + -111.853055, + 78.542755 + ], + [ + -111.873045, + 78.544436 + ], + [ + -111.903609, + 78.548873 + ], + [ + -111.988055, + 78.552764 + ], + [ + -112.129709, + 78.551927 + ], + [ + -112.238055, + 78.547209 + ], + [ + -112.311664, + 78.539982 + ], + [ + -112.363055, + 78.533327 + ], + [ + -112.607500, + 78.499418 + ], + [ + -112.711673, + 78.484709 + ], + [ + -113.038327, + 78.436918 + ], + [ + -113.119718, + 78.421927 + ], + [ + -113.216109, + 78.385273 + ], + [ + -113.333327, + 78.330818 + ], + [ + -114.303327, + 78.070545 + ], + [ + -114.326945, + 78.071109 + ], + [ + -114.355000, + 78.070545 + ], + [ + -114.400836, + 78.067491 + ], + [ + -114.605827, + 78.030545 + ], + [ + -114.740282, + 78.000000 + ], + [ + -114.777500, + 77.981655 + ], + [ + -114.797782, + 77.975536 + ], + [ + -114.819736, + 77.973036 + ], + [ + -114.930282, + 77.960536 + ], + [ + -115.033891, + 77.962200 + ], + [ + -115.060545, + 77.963882 + ], + [ + -115.090845, + 77.963609 + ], + [ + -115.112700, + 77.957491 + ], + [ + -115.077218, + 77.938582 + ], + [ + -114.848345, + 77.854709 + ] + ] + ] + } + } + ] +} \ No newline at end of file diff --git a/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc b/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc new file mode 100644 index 0000000000000000000000000000000000000000..90d2a4dd033c2a87c6c2da744ed279d7f1321d2c GIT binary patch literal 11732 
zcmeHNQE%He5O&%$X_75nmc8Z0!1lJ)vYpgGx;?B&w<18&Z4K5}fstrijYPVnJS6)C z+uztP+fUgYsWUiEj^w5Zl1v1oJ;CaXs#csGTwqhZ*?8UN}@T3ymR?zj4Jz91!a!0H6<~0AtQ)Ax0{Rb#UJ6 z^alM7Dj07Tock^_yw(~f4)ugbR>%21>~*@RZI~1yFUn_Xv3&gVixcpzXgO68N0_m1 zmkbMitHUJaMTIc-dVJpPo^_w?iWJ?g4Ya}aM44s61;*-_l|ekVhgO(7EWo4BtiJpO z$W6N^4vS?qo+Y?@oYrH@S^Em)md%Bmho8@~*?E+3ad_C)zJc4;Y|s5GWvhfoIk%Cw zVROD_P;U%o>I@S15jC(C!)$Huf$<6LU1vvP9)S#K} z4e~$u!}h;M9yP23HMc*{=3=n#>d5hRic=;p!;;UJ6o>4je{5Mdeq9`muL@6olE}lp zulsh_kG`umKc27GjlT$wu)d^RXeXa*!@h4Pf0ZO2Ny3v(lI*^$^85HIo#K(4hp+4R zaiq_al#l$r?%RDo=_Kdj>-q^-C9CpO@$qj9en#JxcGdZO?0L|5NG=keI3$tl_Uq_X z`6wPqA3x9jp*Wv+TlBi~ye;Fc8VAkix@1*;-@oWMeEwb0X`a_T-kZvE-SfC=e5!*l z7mZ8&tMWcHj-Qv})RjK}rga+UP3Nu3S2ZrzPkoY?#U{)=c^TLB5#P08pX?-k|Drm@ z@nuyU!dKz;0kC`ozp2{?#k-;^M&oZP&{W`FRp5olnTp_5+F`_-kAZu&w9RCi3e>3p zzO%;*zcgGd;RCV z-S+eLNcDDi2c1+#ERAKx5&_0+!r^Oz=Fkn&sm>J-bfj1wjrW3ls@_j!qIuBC;(RwC zY@nt>2O=4b^YkK=5oaRIWX#hr=Q=m1TEfU2RMBvXB+R8uH5y_TDjp>h9vDtAO64g_ zivVwX!7sn|_qxG^tD#gG6A?G@fFsB=E)!608tfnyzUnekjDQ8#UxoqDDzPc@{x{ww?8jl8IQ2!BYmDM%j0uh4E0H3T*A9H X*Y5Udoe)cX^kYNawNYH(Zm|CVbmnnA literal 0 HcmV?d00001 diff --git a/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc b/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc new file mode 100644 index 0000000000000000000000000000000000000000..eecf5aedc0728635acb5ad961377f8fd31dcd773 GIT binary patch literal 178192 zcmb6B30O|u*8q$+XrAXx$(*@D5?RL-Whe<1qDdm8$&i!`$&`|W%yWq(No0pokwj^p z=XsWj`1ZY5p5Jr5-~WBT-@ofxmb3TTYu;cX4yH#UVa^j*f}$u-^NZN|;CU_gb4?&_78Ef@bTCdH?6h zlk}i0?DsCRcW|+{=gmL?#3zNr zn_R5z9Nke}KAvoTXyZ+@KM6$)W9@3c$-%|Z&UM;et*Kf%+EaNKQ+L|p%nnL!|39ZFLowb2 z)!Uv)MY)1nQ#En?W}Tl4~NT;czRPIQ=0WQ(Jd z{U%2{-v4=Y;x60WXY1_N+^?g<8{(6oFp+|YleHT!n!GW7-ar5PqrMW7#cQ!Libb4u zvB>j={tsGdo_B@z{^k0A(kk+J3GMr@Xb1Cj5!(M>(WJQ#-w%?0`JX&5p{r#gp z;gtp6-(Fq%j~LC7|B~L9M=!GXKh*yR?^BF+-+x6L!f5yZSG3o8v@!#Ef-BH7|5F$0 z7a>Z@`wQo-k+9{JK;wV*!(X59>eSyqx<-bVQ}pjKTVu~FlmGRa*IVgAX^(+Y`k!V0 z`2T;A^(mCDlVq9sA7THFF;Ag?<$X~qy_Oiz?Qb^m?BT7UbpQO(|62}Rl$y~0{Pn^A 
zT=UmaJX)R)a1H-#-kN{e^LH7uhbQZQS+|t8?jL&wR`KZnZ4;FO@%c{{ zVBYfV;++A1|Nf<)RVE#jvd0+x|FLcdZ=JNwF87^w)-L}_QKsMDyx_^BvIb1#Kfqff zYVBfgJy6d6Z3~a~KedOdZ&Yv{XZ^ay#KxCKedVfSQ&4d9+e{>>VApy3xe?XURYDqxSvtNA-!p3;w5N^goh3Tg_Y?nHlt&r_aBAL+JtCDtBuqM>lqM|J!yR6{R0I*8~C> zJ@3R`?QCu9xYL0*|9{X2x6yeSw*fo;hm8Nlo=ZHQihtM4=f&|q3;z#4+~%#5-eu$F z?rQ%p&;7rB_n*8Hc*I*H!Rsv++3$4y|F-`r9~3`y5VF2thIq#n=pFgfY|#`|GBA$V zsS0t;9i)p+#_Q+^GCG|{Ot)iKhG5LhH;43o<_X9N+0=c*GK;E z36u9;gdpEdcpb7)ANlLx_&iG-zwM=u@|y9wOakZEo=({8Z=+MPK?)eZ96lUh-h*^G zQ}o&V`c53*uny_U)S;|H&G>EFLdbBNRR!_i5FfaM6V&$r?DEmNdh zyaDlgLmZDd0$oYFq!&yXiCE_q;^|(9CoD%iI*Ks4j|vErdmsSutI0T?Hk%B{#qq_) zIDWGW$DbSGb%YnruQ$i*`UyDS=7`rNYHXg<+m6^g8JN6;K*a42f$5Pu6Ya}a!0Xoo zdc4*|e);amFLx&61Dz8QCnOMNFSUo^3Cj^r_d=}m3NiHulhc`ka{dJ1b(<8^gY&OL z94yWFa=}9opHM)#ZR)7UpLD$Ll0pkeIMdHN95<0gm~+9(??g} z6|gugHb=S#%MjmeLOv!#5ih%df`Q{z3pz{78_EbY= z(HnjEe)#w3@W}m~qPy+LGTY;va!;D<(_eDjpwA(nMt_`w)RexaDLpf8>nl#Wcxr}SKTu#u(#<5Q?@jdr||Kz)THIf3Px zB_`AFq8*DbA>Y{doQPB7@P#W*aH4)gFBqgXauQ=7yf$9r$Vozhu4#TAu2+d0u;#{yP8-m)aO*QPr4_2dT^>sTy@_iEMj&PoAEXgybIU_g)?%w)g-md5m`|+GoxLV!qrLvsTaN&rXM+v9!iG-@n ztLKzgjXl<0aE?>C?sUmRJ(5$MawK<8sS)bk$HyrpNg1}O&gBHQjDRq~W9Y{eeokbz zv}Ht~7bmP%Ty|e^9Vh5x{Nmd2`KZr5ccyRYiGGytG#C9m{~)f9afdd;HHWvSbf8}x zEGFLVdqnM|n%Mr5J0^;bcvGG;8N&a8iReJXThJ$jM4i`|7EihwG}!aNX(!oSdoM?GKMGW4zy9 z=Hvs26Lm4Bf9m!)%uX({^swKeog+{m$2|IDdgGLX;rR)o*9{WPOi-^6{;cmvAY zM6K>+Ek?iZFvB?iG38W3t*>?~E#_2GUrqZp7h(OX>-aJn!dZ6 z9^$&t`%wB)JSX6)5bjseT4<~fT#%+<3(db2btWuGM}4A#p;PE`!_Cc^7zb&8_$Agc znTr?W_)mqU=Dm{V_(gt6^1W1teue3G+OiKL-Nbv)Bd~eHQ=tS-PFB&tDq;~QpH(v= zzT`3b@BJ7SpAWxxaH8^OzwG>?%}HFEab&zs4{7pEQ0Cj9O41&j==JyN`KP`Oi~}3r zmkv2wf6O>(RRVPAKE;X;D=24VD@SCCrsF##WRGf3;SQ)LmJ zr%1EQn4j9&C8WyFL+E@vkg~Tb1_!yXq|DvPHGTPah&}4I>Zax^_+~OCc9QH2@_OR; zo`jsmkiU7TqvMc^X}uc$@~;;3t=u=e6dH#gURnM%mY}>}1h20WHm~POP%n32 z>#f2_Lw{gyvxXXJ71%#(!@AR?MQ!TsCvOr+{i&(rJ8$Td+AEtXXGk9)$@8};l}d|| z!i*#DjI>vf+?yt+8yX3DEU-_aVc}?qx-0B{Wc5e*6zphny-x@}2+k4ePEaK;`2+5x 
zZHgjarhJLBd~udMYN}e+`#~R&{~$nl4*=!60O}(L19E}MX;=YFuGRx!dPs~UOrC`U zF!|cX5UczAM8mT*_!{uhK6X?Pl$T@#h}x<_StOr`;2Krf**|tH2eOBo8WjU8K5J$?lEC}{9NF#Jp=Z^ zUwi-B|JNVI{2^iTD`!G^S^t8!^KL=Cfl5e2z&2>uQ=`^;tsW{rfAl+4*hv^JG9zsK zx->z$!vxpm62`~ao-lhdy9nC%hM>JK3A2CnEW-Tp(u&mHIkxDi`a04SI7$8Gq+z5g z(alG;;TS1>aBa!6cU7e2^<&9dD?CVRsgaG<=EJ1XIp9O^;1#59q{|I8$wj1Lu&)E; z9VgATdSP>eev0V(mysQ5n&+@Aq~n-fXOrtML%3vQ}@}PfFlxMaV?E@G^*Z?vom9XaPHqAJLc} zo(9$7DWBc<{{Ymp4$vM?K>Zg3vqvHc(7tiN{4uc^2J8pskEC%h;2%K03BiCp!0h!( z1GIAqFuR8)0s3PmFgx$~0JD4AYQpsYlS-I9>mv!X?-NIuy+Jz&v;SK=Vg8t&1k67p zbI5>wglE@+LD$KEon*id1nul61O6Z}V^TaytzMAEsu=UdL&W~6Zv*3i+K#J8d?%4syUucvb3ubm`cdW7|Z z$8t0ul^Ts}Zj*bDG)y+tN|VOjn}bxppCqkib1UzUTtVt3&BlzkJ3`9rr*AQm*$g?V zmg}zMtcH?q`My_UZjvu6=X=Jt5es#*L8?L%dLMny2gHYgjVG>y0Xo2S%K`bUgTMCtwePRJf9?P4k6NG5 z=l$;XP&(>Xo9s7B$XQtFc5dH1C|iA0{L=kOsO`CG`Yw?AnPEPH5JaK>PH7*?Z_Wp#5_I{c#*B-?|LSD$j)6VM{u^lor6py|so9_%xtotjx5!E%yR?mg1Hdr|lOqOYX6;>7l84L}-3zn%JR9+2`kgLd27?5+*t_etSs zK9yJeN8t9G)dnjaQ{cnd3(>LL*Fa_Lr_A;@6;PD0U(0k@9#j`584ObMC(noO^H=B& z1Eyz42QYgU>;=^SD=>R<{0Xyf@fgDVp*)!UwIBWAKnDCnnBOWiVZa_hdpiiTbL3%Q zb}w)M^v6~}J3j)md%p^x{(AuJxd>?AFktrT#{sjyz6a1Bwt)V5K?dw2XjeQLu#^1t z1GBU9EBWh>qHAN5$)|8&`2PC8pRJOCam4qYF`TG)73pepaJ*(X;XP5Me{4K)`YWWH z;fB|W_n_?S2dNtt7yrle!&Nx0r2>t*$0Zj3`Z@6Yko)KPAzL>$59d2~Gi)IL8L?y& zVu7Up*YiV^caI?7AB0_N4gKf&Asg@X#Bu!*|2#irbh}d7Is?h3f1V$*afi7$E|8Dw z9PIvie#qlzpdg6$>?(k=iPJvW&AkX26HAW;31~1Jm>h@WW8y?*~@4<11FW&)X=b$f$XSo5>W9col zZ`&ulo-T^(r%NEe?J9V^^cM2btwB7fpD=r=Jq)j0hIn-<;x)$+Q-3fy*2_?iAI&G8 z->no;kB?gre_M!rzpX_4@hr-<8iRWH9mH$vV@QA58}UK<+>^&|N+z$wCA5Ot*ie6u$r{uqe!mxT$li`v8HW1aCj@-xm)TZ-dRX-uBMfo$aWiJk{Me_SseSeK5N zXyH0b1H^`oOdp-s(zx!OG}2YlzJq6H$s^>1m4hHW%7AXqS6nGz$%2JS{AWuPo0`CP0B?`(ER4AxY z7))UZg`pINQ5a5v_W{@l3hER_QW!;HG=(t~#!?tZVLSy53KJ;s-u;+FVKN2YGs~$I zrcuzOphbcATF!I|GbqfYz3Ej3n(n4u!zE9 z3I-IGP*_UAkb)6~WfYcESV3VW1!D>(6jo6%rNDdN#f-ul3Tr8>qhL_r3eFUEQ*fc+ zO2LhSI|UC4dnoLs!23{fKLt+;2Pk+^I7q>pf)9m56nrThrf`J9Q3}T>_)$1c!Jonj z3MVO?qHvnR843Xu&Qb`Z5Jce|gZH 
zo22Fb_Ydm3+Yld`PTqQ3oA(Y{NWMfw$IdQUf!B(eq~SvShj`1Cz?<)rc+Tea)SYqu z@I=I+PZ;h=9?a-^3r_-%kKTMkU~;@(p&YMZygoD?7~gY85&LK2xWO*OD?Hiy*(I}) z-yjaL;s++DkFOB*uq?&(#zMGG|2U)TN%mlLy@dnvwg=$6Z7@{rR31HQO$OBK%9XqM zdO>yQ0sYZ_9+2tJS8(LqTX=O*&vAFdK2m;kN~rONhona8Q29(v8N%#XaSl4I_Q&LW z5k@^EJ`?Xylj+BGWsqO+FVb=JK>BelNs|B0(c|f)x1{-r``~2yIY_~g=4nyB(IhYI z&iYGA_Q2-P+$N0Qr@8Rv=#H=XQ^pe>&MkIhd=K%xX7Xn=-2hx4fOa z&zeq1|KU91ltY+j{-0P}XTOd?J@T6g#$y6u^4aB{BZb=as~(Ddgqm> z_elYkZ@r;Q2+xlZcHc2iY<*DvrHL$`T)caczw;u({Np9Z;x?mi9MW$OLH^TEqr6c1 zyo{HBA%2+88J>jY=Wt~f*E{Rtf#uPb6pX)i2j<)IFqS|0l2_2rri0PWuy0JiaI2N1 zyK|=2!YN8f|9BE%`@laE^XG{s%e#=fBQgGF0!*H7+CtQG|2E_sF9em-N$OJp8D^ie z_&374j?T9sY=4c_!t3-wsGsIo7DpH11hngUCi>+>J+OVXU?uK{$K%lNU60V8;DmnL zQG;>a_Xk*BuDOo>>v{-mU%U^(d^5Dec^fgNUv$0!^|Hxf{>k$(VR6Wv z-GO;iNuM|K=rZ@B-7cMk?W-|+SUxYNJNL^rh z@7<2}rVqk+8VyCclO|vsV!xw3O7d*q=lS^Ie%Nvx^Z(FU#?Lo(A@cj4g!YcCL%%Ni zfqCz$fblpp7xRenXLf4>QQ!(#NImRaf_Tjui5&F$v9_`ZpiSq9rL_PN&$9#S?8uizb zME;plxWC+skdK)(>ZP#({d(IM>DH7nzdaS;2&-ed%h7IE35>_kOpITeBFZ~(68%m0 z70XW>5#;xF8nF6aG6VVMNnsp6cwrt&??AoJ$zi?7xQhHwy+Hk3ikKfF#P*~76J;z< zBEF`f-kOGl)x{U1&>w4F;JNua68FWv*~sU}SKL>9uaFPbm&Gk|67DNw3A}z~f&6Y; zGr9MzhN0d)R+wMyDHzYF0F>90j&kZmQU6FktP=}1Gy8JC@5TD#F$ncIb{+F2Z6@Yl zP!RI@o=sT2jva#O#d}JOr%XKFq@P z8)J}f)P9V&&QL}dVcUuPq;Wr86@7ww4Ij_+j1afMIK+3b>*KS&qknHN!1^>e4CCN; z9^(*{hWWQx6y@5=vbcmDna%2{b9gr}eWgtp&h1vlI#a(C`D6~%@uLE`4|I*$ewoqr ziP6oRG!gyq#2on~Md5zj{RiV_{22Kb%i(;SHOA?ZAo^q34z~W`{4S(FqKf`M0=WN@ zW}$!dY?1!1AD(l@rwP+LX%6z8mxcO_nTK{3h+y1l-@*1z<$d(Wle37gNh2R6OO)%- zjDFG=V)o6PG>iFX*LOFphaOp&cdwjL?z|G_|FEN3m`_{7u}-#EA^mw7wvY5I>zRJ} zl3gsm54mWRpYj;!TABQ6IyjxW6Oy(XOrr^xunMjL(N?yv}%z z>nBCx{xR5z_Dvt?OSfLe^W}jr*4JZv=+BV@ak3~zJB&skpJ9TSAKTM_)rX)+q!%5I zc{cqq<#^*=e^3SC(`|@$8&0p8E8!+N)V zA?o!ZgW0`%WEAGT`bPBkyK9(_`p3{OO_9Lrbm33j--*H)Pl{Qeqv3-3*(4&~JfK(f z1D0ob2f9)I{t=9y%ZYHrK6hE&@8c^)J>#X=KI-|@i~fr)M7x4+;XXb+8L{7O^m|wh z(vdLqOHdKBe@4A5*4vHI!20JX7vSAbxCkHTL{qn1(Yh7F$!+^HnN!w+-{uZQ*DPA` zeTcEZ@!4CqJ@AylYt?noWt2J6q;ey^f5G>gQ(2-IXykgIQx5k!JN(pTsNbaV%}JsH 
zl4Ytr=Nh--`yZ#};K#M@dBuC{IQhLlXXm>mK<8Fr(LKL|@qLZ>+4z3RXBSSP#eS$l zWHP7t%UUB+Y!AMFvgsA4I9}z3VN)fi`0mk7kE{+(o?H6#mdFXP<;eBZ@*3n$Y! zF+?KbGbghz+5g(K0#4keH{wSJA1C65m8!wr!%kf(bdo*}% zG$#;bzTN)SK~BK*eedyvhaBHd@q^pf`*ZS_PMmT&9?i+GJ^k*bq6Wv8^S)oHx}Q_p ztUm7BtjV0*ttZhQ#*3j}dff5_A`hW?s=~*KHP_It@bypLAJ7lHP#Un+I76*xYZ zW%HFrbVBc>J|CU3bDTg!>jsDJ7x1&@#>cT|Z$kS^gGyDGIA}RIXH`+73^aB|bdP&# z0F|jrml;US;uJrOHhOs1j#Ig@cAG-VVCWIq5o?{5&J7ZFk4Ty8igA%q=9F)X*4D;* za;k-^hva8-(4H?YldJua;|pw(&7ai9@ei)PIag&YWWQI5^{(9kC0_H7Uwrg}Q@Sy& zv+6(_{GNAyPO6XqWXv{r8f+58sg5~c5hpGSU7Gzq`$CpNwy}iZkt>0mO0}YTW@{Cv zEP0SVc`4%fZ&=I3p3TF2nRboo^R%lH-%Dy&-~`m|&K*fl;uLg#>dI%kK#TF%RoAD= zLaAciwHq@pH}vzpC839bM4-dw=b?AE8iX z@bSdjc^f&2QC~W4%Xo1T5k)aEiQSkN$qFdHClTZEE+2k5cUY6(!q7JMVwBTyfZWql z-i}Y4z{wfz%76IBmXkahT(xcRSWczoOwfGiP)^*t;+O4(>70PD*ebp7AWlAXMo#ZM zXHH@G2O-(7=A3-V(uEVAKjQ{z|M}S6G>20?=IGTo=>(_ZoucCN%mwv$a~7)BWd!%H zyAFNRwYJB%ZpM6F;=w8AcSfIixQkQX{ZXved<3U#8t9bb^_-JbxFP93R+_#y*XGHM z66U123x11jhC-XOuuPDBGJLjf|2ad- z32K=>;gp_!l!$E9gm$q*o2Tx11MLs%hP&06U_4^nDV<5p-=aH>_I?o)S5b5fT*h;nQN3_KU9pP&y^ebo3S@PUxWaoR`0CaX--a zFQpZw9F(GFaf8%yY2_3eU zcXzw@;`Ib)v@>!Zr?S!FhRtdRPWWlZ4yX1CPFeH1mVJ0DG-=Jc92+_e%B!0rjKqQxZT^oa!Se|d}hK|NFS%OKyXqo zCvRxfU|#SV`lru%7VI9(NvIBS8+YY8C$RX^JCz46oU))`z_?TMI0>Nu;^m?Q4%4p1 ze;hNKlRNEn)5@)elM>UkoII(EQ%e6e?{OtyK0Fp1*oR6S-<~a2iXR0yRh6yt&&k(t zqW5Nos6G=y|J-xuMEcWD1Wpm+|~4=*B`$HE?%dP_zvEp0SBP z_0vOWSrF5+$^ROSYsu9u4Kq1~lXl_>Yr@da^R{pz?JxCYG{$p$KY9jD?NjCCR*hU~ z)%%W9Fbpv+GVSDqUyjV27c=#r_gs~%A8GYlwLsUn6odWC1W<27dXB2!>{V2WgbGbn zua??SPVS1nICp@LQ#G-fa`>DHC#?R&yV6=4{p5dvQ_Z?@cl@_YoN7R2>$XT4PCh7K zCH|x>)Zf!PIaqrO>Njf^C-J5^vYNh9DRVOIhxk4}NG{obsQcy?%(o$Pp(3W}((2K{ zP!jv<+W00dPDJwC)bbtzjSmiuK2-0-NsRZ}cp~6AltmoPSzOc0Nl!gDxIjq)>&0Ed z2{}#4Fp0Mqh}&~cU{C*qp%>jz&sCwEeEc~1^)kDmufiszuKfomE90tB)0EFiZnlca zon8v9EW&Z_w*DFpa+c%Mu z{qxzTZn_e@*POhnef>U8q(emfP}3%~N6ZmgF7*_T;md-Sxyk;SxwAQ$V~+~N^DQ|A zP5WU5#%rK+?5)1VZ8@AsL+-t(iOQTr{Oe_k6Ph^D;PCQYp4&LthkvFDKQ4w&zB6Y# 
zPdjlcEBt5s7}|0Q*?#)kp{qF+6XjGsr6}kbR`hhkwPsH0l)|^@Dap%vCQlyE)08uXPh99_3{28b8Qr6o-!T z)Yp@4z2u}G@w<|mGPGypOX&ZzIt3R4o+#i#;qOMvpLm` z0U7uGyEu`HV+z0ft=TjYJM#$1{a%L&KK(=UxZ$%)PW_~7d3f%mW%&E#aw zL)LH5vV!iM-|q{v&Y+%2e$X>+y8nbUK9swKe$S!DPbJpr4D`ESdG~&!Ehp;VY11*Z zkP}(H<@)Q!>ztU@=%Gc19vDZNSDZxn=6tz{X`JM>wsA4qXF0hE*{ff>cGGw@{JNlc z29huBTxV%s%1J!x`6wh>z{!dH9$NowD<^n({86KfU7XytjoZ&}>cKd@9m2^M{xX`D z5lr(*#?g1z5l&WR%$3mk19TlXRi$z+r?B>2JHK%l>bvGFC-5`$YH{=jPE|ieDPE?S z6X+ZEPH$`<$A5KE@0R&HIN>|8xl-XfI4O~sgU2s^}rTdKuoUDbJ;Z%`CPT|DeVU7DAL1F9Nb3T)kA-{9zpirG!Rv(JYE^?|t)9UPk zUT`Xn!_ws1-*c+peyqPdhJJ4$Mp%8e*?LYW^m&WVzOV3gZ0dtya2ProN1W@4cZHtY zH?CJ!{)V3_gNGidBAl??$`EgxC(w7Hd)C_?*@1d*3f&pBA)c+rDaeM|HfC?+q|OrV zqEQzV4og2YZL=;XHa=ypc4-Tzq<@NkYD@vAH1tw(dW9;dR4}15yNlZK^t>U=65td* zPx9)lNu+u1ZoXD78+vC>v{O4em{ZzhGh5KvkyG_ATrwg2CZ`l{J+8)mDf-oBEhn)h z#?c|Bh59G0_iSq+r+jyU;oPv1oXWDTd6PDO=2Win#a!Pj&GCn&4S)Nzol{9{ZxdL$ zo$jO9f_W>Sae}Z|+E1g6Qx4Yk5sHc9gog<${(7gziC6^XPV2aZayIxuclk`dLb(;3 ztYwAS+F?sMA-9|j^1(MSKfj40&H!i}lOQturx7=3;<|Y=CFtjEyzlMU`?5Uz(DjzZ zC9mr<<`Ml~Nt2p?+c5KqQ2tWpiP@fN$WHapQ!3HJb=xeFZ*egv6DE%v+V7k`C_9;xcqu;gSltIsYI#xGx4TL3t9A3f{j&7E zuKPAZ@iy|DY?hV4l$h`gI!`{ zid%f38lH>C%}BrR3Jq$DoaIyK=WtE8+hDl>DKt^tIHU6ju{rMBs*FJECtL zc`SF-LU6qj6fF~W3wgH(DmGfAj?rr2 zJh?OS<5lC4r1G2k&R;hqA$6`_xcwP-s7?F1%+$V|#-$eKmU@?< zc>k;Htf$N1OUC}eCd((ox19xL=c~-1VsY+^dxK{};gOHc&sN_dInifFzg}cP^30y! 
zoAuKZa>S*wEjK}E)03P!duNo7D3S&&x8FdZ;^yTr_f$c zH#ppUBsF%46fnP>r{7Odf07nKu0V6-XN&H851>il{RM@Xainp?zOSQ7CzA5-c7e7o z^mFw?6|uSWz1+%%EE}^*Rq}Sn`G788LTc`cXr=ER1{jao(6I9Q!Y6mbfbE~|VURsI zE2JdHg;aemb8z`n2sOGNJ|5{kO-i0jT{l10k1&0_vq<90k6Sxh=aHK8A`@m6y@rhO z5xRXb02SW3HeF*cLiNLy!$((rf=}mWO#2U4zxf+fY-`N@UiIrua*7rV7lah)`*c?p=ivuq%@~MD7H{> zU$!Wh)J1EQ{xZ8jlKkeB>sjj%^!G1PVe;aHSwBCd>n)vATaZjDbRwlYa>tQYi-l|M zg*||*R%2qf_77?J`EmHY_4n632g(;Fyd`he$wc~`;e(XO4}%_DFNYe_FGlk&6N2_HhO+qVxV4>{P;NW= zq>M!rusE00LmhWv#WT?)XwqCCUSWbKspwy@GAXH*e04DK zZLE$YnBO}f|DxL$r56(c?H>%~Qr^YSAL>Dao8ykUyOT+A^7>%@7L7gnEE&Sj*}fdQ$&Fxr+5`i61z30af$3fOlCb)@HxO7IKV%5Cce(FwL%Rs8 z-z(Ap^{pXzj!Xu2eqQ(ujq~IlP3?LO%n!+;gq<&I#h|L{lt=%8HzYeU;Oa>+`hJT- zwX9bmA#B`>es71>Nt)lbvaW~Y5&-wJ2JM6Hed?b-k)Xe8fbEl?_G8^HGauh~$(+2!^R#Ve%BsqA-)_cw_7sbaBo{(E5iRMwGT{r^DlT)j>P z&R@dnHhoWs#qsHLK=~2C`UZiEfOVn(*!eQ~E2&-Nuz&bMd#IRo-K#rh4B-BAB6!}t zg*v~ZJ*It$(3&3W>sIOpERXhx67;tz{k?*T-}E)yfSqf^ny~l}Qze+klL@Pv8|FjZ zV&%d2L+SfBES@(=&E+=J3wM42_JwMM`7ut0uzo~a8_+Mcz|J+*sifJeHDs7s7F3E( zu2-^lg1TdELlu_IA&94tfpZt0_>6b9I35OUf40?=LhrM2?&Z3G@(w{ojp+vcr`CY= zHXiWYmm=7A7!y{X=g~UL`i&_B`;8uw<>Av76qpI@d{#R{u-|x2(5?!|nsKLQ+S%=- zT3_s>^pj+Q`Tc}2c`?d_`SB+`XLiMSMpu^s%Wu(2!umMw7O?)ZXc=MWshI^~cG}R- z(^|ARsB-oc60oGq82+`-o zynSy$uwN`BtPd`^Lh7$CJ?gdUAi?}SK``F?NK4lSwU_hgK4JB_i`3lq4SoH?h_Jf< z`Xs^nR1A0?T_%`Es|l;m^mj6_&xj< z*bi}bk;6S81qS*AQf;ay8g}3hv`n72YeRb!$(%7WrG5H*!usP+R^;3Mao-L3eF>h2 z^u2#}ju#VFm#a(x=>d>#BN?cR!1`fJ6+pYR0rxfi{t2`D6JXwD0{S}{&`-Mo<5)%T zoXe))KU`Xt7?=($PtHaIs~hW-3A1ByGQmEllCXYsxDhESNfz-)y8*19@%9APXT~d$ z>H-bncF!8X{?LQqxnm7@|KI~GpUaK_-bd^u?A+@c0qi~_)&f|c-`hrb{bHye{au@b zyPWddbph-A8esPw-&}z8&Angg_X&A=cLMr10P2fvEar*5*@1GN>iw%)GR zA;_l~FuyYZ`@cB^&viRs_XXFB2-4H>SlFpx+1N^;u?se#?Ny+PTHG zGW!7g-U$TzxFdx1e|^G)^~XW7!0zkTSrP0nX9M=VVg%Rw6Fe`L5!N?V<^tA9HNyJa zQKt#3?;`Gmou^k*0qfvWzBWE-16SU~Lj`2Q}LP{edU9M-Aks`AIjp}h(v>)7P zx4z^G)SRivoH0KMlE|HV9`GKXMJRQj{W1+IR<-{=lo3l1e}TX9s)Z6qPx+|^k2Z>D 
zcju{)@|X9vTpzNWlt$h-yHI>4VS0#;AWW|kTHl#IA4sc-J-5^9whSx`QR}G`GqC(@QL#e4XjOmb*#w8^|kZUV#A>M z!2@wEqlHj)>!;fOpJ|Yz+jDdBJ4I-+@@}cyv<>QWN?-PDs-frH)t7VK6rt$)Ron8U z2ZXJEu0*mW{=}$G+(7Sd7yn3l-UY?GBF?l{d?yL=oO@h~Hz`*!e5ai4L7v=Qe}W5*}A3{%S4o+cywoOeOeRAwKQI~h5pV-G$7u1t!+e=QRU*0rG!KT0RedIZ9s-O2=Al5_t3szSPwm5t%AT zFb{@6)vUJ>Up!QZ>-TFmU&8VTi$~BjKO1p}E64mmg{*KtaSMD!J$y$F8 zi2_?vIc;#=iu3~7AMx^kK6y8%aFUFgFTp&^CY2pB%f|*>g^Kd{(-WQY0M{KLRa*r$ zj!YUtFdmTv^RtbVT}qtrqwfJ>da1M$j8h}2s4MrI+d{vW?FKAE3X#wDtRq zBvJa#W&`Pb$P+hH@ZC*+H)UOPUhO>z(z-{=@IXv46i&SF{o6DVFdkJ99Wu+sjGTvo z{X<$NJ#dq~N1sbP{j_o|Up~~3-MSA_x=FV2I_=(vet`Ofkf#3Pn@+tSp~m8Qs#anK z6gcesIYVBTg|-xq%yVYT42pI^0j+uaFC@iv@A_IH`@0X$qCTt8E#Wd8cxiP8Fkf& z)HcqLuTHuF-u)ytqJjP{hGmbbyvtNz{`QO@_1#voZ$cAEqsC|5j9gh#ZC>OR{w$az zZ>$wdR-FmSBg9ACJa!EVw%nRI>0k>a4!V0zQp}C8ef;hTFnuN`lZMlsuQsW?Buyte z=dO{DBDfEA!SC$d%USow(SGQQ=-S-J&@3*kVJ$}Bf$f9yAGX>NtS4LP{i^*2qw}34 z>4n!zjq2y*ufMA#g6?-cwIjPfM!FJ+D2Kh1)${S*5Z zkiR=&@#J4g?|Z$EIIY=DN`EX|AYoMo4d)kqDSD(%Fz*aWY*CV6|ENiT@iU>%RjB?U zr(qzkNOMAuNR-zYV1CW1BrMN=9|X)}E5h>OjXA-(v=;2&b?h0c=}lPvmz0yj&t`w> zBI*0I-cQY}gSNoiGe083(tHTkWBNTCUVN`KlC&558+XV#0Ot2jNbK&ARf!)9&%Q3} zyLT*{G=*-d+%cR!PvYe_eV_WKvd$0h1N(s4uPG08d^1)4m`B4$(^M^~FCh>YcXoEI zEFT%zSHSYGGa9g7=@J$<2YNrq;_L@sjXJh`u6KpjMH=)@V1lOx&tM@-K;ph179hBs~kSSOpw!;FG;RwI_v_k(!*xe|&OUr&CKcLbQf zWmACdlUOCf{FZ48IUZ|F)5o_HmT!_Lfvq34gz7{4im^om^L993JZHc_{?PN?V1nTX zLty?0;3w>ybUjY;wkn>C=^6=Dy?dvm4S!1SV|n{_HAz=@k;x5Gf(G-T*Y`4lA-_x{ zzx!1vv?l%fcyHTu!p>c*Dq#Dl#u45o4(&_pizlpq$=C#Jzh5+kI`L(*=4|kz&#jve z`si;UEm;=@wvQA6womiqfz|)v0)(Bb!WMw%Z#rRhTjwBsu1Mo(BtV$_gx&P{4(%IW zk0h*4ct-=CFE2^aoo%jeFU25L>id$xT~7$^zXdcO>3O%Jj^6Kk=**g@4hKi5WiEg8 zgJ2$&5$5k)`aJ)~`dpLDiGb&t17Y#sdWx_--ggUF-IVEqx68FYyjTzh7|%4Q(e-{^ zC^MI^zObs9pdTFw_9IIP^UKd;fah8vBwn_-GEtMx?^c>B;=Y5tlM3z-suBl=D@T&Q z=Wb)~7pufNJz)Fukt9?<3%G5m-A7O!A@#27v+gD>Bdp%`ssTIqZ+imv9U6f3IT2XD zkv0^t-&jv-Q+GC;OY9|hK5qf+H&h7P6$C9NRZVI`bs^(W!4{`u@__kW229?&Bw&8j z_lKtz8iJ~#Ckf`a9$|glhld3}(a+6zeeigAba3&O1$sHa^7m{7 
zVex+KMxK?V-&YwNM_7H{wTXOFHxAw9N#Da|b^jH?`lJo)JQ{qCusrId_cg3Oj{xj5 zHWTKbA$#g>kzEVA_VD+2-58(19g#LKdcYTt^+dx z_w{o?yE6#OJ1sYY{=Q0>pXM|ZjN>q1=UmqdNaI@jCSDv2EKlB;(fbo#-Jty=v*RJK zKIg^|z-)pTU$lP-Y-=px1LQN7u>AJ3C)od;0CuilC?f2>pwgY5 z_msZa9xzVh2)m!t2nH3=dS^bE@J~QIkn$WPHhQZ*LTu>y>?W)q9bCdf$>xgoKK&L z>vu^qexqZCB2L=J&9`SNiGM+wvK$X z4i^n1c>)I~IjH{vHoy5dVf+m9;LV<03r;@nAv}D-){*h;m@LKQYqX01uHTJ%E%-&h zzc43au>Wdc`YE0xtuwz(U+hnR4~i#;evg_rFEjlm>T4N>e11CP`f151FEAcn?>V0u zBb`B-G~7gr+#ch4J|R-_w&|QnXDzUC7y2FwkFSR%@-sgO%&xd7#F2@J6AU5a^yP_; z>HWa`utyLE^g=#vam>&6vyUTvN($m+dcViZ2dN^AtH}%0!+aZI@!0p7FnMopVZ5ZW z+5E7^To%vKZS?yUyttU3M?Q)xq3+`DX~TYfL;0ua^BG>eukS@YycCd+96$PFb3NMc zIiC6B>7W7m_9qE1Kf)Wwp`2R!yBRz`Z6>Jqjc}H4V+<}3o*$zx(%*C9?W?>}lwTLi z^68~?6!PEvjPU%kP0p9aP2;Br(ia~^{=c-Dya>6W!18a}ZOrF`HH4R+_uu5OxGMX; z1(runJ23v^mt($-(P#N{GT;^Z`MV0*skn>j_ar!+v_7ru9r`!}=?^v%^vgEPpQAdw zyo(U|V!-0RO1zXOFTAts4C+}UiTlLjEL6zzt69TcW?$Tuy@Xw>DH7iP8r>F-*9wxT z-`G(Y#}FN~YdQViFx_V_>G!FbzmijNKOC&ZxI5RPKi#IH->lp*u7WdwmzS%go}vGK z%*DJkh{k;Lq0c9I^P~5pe#84ANBWI=;Wc@t*O*b0&_9C+i^JC3c+4A%n}pGQJBW7A zA4Ax_8a4v+A#)W$e-oxxjE+3g8Qn*0w+8*t--i4Qy^!C-6~O!*6^Z(`E8_SyWAwv# zAG|IqM84ALm`^)L1KWQ&^!NREbwnZ$`L4;uyt|soi&J?2^4-An_RT?i6(lj9t|2J* z@luQfe+1eitB?DBP&e)eznhrykd4Z9A|c~GP=jp zCqns<4(9#$xr|S=+Z;A;(wT{V3)+izjbDxO?;Surx1Pm(K3t6Y@7RI-zYWIyOuzTW z@h1-U)Z}6Eg`&~J-+m_z{(|C;hVUF>${DktHHV^nagJAx7AiuY- zfz_oopOJ4-7{+l)JRO3@!q6|`f`yiF*?a)^m4^%Rq587=vY?mktF{^ruZ>%gUsM^B zJJ??h_3rDy{LqPQbw9j(}t3To0>Zph9UCbByy-HSh z?nWV>p0kA2>+gJ+|Juh<9)B$AyLBL+pA5!z9{K3^eVr_iIwsA;dY3JZ@y#27e#+!n z{ydPFf%WC|PtN{a80^H)0${ zm17+4nqmHV?MJzdVT`ZTV^da7U(CG(tY0{niP$p}>rBgTAgb&XZep>z3bY zo0(4I3%Y-P{C?5+F~2kBdM5oxK6jGqniS>CJcK6I<;RxkYxd*aJZ9al<}~#-i)``> zmow|c^L#xY)x321&E4*3`_-;wdj40tikSU3ex8}1o3D)ha&w+b?X}L<*E?lQy<-mD zHvLKR)~rwMA!gmV?$G!=;aj9k0koGGj>*b)jkhXf&nQ z{Z@Ug-+1w<@voArZGHXQVTtL_;CQz2o4saSos*gQ`^J0A^z~&nolY99swp4omDxYX z)0y#o^U?mZGTJqLe%V=w_r1ySHN`YDpP5&i@!v^f#?f__t|!IEk!JkgFDN~4%Sssg zxO}fg&d=W}>iTw!I;!hyb!CgmPf^75Bl-o?zoZY$@v80u)6T6Rb3Mg%f*w~n=XxOR 
zc(YE`d0wZlkB6JIpR;~tN16TFu#G7{^ro?^j(>~2-zj5X`iYTcFK9nnbtjtMXDcp6 z*5~WiDT|qQtMa}RIWLxc=!_@3S+~(X#*aL!n)HBLCjb6PlkTz4)O+iNUjN8vS2^{M zHtXPZ(#+f3$J%an%+1@4vY*Ym4_IyFhFD79zeJ8{)@vB= zK@t0aFQ&fK&bUV&Hv5440%M=Zw3GI_GWT;g&GF8^r|H+RCwkmFH%&Kwziz$hPXq2N z<$Cr|6qDcov6AC-QZL@eE%zH^51I88Dd(egV@>~3CNk2end#S%l-keQ#A&75e;uKm zu5WjuDn`bxZ;t!x5}JP2nW6VlssSy`{0*sM=6l&5Tkjjb@2F+U7iQk$a(8Hq?Q-P!H-u184}1pfNOorqB$ULknmL{?H0qLmOxd?Vvq$fR4}!Izt!e z3f-VP^njkw3wlEz=nMUzKMa6@FbD?25Eu%>U^t9`kuVBI!x$I~<6t~YfQc{(CPM&B zfvGSJro#-F3A11}%z?Qu59Y%HSO|fz2o}QI+h9BFfSs@lcEcXn3;V!^{cr#d!XY>eN8l(NgX3@lPQocT4QC(-&cZo34;SDf zT!LV@3|HVPT!Rp}4maQ?+=AP12kyc>xDOBDAv}UmcnnV<44%R>cn&Y%CA@;y@CM$( zJ9rNt;3Is3&+rAl!Z-L1Kj5d?`YsnTJVbzq5D7dWGDLx>5DlV342TJ_AU1eH9Pk2f zhzs!`J|uvIkO&e(68I02LNZ7WK9B-ZLMliNX&^16gY=LAGD0TE3|Sy6WP|LG19Czx z$PIZQFXV&#Pyh--At($*peQJ?pcoW~5>OI+p%j#cGEf%EL3yYE6`>MThAQ9(RiPSG zhZ;~5YC&zN19hPu)Q1Mp5E?;aXaY^488n9$&=UNi6|{yn&=%T3d*}cip%Zk5F3=Ua zL3ii@J)sx$hCa|2`aypf00UtV42B^v6o$cY7y%<;6pV&3Fc!wac$feaVG>M+0GI+( zVH!+_888!O!EBfVb73CLhXt?@0$~v>h9$5RmcepZ0V`n@tcEqP7S_Rf*Z>=06KsYp zuobq!cGv+sVHfO%J+K${feriN033uva2SrjQ8)(2;RKw7Q*av2KoFdTb8sFmz(u$O z!EhO_z*V>gA#fdTz)iRXw}JbGg!4i-75}cTeax3;f1Eq&LRjFNNj(zjwEgOt$HbvSY@{#Gd-Qy8T`B!6mBh%($BjPAB-37ihpor^u$_?));hABBYCfxo{tu$ z-mZg;>|&dCdz3Q$2<>LleV3bdy7KvmwA*!?U2QkI664Og+8PDbmoi)r{nG)R%ix zJv*OwvU*eF#Nqgdvm1HhmC;ujX!_&R$F$d?n{K~ug8ZgG16rH@dFC|z9lYAem-~#q za0O$x^s4FilTcGnVN2J0d`TptLgO?k9XJ1oKg$G7f&gJAg^>zqv^uGCxERw>M zYZ_+k&b2i%XrYnE)0+GePXE%CGUakcH2Oy4^*q#CnN`ori5Q3VJe4m!SkGIX%PIA| z$U1BP$YkcVPSf~iKCcDna>p)D(Dl@QI?~uJ&a2DeSGMuXnYQuMm?ph%wyo#2Z8zKT zQEF`Z zru^1orhNZ?rvA{ny1fVSlbZC~h$eps&o5;^wcl&{8(hoCb8U^jX_)DEky1wbmecJV z?>yJ^>q~dtuR6Zhbia=E7^C}Du4hc$uiAP2m0rKq{z|WB$1_T=_p#AR*872LwMpMS zX7c;pHth%TyAA0V{-@VFXHp~6#WDI4jf~xKo}lELNd$qqw_fKKQc{jw2+qKc8 zX;+S0$2%|5{jQxiz>K5EGd-?jJp#>q`hGLx_%gt>bA|7>NPoJgHvOnv$jAy0jJ~t4 zu?yz;ne?YW?~9k?>(+`UJ*24_=bH~^-0=&VbZATUsN$hrd8}Y{dCZVh0Uoo|;RBt@ zB`mvKrzfAAZ}PM7-cqqkbI-`68%_SMRK`9=6O%qUSG}todv4ZaS&h8O`;sJm*ETZv 
zzLET{P1lztpDpc9J~zVF?Upa0(*Yi1O}-Uu^fCGvyPY45KIthV)7&%qlYHMz`d@m9 z(Kp#?WW8fXpKXP%CsDM@O1JmY*YtZdp9@L3l*N?pPvzrA7LKXQO@3M4)KfT`k(EPC zeJR6Czs5wio$(sUxO6)BzL9sG^v!~%o%#W${tm;8Y+J|Zt54GXN#r-p)L(zM>95}y zlde9=wBOb;vcqIkuJjaBE?P~K&bH3fTW^VxP5Hh?x>c(^i;q*!n_cbe-K&7D(}PE9 zee8bsjJ|4Hqql1t{lRZWPd&Qah%rV^4AS|YgNJJS*!}jHa>WkTaqZ_lxIgPdciZVD&+~aqMd9Lw6f=G z%WFx_;Gk-AEw6U2l3(Bau;TRmoFeHYUn`C}HmQ8}y5@KH2R@tM^;cbCevhAPuz7#Q zv5V&S_8-TX_Z!srwi1QU_-RWj7@n5dq&Ut?Qd1!g{v0bdT{#|%4nf#y6ru^T)Lsl`1gxG(*4ot>WAikEKY;lTGht@x#;w13m4h4X%(N>+kZKQ?!tFw9EO zRE?t#myrX0JoVU#4uR zzONiUqi~1#>f3R<+MBTP=Dj%CD_X8Bd2|2VH`xle=c0GrpQTMdgMV5cF)lXrES1Lc z$d|q3;vUZX4{o%xBKUu=v82cWD?&is!ueWvvLbgF-Zs$xkQHTIjC-+b$G4))d2xJ9 z;~`e$eBEMH>@m~wNc3#l?csMUk7&`>H}Aj8in(alvK+HlS}_Bz+S{g;vSM`iD_drH zLMyuGMRZrfn6TYk^u$ZHB zrWN^WL%UnI;#TCW&kqNsIA?iAKcA;gAO0`vzF1RwzPe_`sc@mvy2rs*oa%>)J!`wc z@?13h{+ah*EKl!)>8F2xXL+B?Uu4LLAC`BqZbQmXz`rNvOPwWAX3J|v`UGv}uClz- zJiQP&Evw~yHAD9O(WV40oL_u@ zEZ@uO)sZT>YvzipUOc!lz&qCpb@NG|QoCbzP*>-@={(AAsP0}rygL2?t^>bc`cOCO zJoRHpEMBWjJFJ8ih7R4-BZ0bm zsQcOzf%(s<@?rG@1vf@n3YoorMXGbFt%O%1 zCdqJ+_c)cY8ofE$-bye%g6mR^x>kaDZ{Nqc*33$ndHaSk=g(LPZ@rJ@UyApn&G4_2 zvA`6wPh35$_;oWLFLEk_6~E-S>hJQ@Hu{JMtpsbPo$h*mjg>GY=9i0|BUuU4cN;f4 zT{kOX|142&R9UNnr__!0Jih1DE@71ePK9AN9$gSSS zj^8Ll4IlO9Wz~ROWjCu=TS_dg)q1vi^Rt2}XQZP;5=y&n0Y!uLAQ)#pKx zzSR19Mt!*u`f|zKeCkt-A$j|DU1PaCA`fcyqpR7ksdlLE3I5yb<2ylpJJ|H)=g=%h zpCp0h%HF)?w+!>tmjl-uFTHa{eft(xaqz(!>id7|68Ai}Q+=tnBk=o|`|4A|m0MFL za;cA>9zM<-?W#E*9p-(JOHaHw9q`QZnlz{Hkgt7={?K46PPK+%)5pcI;uOu^v%>pR zmUoI3>+Xi~KEWPMGUtgs+=|z&MS`&h^I7puB%ONe5q^?jSlX$xmRQ~wizm&Tv#=G< zYo2$w?LYYcxFMaBXZ>cyuQoq(%`{i7`1K|xpR%I06>p{Af$ljbS@9B2@F-evi9ViR zkHCBUjyElF?Bsnb;mt|g<1{N_^ewxqvx$rL%)MX^znk#rlxyV^%j3-K9evt0vm*D5 zzS+l4V0n07y*w^sZ!5ys>W6kWykUh;pCUBv(M4AH^8t6y#5!z6>2M-f`Bv-Ac`H(0 zD|(S{Da$P?W<~d2P-uG(=lye0a#>MkrA>5eRtL)!q3HR&CGuD6IxglQ?m?x%WcMHpTzVyk2)tnmIj$A8J;V|lipRp3E?m*siTZ+pJQewL@_jxx_{ zEw^IMd^RxZP5wV}o}G)j`$n*0+|D>J$%I?#Q?(pkD__4h=c^r;)SE4*a*KE9z 
z^I#>*>t^ax`)8!F;&=qluKjGL<+(G;x5~Ndn{_={*7DByy7kE_i_CGT)^k0-*8;Me z_V&*;=Ygl^%<;FzV|A&<7S(Ol6m>1<03XkY)y#P}kDryWM8V@HJxB4rxX3Lpo=#@U zudl3LEzDA8OIP0Cc%{+)>$UyW=j?Mw^a;=VB>jVaEPTAp@-E!rY08T!d2dpo(U~WB zS@9Z=*)_FLuoZ7zt=jY6gm=!*NiBXkcI`ulM$Y;7yOl7<-qM}N&gK0}uU7h>+iS&- z8Q3da>QYwt2J0HFSby9K|GD&>JxLbu9;b&t#&+v(MJ_h5;QTNjGoG{Y%zlZ)^_;v9 zXt6TKix}O^yd2$Z&dZ-Ss8@CKm8jZnhI&yd>hxMEI;q>~eB$S-zgdNptC?p8?*Y~2 z+j?7`%g6Tluz8YmzMX5uXj>}r$8^iAm>#!lMVmQ-_e1Tw@$Abo^=0jmh~eKnGVNzw zZG|tmx&FuPybozXVxI@)R;jNKKK9FS%lSXbyq8S>-|^cW&H6^`VCpa1$;c{=o&6cZ z%v0lNM%Fv2*Ck|nOS2BU_nCdOcDCtnmvm;l;bt1?QNrlQ4>R_gTA2RDt8HZ3(Wadv z{Y*bxr+5!Q$e1Pt&)Lt*=e$!W;Gz9uK;yu5p%v_BE$${xI_#nSB-E$zzzU)EieBY& zpMm{4sv^sVri*}>{a_2xzH zRUV-h=e&_1_lr38!3M5~`_{I$_oq$RAa7t6<+Q7u`ju(#Jnx;=?JK80O1C>Mz0&ok z;dhGCUgh~pw}0PP>HfTYtFCP$#**!>!;2HyiYtWei{|hux7xd?y=RK zNJAH-oIFH5%9nd+M&6?oX6>5YOYwf4u8-?Ioq9q&PG89DQE-5I@HBAD&lUY`V>jM5 z={dH^Utt^j`?fA`rLx`QbH;1C$8W}&UOD6D|N5mbp6qf%_3*y;q}1M9)sw9Iucbd9 zO@+oU<~O60#d}p;p8jXA+9rPk|L0Kb_wYMi^ykvr#*Y6#tjkrj_4pD7*?PPsv)Fq4 zd%bNv&VbCe8TS(V$(YR_n_MVlhwb-&A5y=S9eVzAg{DgSq^YrCh-0_~Salay|$jQAN9zZzU+<@_PyzG!#6IK%)S#o zb-=}+h3yBEOPyWSBR`)nU+Y>a))@Qt=XWE=@wxNWhsn=Zi(AVMiTY(s^rs2zJKwJj zKGru2zdxATy3Y$A`}U@-E55BBXwR@Sq(P;u^-`$|3+|{>vs$R5PdPl+6P}s-9i+7H;gP*N@lxt*7`%1T$7vl6vZ=e0Tp;Y*RL)EpCuM2;= zzDr%0dZI?FuV2*V5qD!;E|5*#p7JvOl2%B<@iW!AZz z((9P@n=|}asFQO1N*TY*rnH})=lhW2hvA

}F2uZ%xbQjUKp<1f4kO#4{_{{Or99pC2_zxwe>X+QJb zr;OkEDC37Cl=h$JO_lKvz6U1$QhkWh{?o@++MgPQDeYgYGIIUP`Udt;X8qPG_j`|XQD_K|AmT3D@L*-NJ9t`~Z#m%X7~(aPsjG`3G}Nk4JW z+!c0k&m)(%zwO8SAo`uQ7By7bZ|3b*+K)=SQf@zS{6=X%8nInzzuJ*e-MJR$N$YJ1 z)We9kQbrBis~*?*(I>*q_v-QZh^smu@2MU%$=~_>)Eeq;#8cG^9J{STHwO1g^tdzc z5y+6V`P~8P*_m_aPjuR-p09c{rP@#4TN5eQvdMAlsmJrizR5iNh3)vK?f9Q<{4vDV z{^>o`cKp@W{`)N&@8yzutJzLFwyB5TYwLR3*rpx6ziisIb^o_#vUUHC^Im@4->0_e z|2tcc=TitJNwes`!r8=rRV)bA*I)$)nwj7#XJWF zC_V3QqAI-(vpy=lE=i8qkIT)voVFF;@2d8!%Z1Po`(^j+-x3VrcawW=1gEGKWQSIc z^et@WZPl`4r?-iQbg_eHe%$`J)j|7Kx23IWgkG{wSDAnDQ;KR7Pr?dR8XMs5{a|9)WMqnFg( z`LU{1-Y|^!K8f8B+oXAZuk-KQ#y+8K%0IH*<8_bAJx=$y%{)X@w`+u^ja)Z_y0JN| z;Or&w)x-56QJ#M4!2gHtvNP?ebsS%k1!NE0Zfm=G$2m`uUet*HSFQ7HW53Nd<*(R! zy!=>DkE=yf+l+IVt;fB-zOCn>KwdlS-SR_rxVd~!>hd#>z7_44l^ZtOaCWF2x~1>= zx9@t{H!H?^S^Y+M`*hYRGq!%}&HqiXW@bpSSOv8mQ|hhXBXz&dk(}Y9&Q}N8gz!f9 z;>wiot91FdpOvYf|C4I!SGvCIU6roy{SaH%Q?0G7>rJ)ZcIvU+_0F!m>TBCiW7L-H zcSF)G?_qBp+v&@jWb^Ex-dBn}KAzq_aw6%&`TYvn&+DI@v*ybVJIt@c%>U|EwI4sO z+OS}%MfTIUgSJ-K!T0Th>&C7b{(*gHOR~cq?MrGxsvBWN-j7m;Z)IpRJ{<4q_BeU2 z?4i{5p5*m*o_R3McDL))uXKAS`YLz(PJfhX_pH+OM@Xx5dpmh=l5YRCGX3FwC7FtE z9MmzH{cQ1tG#PsGJC!!AYxPXzX+L_qqvp-DiS5VBYZWZDmCsj;2fj_w@2h>gf4h%o zN58X!W7RG`I__0FcyWViGcK&L@8t7+Iq7;MW$cJi z9={%EXan0Bx4Iv=W_TCBac?_*Dhs zGxBq8vWDabtSWbSg+WGA2EhE*LuxJO%hFw!v0?Jg`dZ@U%8gJi(T;nF&ccUe4_x5m9`_9S! zUjw@H|3A84o1W3-vV$g7$~EQMYx`{1GrMYbu4i9)G^D}E9_8%YMdppn+$g^tVqM#x z?Mi@sYhR)p)w{;F?@tKH9lpd@`%(O>mjZ+K+xM^LKRR(xRkb`_jwv})_qGowOLHc5 z*c1EoxJDHUp5Xi9T}SU8Kk1Cx@c7)bV>>#j{SVJQtJUs+{n-E3u_C;m@+nWc>P>HI zKOeX`0jp zdDfm#Crfwj7tsHe3cg(N*plR9)$Ju$&*ZJa`yISeS6!cXvAWwc)u%%#3aSU;V@B*5 zSX|xh(RuHuk_}ZzxI|kHv@5JGeR+OlLyv0qzViLbwJJB;zOZwBk4526*&%0}HaSvt zqa89l@$3g75$vEnQ`_&n5ZT`IJ=LS5pRy~xu05(Nv(8rysWPB%E`_17!w)UfES^1s6_A6WaWy7qt_S3PRw(-Mtw(-jV+woId`|YfL zw(*C`wtKzZ>#zMGAgglxLur3`R$6I4yE;-CzdNXmU-ADM#m~<2`!n&ok~@^~!yC%@ z&t0Ycqawe96o1(@NE!c`qqIN0&!~)l9aVaLpEXft{oW{NJ(aV*%B=T5W!68LGJexl z89y4wb6oM0&Pw}@5ATuEeiWj#U-^7c#xHv-)HCw*7ZK! 
zV(WG)^i}S5P5;X()4x8-^tZ3l{r|!Bzl>+cNu|d(>4|Oj;X2#dm$u$_)!*38KD3>E zX`6kzRGE2CqRcvQou%h_voiBOU72-gugtn+Rl7GXnqKwrZFS=Lt)|loCAPN}xUeYB zYmN)itCpW~?v)+1GT=b_t8eVbqnoYu9a7wW6s6galanIYPwp@ORKTOK{o?3_>Q#M) z+0S2{PrYwkLi>Ebn~P%)d7x(RPxGcz<5%j!@MhP7z9(0Q#;xqsocD4af4^&PuEO<| zt~bP2>2}VKSEk+Dw(dva?zX#Mrv8=6v}Y^RenMsXV=2>a4Ey=R63w$7>t;XBoZ#w^ z@SJB)Pb$56=urF7`HSNxaR2!{cJbVEvhule-)a5IZz*QqDVjF8=+7tispl618yuc& zpZReo*3B&+?YnW)N4&6>|Fa-=o1ZF^PONnPX5I&=?RzLwzOZtS*BO`X9;Y*IThBui z5A|eWlBIqb_?^n5SBaK1Kcg@k{a{J>Y>DQ@w66_qeme5TF#AQ|`KDDvquI}%`5g=hs$t)~lk8jN3OCg1TA#m# zo7PL+dog5Uod!G)l=8tPl_?)+oBD0rsbA^(HgTNL_3^$;T~C10^_~f}oqB9{y?4J( z&zIuI26e4?o$DX`ZmA1P+P}}$R1llHr1WNI&?&o|zr6LH-k{eHZ{ z$S1xge{CLR*!$Hq zjOuIjZ7Z4f`f+_Ar8t=RZM!FhpFd3r`$|uJZoy0ddA!_`SDAtEB=qR1cck5uFt*mzIU&E z>VD1gPbycurXKq5{m=JuW0S5?%jA!%WFPsyt-h}{z~26?M2u>;JE;weqwQYxeSp&X z=;4&sw{BpIes%j#TCQ1mSnI2%Z=vlA%9rrd3RS zqQb_0c6=iPT}GdNnVyHLX*TG2S+nq>o+sZmr}Vs4uaaHQ%PQ7c{G;U5ty*90#&Tn4 z4bbIQxAoQaRBJik%xAPWrrh@dw)V^3d2QpTolSaNMqB%7jrz9ZrzTx-vTgjKlC9^1 z_15EWcGAd3;f>y_ps@=mWy<9UHuY?F>dlhhw3GIWZg=wX5+B$zGucgIhq;Y$?B$^(dkXQF-n?#)Ei-B{l-S`dD*f1X4>I7pNx0%flMZ?xKEIM znR~wJ=U%?oEa}2EP5VoC>G`YLF1+bq{#~Yh-g_YaTfG_IUz2@WsE{fD#Hp{@E>r&o z{_lVs2Xb68>D^sT{r=p~i(h4T=COSa)9=mBH2ej7hG{pncGw9D_m&AtmZa#J;<&soRVwGVX0u}|s#JUL|2{()wk zyIY!Z=Xhz-8$PKAV4RpO^H1V16Tq6;#I~MBX($e6V`b{%pPyi_6(MZEex|Y1TTUA6Luh`?ogw z;k=hc>?gl6a`jguH_p-d(=Gm=2lY;~MwxQqpV{vEwH(*W*!3@M^Z`+f{cwI~D)nxh zW8~_u;rI~QDn}LM^Bbb)YWPgZj__ z8bTvz3{9XZG=t{Q0$PGUw1U>q2HHY9Xb&BrBXok!&;`0eH|P#MpeOW#-p~j7LO*U@pvq z`LF;MLLe-H#jpgH!ZKJ6D_|w8g4M7F*1|ei4;x@3Y=X_O1-8OA*bX~jC+vdVum|?S zKCodw9Dsvx2oA#$I10z$IGli!a0*Vt83=;2a1PGH1-J;8AQ&#g6}Sr5AOx<%4Y&!n z;5OWWyKoQg!vlB-k02Bt!xIRDr|=A(!wYx`ui!Pjfw%Au-opp@2%q3He1Wg<4Zgz< z_-VHO|JQ$i`N#1j6Z4W8B;i+U8o23p*Bcp0u7)c zG=jz;p(QkhX3!j3fP^+6X@6)1GDoeQczdMur!BMtnY#|q5wd{jI)Sv+1+s$Vbp@H< z&d?oXKD&X`n+?QP`q%?{f~$4kYcWB5Iy1zAc8~?cCKE`W=wd@h$O@7t_ELw$#a{9}p$mwO z$T%QvNIhbg8^rJ8f%st#2=9>H(McU$At!i2PsjygC-#!p2c$01iJjPZgVZ59u@{@T 
zAnl32CWL{I7a~9b7y|j6bRMMS$A|omPHe>{0dxbg6)EHD4pN8ICF7Acqz*|-dC3<) zEd&vvC`^K+P!J?95y&2qJs~!ULH0!vCoN?qKM9NnsYC3q9A-dqkUR^dti;7$@_b-6h>ge;AazSUVwV!;gD-eM8R!L~ zO9kVg6i8l4M~bcJWsI_KWdHOADJynjFZ$FVdRb?w(_LQd`-0@lo+}5Dp(4n9%3doE zqLcmTwh?_A7zPzU@)JTokg}4Na_)T5#|3Fi>XCdOmlY zH9^WP1Suh+fJk zf>9uKh)&9gjp)QrrCup7c~Y(+M29BO1H7OEG;m~WWG83@Vz&qyJ8>y1c{0~hUh>6G zY-FtSK>SwfmHH&VF^Iky#DJDC7gE3+@Q3Cgc}+odvQ}-N1&ChCid{^Q@}iS6Vv_)d zg4l}82vWb)})`25~_2(w4L%mgV9(0G!j*NrM22z*UbpUB6CrBC5bp)9M8K2nahkPL8mwKem0w8+H zmo|GrPmsE#@8Tcg3!?XiTp(>ozW9gOiWHlYPzJ=Vk0T{r8d5`Fhq%bxAa#jtZxDNt zVpkR_g4hj!{!UzU6(AvqUivR{BXx;wKM-4yVv`4Cd}1f*VK4~dLtc=4(WL>|Z*@TO z2Eq_0F1p$v{wRJa$o%C4sYC1sgV>7{JMnGtFR>d0QlGRheJ>Bv-UvtpvX`Z8(T#=S zj!cX!423}K3PMr{fKebi$&++4s1H(?=tsgBkUGkN%x7wt;E)7a1f+hk84c2|^g;S3 zWkfH1oDAbZ%9H|MNCl$*4~l}+CHdn(+LijGJy~}tBYDYT3QPhiR~@Q>j7i2O^@x5V zh@G@i1H?}B(g&$u#xCQNauz6%@k{;1zyisWcBHOpAo*ff0#d+SnCi%kNLk{^2KQN|&C7MDx6J4zlhN_MvAk<-Iua%)%veB4IIXf|cl+A(wyzSue)t%88WqmN}3hd+Zskg$-~H zBn*OkZ~{KVS&%Rqe!yYs@_^W)B&;GH6}+G*>4#ctp7p#WejDDC7unH^kBd*T&MqHt zf%I!XQs(p%GBsp@Or%F33qcg(!N|JSa$Lp`Vg)Z^*|wXv6UBji$KeefjS0;Umf z0<`b)C%qHd7}*RdYs%W`_f9QF%9=CxE}4hU#3w)>;;f0@ z=mjf4bUmEm)m zMdh%aCR-=<~ojC$A>*DEVWJUcajhztQh2n}kk$SN0ZuqTg+H z3W;9cU8b)Sk02SBod5KD%&rkfr{8B*9N8acKq~ZYk;z~taqP6d0P*~AhxBZu%n$kU z4m5pE9dGpdU1;9K=b>i}^|S4-$Ut-rkTRbKiT?-lA){mS7`cZuW7qFb%SC#hv61UD zecVRpS_oIk+XkXv1Swz%X=&pCG7WMmtR=n@-oRjB{baAW(79HD_^a4ZR;O1JXI)&( zxoZP)=G?UzDSk}awF%0Ro`_5e8Hh{Wq8B^nQ13PA+i8&T;Zt(lk@w8WwW&UrkmuU& zq{TNPKzZ1KZZ60?$vC>gJfnBv>ymci({gNe?FOlvH3(+`#|+mXPMBz`UDDbcxP zUOPcUU{3YtAJ0tORRJk`{GHLeY~&|&hu{ctnV)HJnY7d^^MGF7A*YXTuaGz49t?$? 
zPzX+d=-0p_>;sVK^*iQJMqtox`YfpNRqi3%4@o5}7X=4EC(-0fd zL2@X9J`?gd#DO!U>95%7Ynvy;gP=0$A5MO8;vI<7u0A(Q-YA%iPL8kCq3ic>Y-A3j zf|QYQtw5efiv9u;ACULt$>%Y0PM7;c*D@&T=wuE(i7$YMPTm#dV!%h_*skBDNB#P9 zDeUC^dit9GCGi_@o3zw<2bl`F6Dhh1keakVG9|JK^1YLvgZL!yAsyMtOF{ZAG%|X* ze$~h4DM;GUpEpVUC5X2{9!AQ(m%8?o?g4GV$K<)=3 zuRH0RT&s(nKEI?P-VUViHahXcq!0|N(QypX*E87Yel#JC?`i)GL`u8=0sYa}JFJ6# zZ((9b_l?*Wn{!_bX~vBbr!g1|LgJB- z96MZtkskm%eIF=grUH6>9HM=>kCbaMea})8DZZHi;=yfgs33(N!=Sk*0?k}=|>lsL3%I99#~2G3&=i+ z2y=+v0ogCu>1UXH1Gyj_ z`Q4BmAt&*15TCdQ(t?Gs0M>)pq1RJ6`s6K>{7MjCB(y^fy3?aP~nHI!O?2n@x zfm{S1L2Q{^xz^U-BdCXL1|yALKQEvTeO_*elzOX~e7TmEcdzR6iqtP-kp9bJNn0`= z3F2GgcOJl6X#YUy{UYtldWuire>!;a*Bj!3>?_%G5)wf?khMqv?0X#&^EV0n2T4JE zT7s;p52S#UkP0Noe6Ys)`j>gu_cI8+&od*lKn9SxmyiwEFZx`AKWINg==-a@$b66s z#0MqdU;6n^A!K2YeJ7z96o(>E6vRg?C=F$x1e63{CZ`FbYP)7#I#CU?hwM_Mm=l zG66XeCc$I~fGIE)ronWW4Rc^F%z&9N3+BTDSO|fz2y- zU^Q%jjj#ze!xmT%+hGUngk7)|w!vQ52R7`7-LMA^!x1K5AMSQxD9vUE`-5Dcm$#F7@ojOcm+@4 z89av<;DXnnpeVe7@Zbw?A%c?@eJQ8_<>4Jf1j!e>NKRVxeozJ8JJdwh0BJ|s6Wb5a z0mKJCLKKMXNJ&?RYVZl7LPPir(VVpC8^IUw0MSc+GiVA@m*^$G9kc~$a}W%KZx92b z!&mqYF+p^a*B!b+ED*iq_l5!R!=VncHvELxAhw-A>XUj!CwZM2wv5{g;+QGWKD)@# zk3m10xXin^V>gks%xzqw(|MDKPe(8F?rHM0&1B-#D>|JwgZNzH2zfTBkF)WK%e*gx zrLcgs%uzx};MgrBz7p1vmi0&km%cUh3C2TS&_s zrGeDMW!=`BwANoH9*kb*CoQBiQm3yGzeS#`1L$)|8RD`w=|ODol9oEDTXZ__9`V)0 zWqvY4CgQSYYaol!>Acy*XOSoCAaivH4icBO$pF~Ovj=^yB(3!iNrxGo&bvtbDtS`B zv?Jrr3OR|hPWo8#9N7zcqVqv!18lVI1>)z4%iLx+c`hI12y`;%IRL%RA4z;MtRO9Q z(}v`0yS&8n5SMkS5A}%4e9OF)g-XO_4e|lDE}83GCav{1h+l`?Mz8Z96A#5!=GjI{ z-LxS#TDPBgK`204)?){3CoXf9k$52>U()(m{D3%fq^}#u(>BFP7lZspuk-s6?@K;w zC+K7IE8@}yZE0KTlHF{10E$&%!Gp--0iYukGlIoO`tYl_4%|ivP%(mIKy6uSGrLjU9be;(qWB z$^&+~F4~s!nO+yxO}AMYSsgw?d+0*GtUc)Kpqj|{PzA)cF*JftPz`8H`l9DS#$5+~ zzyOdrpgw*5^AlMcYB}}iB;Ek(lTL-i5A^xIIkFjihK7KRJZ}?p{$AqIh_?W-Z2~RP z#X=r~)5OL9{GlCufu?|+=ye`8a@^9#lUBsBb%}qrf!4&mkwaiGbc7D@0onpKVyo>X ze-M0yP9V0!$P-`g0o~yn41_aqfjl2%Pat30btT@5cyi=(ct$)gvJdoz@6Zjf6TQyE 
zMn8AXjKrVCR@X(_a$Ts8ok7H<%~3ECT?*uRI7d96L0=B#?4v7pvw*gAKWJBvjd=*SkhlkODaahGBpn&K04T5Pp-p}5yNq-s;=5oc@xlguE%<_X zGW08qt*Z!fEv!MG2zeE*z-HKlE(&rTU?aBLUh*#!kB8g>V!Hv>ql=5Y3wMYYMQ($w z#G@iN0(PR;d3Q+PCY}Mg9mMt(yhKNz^|km8;^{$=-VOVSr$dIqBRBv@(M3n@0c^Cb z?TLyiteT{g0^ud6w)cuaVr{WKak_0919DBI}Ag|vCGB$ z=wmgua!;$TMSicxZ7=oxRpy_%DgP_vAvPI0z#;0D<5G3xEhNVzeGbo!bcbR@Mv)%` z(ib@=Z2>uU7l8i6dx1O1Iqt7AE~$SbjDgbROL-^AIpUvZ=W)oK0Ib2U=jM+6|Ltd) zZhPig?lbgtTOXv<9g8;qZWpxa7YFh?hi`MRLCU^?V!kMqd}j0VoH5+j=|3BEDmbKm74oN&i6C%s+L(lr;vhnu3j4}aUrvBn8-@G1R#rEq!)^uKPW{8%*Q$Le>gT$A91zDTF%hX2~ z8*0H2Q=XU(u5O5qkQ4nlko_R@xd&ZKP~ZXXP?5+B@=qe4BIQ^r?a3aOeH#~K4V>_g z@5^4n-~ZW1?gGomlXeRrWu9Nd@AHxx{T*~p_@CcFV6W)=vN%Xj@NBT3bB8(N#gZ z{W=xt5~P3kQIa}H-~XBqw_nKfwqO3@nCol)JS1|%K=QL8WuLV`mPRH=o&k4o+sGKw zqLi}HLFV%o@-c9%`4xVz*Ii~ZxgFpTWmp4U-YFn+te@dYU96L=mtMDdNZOKnR(&7s z*z0@$&Ln2RM)(f#fb+&5=K)j_^z)p;MDTz4%ve7UxkFs``EcY+kh+}k%ZFuuJHG9@ zNL=QKx%zeO)gh~SwsE&W*BlIv_)3{`V;R3%;)ddKk_U{Kd+@7{kh*9WMlAz zSmY&wn#A3~A|gKX`#$g>oesO&z~0fvA%qTp+sdA4iQOP1^W++hl>Orl^wCuu*r)pW zF7bb^|6~mAP>jgmW$?9szPIGI|3Y3D5^`+)NIWmd{L4CsO*@c$C;YS5WjwOKBfx9Q zux9#R0wKYbia*x?-_oBEjzc#MM3)7Q!0$oU`v#PQ22d7T=JucOg`kq4KYQ;?WCHXd zT?E{Fmi_i?FB8>4*0U)5zRoX5m-?$6ZEW;^e}DLo>qsxBoZH^%v;IuYJ(jrG?8kt9 z{<>~-+gB!X6g__R>pROSh^K^t=-qpVqz*69F+ch{2Fs9l(56QEBbn=8=VfiopTFi( z_lxoVe*V*+MY!$P5s|qOAD4cOh4Fx&%J-05lhE&V8#^|?_q{y2WFTu0=9HxlcW@0Q z#__{71UUxT1{i0!_#nrv@1%zz-JvlN)=YmVs4DU}=}a&HSZ{s*AbZmZI^T~(V{n(d zKwQ>I_C^zA9HcueC0zkz%(4f>=|>`3!4=}&koDch#DgIwOh=am>JYz$bRYNScpn{{ z;A%uf+Mo{o?6L(i4Y03VvTio{@7&g;Hxpls+yI+E_Q82b0L;;^`va0XID1xqw)BPk zE@+usxyI6;M@ipfYfJs-pX*E?)Y8Z25DR2(SZ8<8`!p~0-9&Q`DPygU{CghU1+J3U z8@d8>A=mr*x}Q1u<9pSP{r~(fXEQQc16hk-&p+JthoBRh1N{jX9%Ov7cE69Cqz)WE z{@9B$p9g^c>1VY&sAm^(C;W5lFG_w*GM&Bf>wDJj0zTwNhAJphz+3oxE^DK62e(gF zrHuHp_^S9XbN}nyLYu$B6ZAnK$B7@vGaze3dHtOOFVe++S^v7m)}L2SMgNoh>(GJt zDp)|A`E!T=`5pYf=f0lX8Yz2oA+&`e)KwbB5SKO2i9C(mj&z3_q{YARNB181$w2mDv= zFZI1C<>YgiKd$@JlP~k;4jc#cXSlzW`R93ox7(Pqhl$UI9FU5131|Yx;1C2t-2Y?m 
zp2Od-QRLZ_T!;PfeR`*ed_M8dclx@JDeX*#0QjwXJ-)xj@cUfp&!iImp923}Cln`7 z`ptgV$F)32+Hr?}?nN?_gCG2J&FvJB&k4jA<#~es9QZu(3&b}gFCoQOy&Pi^vtT1| zUeKTYb|=3s@dF@xnYncBN4mod(tBYwtVH<*{+=6-y}!ad>~_E|G!1}yb^X%LZrF-W z*1`#{W$0voNI34=Cv{DNPxfV>uZDr zS5n9U1)&XyjreaOmPU;s12XqL;qP@prNce`o`BDAmU0r`j;sgch!2I|LmKo;VJ%AW=QqS6sepq8=Ke{=FTGrWSEApILenZNsmT8D-FZ40ck``+rG=i^be zrTH@+;frl5An56ih*&ckv( z{ym6K>Sw*Oju*)v0{G;wXRWN6_E~qKXGCOeH-Py6AlL()pdsu-|6BOy-if)D`zo17 zIWNd}3go-mzrt^GFZW>o*p2&TX(IUgKYO?~S#l144zWP?f)n(96o14o_2e&s%+vj8l$T`kt}{Qr1t7O}CA${`-@^O(6ti&!++RF{dAK)>j{k z5EAsczXZ~s1Y>hGM#}MRG?F#{_4_E2I%MW=NA&BV4DqbUqY#65D6D~3;Dmq9b?kq+ zXaDCq!CgR(fwJdi?!`~KlCF#Fj@%7fiT@rplK2k!02_S|yC3Ndt+5w>9!cB@E+5jr zuk9o}58CE<_zIzJSRp(u#&CxDa0Wggr?`aBcduY@EkLzxCnZJ&m`g&T9mr=0K z4znPc+n9I*;&M*!38&yIx&uhDT?8p$327(%dR|rt{ba~X#z%+^;X#gDBarT}iO5QH z&5+dd$9-o_(wp6ej$DlVt&Bd;tVYK=$#XX;|9fy>^Xu8V5 zJI{#y9ds4oC2{uaAN%nc>3u-HzR!4vw6Xn!JOoFG%kk9-u9QSRKoEJQfa8OlTf%ih zvJStlP2Kj+yvRMd9`_q0`(5_^uk*5FD?-=dg)B?L3P>?tw+Y;Q--+1#U+laGv{c2n?n{$VlH?$;0TGal z1c|-LAUWrpB`cD16p$<+Iira1N20{uNL0x|g5)e&Ns0pR_f^$udT$)(ojcwecZ@sk zS;Nm6)~s2xX3eTqy}CE#(VUw!f1%eX^ja&-gZUkAPWa>Sze1TuQx``=8k9g^1PnyS zT$!_T0^~YC`_NcWA;kVF{4sQjH92Sq$^qqS0MObFz4D@~2Hpn?z$tE)1u;Pk@H;xz zy>~@AHxA+6kKAbtRJWsn_Tsuw@9F|WEav;J^V}mhdOp7PJuE6+z83tCxry`~dk6U< zde)|iDHhbb6z>*r37fBha>V`+sm?uHQD>*xH@{75gzY-;CeWJEyoX$RCYs-dkRR`F zzF+%pif5*GMXF`?C%wOU?R}L zfX?vDS@5&EYopKqW^YmsbAcS77w81Sz%Gys8}IrAW-hw#fpXLodH%$(eVV z_j9VT(`X7o&w;lA<1oK3{u%vE&l*}9lmg6?_u25Z&-z_o*?Z?;^E=KLw5iU$&w%`3 zXU=D@%wPZRQDdI&`Wgr&SLU;UYZK``cx9CMH{ZE4Z{~N3&5-N8>cITw?-ueE^lySe z@Y)}#=}2cj*F^6OeTzW{6s!@w|9b7WQPG{T$Y0Q>@)`PFmDz_;d6$`gJ%2*# zBKTZfLSNs26`nPeI*at#y*IMvM{$PE8!B^Y9@oQ>JMTa)k_ASp-&yz4T| zaG*88v)z1l`WAYfHdCO?iK$(PuGiiT<5RxP??irvP6AV{t@*!K2gi|A8&$wN*tCZ# zUz?yImp*@+&*9`P_#Q<21$mA1%y4bIc{0yN=1$K;^IIbyRCAy_gkD;2ti#}*6?N>| zze29yp1Buaes0_X8Q>3tIUpfm4ujV*NW#=uj{$Xla>m(8U8EQ?D(0!FTHJ_~t zK;Hy)xPShYz4i@XZ4d+K-Wxz;t&YtL=mBWUSN4&fLoTQ0yfF?NwMh@iuXmX<_9~=p 
z_#zh7Sv2a|)b%~&pFB%P-M;Ca>7#!3(fK~o^$@8tP~XZ$YG^{BeQ*L`?=tUpAi7L{ zx1+>S9=R+xk3!|YqAcBY0s0ty94Kob`1x!Z{6%zH522UlFY1~Mou?bz*ZTCX_h96! z9h7~?eBNCIzX2$wn1HznUesWu&t}V!*U)Y)bRskz$R4zQJvVo`*Bpdl$C!fO>v>Hy z{yXTm1IA`PbJm5j=SBKX*V%h{@tzyO_gm~E!S`OCZ_$JJDGx%Q!HPE(x>(S$P>s=D z=JS0SglFhKhSmp(Ky!A`InS+&LZ%;!T`a~f7970_`Qs4V9un#6xU2Q7oaF`sfYy(96^8i~==nSoe8>F}&>GO9&=ep!@UC6xV*}-I4DK&=-Wfu3t zKqpWM`4W_|X^#$GbRZuf-{xH9)$kK6$dh?zYORBL*WPO0yQ}^t(@yzi zE_9xabg}1nYs>6I%xmy_c(;xt*`L5iPBhvNM!~C|(*nlny?@d9%)7kjmU)-+4R&1t zbLxF}srLhsF4c*b#~AQDGomE`%!B!y^%*+q$h*wvmA&ZvXl{Wk@YIy~+`9$25&SyO z8cOZF_I!(uF3P+ITkM1@4TvfD8E-m#F3=DVpZUB8tINE*japyCqqsuP1+q2Q82<0g zRczWbKl(gjzC#!U-S6x?(iWyJddn)?~D_BD#11p`0E9fqcFgJ zVSa~73_i^x`8J>NQJZT$|Gx^3q3;N74`nZp)&<%hXbwUy^FBxUsSgypp3llDInwt6 zI>-7Hn|G~(*@o_C=xyjv(28I-P(GW1&?}^0bB0qqqcNBXjesTviqpI9!aM|(f%d*V z*wur|cMFghBm~~|HTN?iJOR|X_T}JrcB7D)3-i9!*+*S><~>m{Zj$5B@4cP*>+dms zz#sdWeiIwI&jsy6_8s&5)P9tmyMKT+@Y)9^de+Ea!k>n+k9nVSqAftDT8fJ`$d z&?V4?&>c|4vk2;4A$bp;A-=^%b^j8i1{%M2G1kz}c;?@5n2vpEFc4@z54obA9n8C| z00xhM>NX4L{Hj5uGm*1-?QbvAy|NE}hdvlTdC)752jI!8c`hG9=6T>R zwkv2$f5A&@mhnV--$Opknd%>`GzRh%yqF)8sSln4)_2tJ^*npcOQcv-)AVo7G$H$_XCHirM((_KM*8i%$@>U&=%2DOnx;~9xgwJ;d{dT2usWva^80nr`t^6!tTXLZ=A5w@x*Dtru~*H`0opedYYgCWXx_QF+Xx;h2XW9==sqFzA^Mi=po=P@3|HHTuxq0eNw01 ze(2d6`hxoh;8Zu%rul618GLEb6`_jY4$Agr|(pv6$f#F_Al@9Y-(cT+UmQKsK5F1%tPPnd7pvJ=Zdmegw%D=UVUkt zr2z9}+J6V7#?1G=uJ(fV%!_x9H-DQ*&ZBJvqOHMb-~-;Z2XP!iHJ~vvw^7g9<{cht zT~U3T?|S7=HAEelcU1D>E-wbz1wUh`-rE5`hz+s=cLhHqq(VW>XzjlC{Wa@4cxkO{ z0n&X5J^?$xhaegH2xt%Bu1Ik-;C>45IX8!cA@HhC<;z`WA8w1F_*E~eb&Zug>N`E} zZ#K}_YcQlY_!;~S7GkpmS_FCw8V8yO>MpG({Z_&^1dtb#U*<4$Z7HY0OF3==5~Fc( znX~X$$lm%4wV#G_SAiOKo&esC1+V2HBfyP2kwO@Oe_A2xJ zdn4{qi+aBw^|O1V_eILZVxT=~Irb}|!=N7n$=@J{Uh@&Ip@jMm z+`r=)LtAr?IyUb{e}Gr~jKjQxU>?0%f-{%*JxA~}{BZ0?a7$}`G_<|7^Zp<8>|@?B za^BJJRJ`AV>K(3kMSYKFo-twA?*f|J2xu?Q8lJJ3_p^`CX$@q7a?T1p%j+{*@Y;us zH%``v*>f7B)4F1B*16KWBX$j$kKZ;dMk z#n0Lb{?0XI-Ff!jnJwsBwW3-$3q20P06v44*6LI4>seO@JcD<2tlzkK_3hb5`rh*t z_xpmhpcod)`#n0U_%%^!v4_ 
z=L+-ruMxLnO?viDHhFY*`mAlv_@l-uE4W@ zntfDb4H<`L9z4@}u?)ao{U3x51;gO42CZNHjYEHQ;XwP~R?r>roc5kSUJI|i#_jnv z=i=;Gq(T=98nU0LuTWdXV7@c@2pX~vA=&E~_KFc7=H0w(Mj(gmZ|1$FGmok_`%fst+3}syNCTx(A&_T!6d-GV*b|a3#fO6&eIT-hrw!WRZrB4p2wljYjcige~9$n zp#rjMEdg!n0CF3=s==)Sn)`atJ)k;B4>EvT=*L67i*ZI9g6>D?MrdQ;_My;s#Gbio z`L)lZPu}%COc%f$nBQNVxA)a&Xgef( zZG(jPHnD}|-TR$|XCC~_qB;8u6vMyv5gBv_AA>Ry+{OLi?_$4&zy7+dJ_`je#mjtq z{hB(661&FiU6Jfu+>w5Fau!R~n7e$ZVR8ZLMZclZ_X_&^jNo^3(a{wG?h1YH>V001 z`dy9rjPKcJhEXj}2c^M4p#7{9lp6Bh+c@*r-fbJ3ncV#VT*Q7BbR_VuD0=x*n`jF_ zJfLSnUZ{7`*4TvloQ*CiVBFsKDxP^%z3KlVcLQJsV2<@2P4Ln_qZ%bnUl^be%rmwX zx+{P+qu;IsFY+DwJ&x(GAU5%V%e}rY^)B1yJw9LCwuP=%sa&Y zpzn!I$hp945%s;I+Iwe)&^DfZ@SQC+6nvMPjJxdT=5vIeS385&(R>+^mmn{M?tp4; zHh{7~d5i&m2kz2mrq|BIZP3jJ*YL3noOH$_A%4BTKZyES%6xzCnway}a)dv?P>=<@ z3z~pLpgtG|+!gvAVDOzA{YUz}5OD>sTHM=*R1>rS)SCJJ>mIZ`_cZRKz`HiXoB~_G zCGJ-Oy+97o8}#uEE%kkat~FSROq}Mu>@#@nJ2Q~k&q9CC8P(RDJy&4MvqtYM%-=^W zLH4dFdefis6{Qc)JoI~K^NzABK7ZxzOOP65N6r+qHlOK2bMITqJvWZfbFtULi_gU2 z;tqY!Vt$Lc#&-Zk&?Expvr^0qMnm|L%7in_(5!tl{O8a-W7l$f02HpPhG3_ zL)VyR{@S~rX4o?RsPA2a_J4A3C)kZfd&F1Z5vYqU3+N2UUGVCGOwEM86Z5`v*E!Yu zE(89KKZhRE3qBfW8sdMugU>iK^D)e)R@#RcKLcg;z z??eWn%z=I%2pLDPNywNyo6vXl-tSmFdryw^PG9>-0-(7m1vEbI+C{(B5u_^(x`N5* z-i023YCURQy35D@6ZO1iKI>G*AR}N;F~5y)_U89H@o0nJ;Jc|b+@u!F`x(v4QO_8< z7Wp`oXQKK3t~>Myh|(t5A`bWR0QRlmd*OEQ+V`eHyMeYr>&Sl9ulmagO$TMYd*8u( zEp%4b**x?NAMH)#f`B}kd{M976+H7Sqg_d~8VBQv+DFiSG555d=YwnTw2S&K)12{K z17A3d_DWI4ZO(-opm}Lq4ZIH)AbVFKWY%2p-~MR`p9|~;ovAU+m-hD< zAQspJ-T~gV0EWHHJd-uY+J9$*nT{8gJOg|Qz5v=2$ASq!`>S|S$y%pVz(_F4(Q(jW zU^tiu)Xz{Lov8Ffzz8r9XwMnoXg_Fw&=-gom8^XA0lgg6ynGBqKLx6fPe5nD8a3Zx z^n`W}P`~O=w(^}GD8Hi0qo{Jq`Zs5yWKfMiEhquhujWMir^ZnS)CIb)`A7pa z4~2m89R?J?Xf==;Xs$#XgUmqtMi`g{VgijJ6&MWO1By@nBuhWSQO#LOpg39q@I`=B340fss{1iBC`0?L`jpA~ch6+mC0_~rr4-Eg41$Tm07ep3%D0CR!t z)W6n*;#CZqcePW$%10wG1Y`m&L3XeJqyZlS^{@I-uD%59V`iNuhYkXw13`Px1*lzd zFdit6y013Mt>#g6Fo&4A`pdHWp5Ua?jM&4Kcu`;xl?o)u)uHqn#~7gbF98w(#jpS6M#0L2~b|Nc0UE0*EvA*p}4YvT0nW52$Uz~HV#l; 
zlpp0^{xk=gSIxEVD=(tTsi<<%0%)E!zOg{LC_DxIzaI%AF_`DRDX$q#wxwWrT7Z~`Kbt$=Vo9O7zd(*e4sR#1vEDc zfaSNT#t!+~=BA<+Cz1Ikx^Ap3eCE>Nw$15^)#!4RPFs+Qt|c0l=1 zK9#GPK=$&japndwL2-}?%mvC_bC3!Q0;*@lr+gIw^4l7e0Lp{>U;=Rou92i?FYAQ|WfG@pu7dFll;j>#Ya(D)~TsX*hY z40K=lNea}j+BX4;NAYQ1aslN>>p<;B0_7nO(AeZt^)wo&59L*U8UU>;%~L}#1jvu( zPW_ey9Y7bLdB_Xi1=E4@tMx72K%jgL1IkZVpqMoFaG>!he&w||7z@<*G>`zO59PlM zPjB;aidp$pzBNA|fG{uv zXx@eZ&6CER4YUV?!3dBK$dB@)_|t%KAOldWIe^Bicq)L(K<#RPg+MXa1Imxar5bDu z+JLq|?It+dAF6oNUR3>P-WPzvU>J}rzoK7)yg)Hl1p|Qe^?~YZ5%2@$yE;%i=@pyC zInzFOHyw;6uwT?8mS|4eF;*nef)C8Tt0HAq{1vFNTdoIv@ z<)b}Ny-C*mP6SgOulveFLr@QBt_y-fAO_IfH2}IVXw3^FL1WMabOav(<*6%Z=BRQ$ z$I0EHqrqoD`Ip=RXx)VY&0#vA8qwNP{G!TxE6@(;zTy_00;HFJ(b-I6IP^HU4(|u5 zmko}thQDZeUkUiN@Wr8DK{ZDF8LD~v(eggk^%D5PP{n=N@;>FJA3S}Se*;wVC*Te` zc%Slj09pXr4Ggus?|pdY!cdKEzvX@T;Wh8uk>P#$;5Ek|JH7=pFUW(An(*a@4g}1D zcwbIIM5i< zLUVyBK=VEci~~KvbHE&$-%6~5Dwfee^EArwJE6M(G3vJ{X5UN#9RUV|d*D9!2WX!f z4s>rQ7zUKffyANwsV>!K2zUV!0`8f8trhecP_9%5y+Ci;g+tGPh48ASbKpGKi>&s0 zzz%rL-)(@MSr3n)TFbY%mk#*hHD`)RF+K+gK!4X(eUcMj81yIzfC%s#z=zp0#zW5n z#X!D%%$GT*Jb`ut-9cB!U*W#;J`s>}pT_-sTRkB2|)f5gG%5-@EiBTf%;VKmPV!REA z(ad2x_)?%WvU0K*nh&ad^Z|S}C^4De0bGJ!2H$`c+*4ks0p&>JQSLece3|DVb86~R z@yWN~*@n?RD{=v#v1zVSpo<0l0s1yHJxBx6IsPvi( z1XZw;k6A#r8cza{5L5>>fOM(>#oignUcRaUa&7j{aqw|LJn$VlsA^lkb2ZP#F3=0$ zB8UNE0*!^l`{F>C1NEsINCh;n%H4EO9#jCsuvN{?2MfSRFbb&Nm=m*iY5flZeSzkv z0k{UPgPb52m9j)vV-@4#-0W22Yazm?s|bYfP8!aiU4wH zp6S$qsqLSTe+J%I6?-xe0k&WxpM}8?FcevNRgB$14^R{o1LVLbALJ@pbdUqct`VpU zlyk-MKG59G1hX7w(@Zfg07roKkA^^P-vsf&yC5s*2l|7-;2)qd6$L9?Z0q4SfV9Z! 
zPqqWVd1TdEF;E;d1anubl1zyFm(|{A>PdfzF^k&>EF)I|zU;fyPu4 z__0?%^T0uH2+RPoX${%{YQdb96^rJkGN=MHZ-u~Npj;?-^_;)i@MFP9@H?O%<0Cip zS8x{e#x@KpABDjjK;LFBRei<*S_3D+DWKeHEj{9lgii&^f%2d# zs0QRix<5g4unc?!CW9$Jv9$zYAS1ps*3_T|s0oxqpz$iVs^tctAy@;}f}`L77y~p{SHM*;0^ge3r$D|GdkZiQd=6HD<=_vX z81{mF;2`aifE4hjp>IP~pBhtgWX7mEH0Q43@M@#6{0#O3#exr0gQ}@x;5c{=`a1pu z^cSF-P<-Uf>|q_@RU>M122f*WZ&%J~V`4fFPh8P1f=l2s(3tB2)u`f42X0`W6)490 z;CrwMC^r|tP4F9t4dMXhzBN!ykrPuhn)_Se8YqY^GgQ7F0B;Q9Z-d=n4^Ztg$L5T= z65aYV7oD=wzsJkl4`~@O}^u=AeU^y&x3~b*nXI*4Y~5 z3ZNn~IWhYH^KRyreZkbuRw!fjy^H>R@G0m8lvBmPy!ql=sBXlY=U`@N6J+J-Z7>x+ z0aSCU7{sTCz6Cwx2fn`w>dY-2t@6m#-$bRFQ8SIzG) z;0rJfIR%t4m}g2EXaVGpfb4YdI?&ja3-V<4-x8KLXO(Z@)sJ#`8T1BykhNCc1OEVx zU9l=(#I3WTFASOo9D&aWh}oPgxEC!PG)AWy0_Kd#8Z&FI8kG7pp8-Z%Ue60N4>z&> z4ru-s0o6xVXaoqrEB8g9*`V{le6Rp4bh_{1H^OI!zVSbZlmo4U|66kMZ)1(B^W>=X zzmp@cjb|&_gQxT8e^(!BCtmG68kPSy-=Xcj`yLJT^Iz$7FRGuY^1s=5KD~P$_4@rc zA6}cN_q;aVeUC=fd$LEpK0W(?qq6nvJsQ<6RR3>$s4i;Tf0O?!`~T{meEq-OkJ{h= zs{Q}p?W6kpug38IHP(OY>;Dwp|BkW$cl-G7+WoKQR%e48K!-HZoQ{_)K9_^e3)0D6 zRI<)vI>$r@c^wUv^Pv;+yL++|uXZs2X9V;4wIH;R(-n49@&_S0@v>Jt&PC>&uQR7m z5)^gdjAV4ui!bhEQMKdj6)hGh4g8LmF5K~+PI4&+*{Y2@tD5t9Y-G-A=JRD)XgMIA zAiligMb)kQ5MSyZ-a(;jpH2^_@daOfDK7yl7ZTiJ?NTk+B{)SviO`%7F8cY7pIerc=<>KG^Vbgo0G*$FF6;GKgr^|JKobtR{M`ZV$j3!xt%O3 zKf3pcqmsqvbF%JK?+MgaviKxGticVG=*+}jWk^!|9U)9Odi67uF(CI}B zI9^okWSboH0C^oRD%(L$_IT|@vK3Xnymq3Jhk+DeC@AE3=>|LA(@7rU(9qpe8?}{O z7^q!JFx*kes&`MP+7&NdPsd9qUhQ>H{S*YLfW{&!S$Xkv;;T4WI`Jc%EGi$V!D#TA z<3+U(NH01PjB>Ka7jv>~#H+n*#(^{r;y-j4<8)&k4V8;Kov3`20J2L9CO9fte0e8} ze(w13j)uze@ir(4bbqpw#Y-=_4w&d(u)swm;#kfxe%4C zcAhRhd=sGj>Yike_d8ki3&&4$RI+%rRhtYz&oT8aXbjQ`;%hrhcRJA-j+c#iwGRW; zz)Ua;goW^>oh+SrwfPc+h45-CIU^_o!_@ zJ)k+#97-17!ttI?vSQPH(FH*5B#R#o-UIExP{&IrzJZf$x z?*PpSayi-=+70A(I?<1T+7ti-L0*v0(H_wJPW}Y?zLR@H`vJ9)t{^A^27y9O9t<4@ z3Ok)>Pw)Zw6v$qDNiYc%1*1VR#}{|}DCk&F0!XKQP;Dfa0a}NCpnWOa@ueI;6)JuP zDD5x{s&-{TMbHP7bF$Ws)>L_?(>kf(S0#v7(t6@O-P~RHA=25k$Jd6gaFV&&udIA^^R4dB6>R0oidQ&}W%*wawZaUCB 
ztL`-C8oTC5wWb^`0LtA$u!WDaivN;@kJ|Em4Zqn^pT;6yV>tyH0or)qbD4jaML8m0 zzT`msddF*y#cRA*!7)pH8uxAVKLd?b{7)byyyke7|2lhr*PV%DP0pwfb|J%y?{?GTGmCbLu;C;&1cIf*+_KTrwp=-c$ zNB^+AZzcCy!!NPa_YFMuMqUl2Zy&iZb0EL_k@G{9SLV*}qTeHHjy?g3S3K=~W1xq@ zI7@v)Ebk*e^V^%Rpu}K!#X=s;Z>SDI|3a<`9RiTezbzwvIJgXcbo3mw7{K1>=+l=7 z5Ucqf?*Q}#*k!4&y5)WH8QUSNZDamcu_XL5Y*c&7$2CiRUD3yb?+#7k_+O#2M~)T- zj>E@s^epr~r~)oH9_rf!|JcG;!`V|8KH~F*0iUCt9G?)XbyCz)^Y4gcLY`!G<{6~A zlKdw&0q~APXJ{^v!%?mE2q$aJi%$krQ)&w}|8B=9WNOCz4J&r$cMj-$aV_sl%l#GL zgw^T%>&t0n(+*yLA8I}){0F``yl6jAz=2xv6-NILG@IoO9RN@5tBs-Mu(<-?3cTsC z4mt?^6tLIvq7#50-7QOfuz!7Be-vZ?_J9y&wgN`2xh4B9k zs;&4G(4rt8yyoyDD{J4=_n(r@=QE83S^KDuK78a!`=8nwJ~^^-@Z3`07U-v-ho!zo zmiI+KXIhxQ#s3Z-_(nk0whLel`uamVTG^KhniOe}qVk}GomD<{`-{841_Z-IUA)R@{Ceh;)BNR2Kbxa;`l@DHtQJ`YrOa(ZZW zAQ|c#2gQduU)+OM1=+w^$0vh+=w#6yped+isc$n>^Mvdx3*8A#0H{}UzCkw6!BS5C z3>qKw0n$Nz13_umPINl@5JI1J{Od>=4>Q?@4`<8Z-N2< zouR#@=_K^C%OUKQy|>Y1?XY}ztx$uay#U3@H@JUU=e%_@SfF)`YysRv(Vp8`_!h0 zrTRN-!!JRmz4>f*2HFH z7<@hW51~ga^{s){M1BLR8Wn%i@#ITqLSHSbGw-L= zPI3}xZBPo3AH!F6{C7~*i`Jj~ivP{=2jN@7)6V=ZXd`@G&=}dre~MMIXfx0qG;mb4 zkrh6L(}{i#)Rvt3nu99vic9%co>e34aYku zP-LB14X?K1_X5VDv$s!kAU-pcc9PA%>)Hi=6nu2(4^GzxezBE(9UM=-b^e!Z&eM#? 
zM_comfU)aL?dt)33-q(Hp@YzuK(1h^FN5PXu5R#CZ9DP4gy2)q3TThizSIjF?Kgcu zZ;%#z0{WtF1f?(EI(Us~G)M)?frg+z(3%|#24FK5It&bjuK^tahQnureg?jPuL%7K zdJmL@uLot0^%+n5zn{?w)6IUo8(IX+0IJROU?%tyn~TudU>5vB zsMcUpc*bUatMoIJ8q#@L`;>e+yo(^;S`TLbKL>pPilLhVCc`r})uTCMH%2BdbH+XZrFMK- z(dB>=xA_c6P5X3Db*DM34i3V11^6@PX?X2bI#-!<@)7u>@X{Rz8cR>;Nw5rF`PLdZ z1Fw0$0M5d{12oQy(99qo{3d8dXbos45C)oq2H+#~$-pjn)xcpWV=(8^&d6EdnPc;N zt6tDFAT1yl=A5RvO%0NR6d)x?O1lUse$DqDjKiEI5+WxC%0qmR0IUOVa{mOh4u}il zfj0pCo9Fk(+>Z`uW9K?iaSi4QLt^VvLCyx)k zpXcN@bD#F3{AjZm8n`Z%XtCwDHd}hLgVld?$jF=8Ubgx>=dJ$x z+*aSCtd-Z-arcv1dA^Ic<{B$6ZfWmdS#9a%8J6C3?XK6ec1vylo0?6q@~Uqg{o2@X zYS!1;eP`t5*DG6kvzBOu3e!efIy8o*4USn_wWy^%T>P_pNiMdj(sI#K1Lxc|^74zn zIO_6$u~VoXyDLWCRB4uIse!Xio{BBMJk#1;S!?x+O}>k5s`8`}~c21s`m; z^pB;YFRvf@#?pNkt^WQU(MRo4A93_gOVjkR_y0O->D{rSFR$OGPDQVmwEAZoM4u(- zn#R%@^Q`{%Kce`Ju=1U!R!&pb%6~St^vMCym)FnDwDj5)(Ps(9PqnmJY^&e)!qUx| ztiIMBD{oJ1>E2zU_X^rL>GzP8ZTyeg+3}$N+v@*VWa)j@pZec-%hDreJmhVw0DxH>`f|W=pp{_rF}!KW4vTX##&N?7t+zlQaGY<7<~b{cG{? zco`Qh$)0vec(h+@t~r_S+rYayt~UB&P9-ZBt`(vC9W!BaN; z5AO3HjorNMwg(sdkB-*Ovc1n{|MujwRz`3BFv6Fn+l8dH*ZMbPZ&|l%&JuwKCBN-( z_fbyk=Xv{x=u_@2Sg?4J?dNu=MW=|k+uxt@(cW_rzUWIc97we$VB4=Sadp_!C*0g0+a=uG&yv;1wp;>y+dV8Hk<_rQ$f-aCy=`?e9~!%aW!d-)#}UcWl+^4NjHzF19@b!}(c z7wQ){G!)iwljqpSnp^w9u6@NL)I-`4&vG8~znkOM)Mte*+Bov}3%B#J z#^$l+LI3j}3y1xo_HVV%XygBOm#xQt-VK;Mj!9(eyx=>w|M&g~JV=z{R?Cfp1IpuDZFf_z zvz|o1Ho&jCx|jLmfLRZBhS<8?8r}7mBcS?g`&)Wj|M|iq;^wP1=xmBDX5Kq>+3Npe z^t@%m{XOjbzvmB+cE59$nei9e`e}MO+}LNFZ0mbiWgFl8pX|Kk$Ykv^_#;eS`W_8r zOEL1p@>4dMy6aRoYebyvFOw%algZSPf6TkK{%@|beiN^>e)2@O>!ngWJKl0%+i`5n zW91}S%sB5pddqL}uy<;N$@j07ZM&-1jo*ZCG>$OwXQ*fMe$i*+UDd(N%c86S<~2dQ zvO7A3o49VTw&Tvleq-wWxQ*w7q<+9Gjn-E2zuf z`{p$N)7H(^asK<||0vx1*lkny+XZ0c72SSW!G)$c@d^Qn%}qiIhfSgm!EUM zj;Gc}vz{ju{?Ynda?R!;e{~ygs@dTto?Mr09)2rm6i*nAb+Z^v7vsr8$@xvBfaGc%bwPWIxQ)#pBH*U#CV0kf|a+V9VnvPz#U z#nzj39(PS0o4@(rSo`eNZQgGGZuhS{(QN(STxG`5drLn%?gPKtI_&by)_tZF*3YVn zW`4_5-RSbx%g$5wN4EbD&xMu`Z`RA2mG-%`r?HK{VoUp+tzOcOZ_0GL&#s(j##4Ui 
zUGx0uc>9s9zwWhd+{G4K{}ry-^Fz3sm)w1Ayw7`D|JkdX=j_&Jscqh8)Ub7Qyn^cO z+kY~CXy?ppW4m%T&hc%`c={x$YUQo(k;gyMeU)mt z9dC=?w!X^Ux9#svH-3M;HrLkky#%&C$HcPxOZ$Ol-roBxy4^2RRx|r(t1YeV{`UJB zBe$u(*yeX}Yr8)UonYo+X3;ceUbDr&V#l5AmYq+3L*uvD_hamSdgZ#`*3o%Or*5{- zm1Ldme6G4-<2%ya<~Q*vyIyj3vUVxk**w%LZre|)XrEuj(%5<1G|KK5ExOtJn{QY@ z*&hVV^C!H9ZQo#oea?KfDPYc1U+bJW>yL8H?%4G(e!jI&l)*l)5B(i5aZd`j{@c0d zSP}B4{XD;YUMAz*VV>((FW5S-u-VpU?sWD!v*BAC=eYwmPgPo)eePZ=_8s%wy%R9! zyi*_8edNY$ZR_S@I-9?@9-H;oHMiTJ!hf{&S+S~JKe?9M zdgwIG)=##R_8f7o2+yqojfb}CWA}{|=WO1GhuLv__m_<$$z*qKYGKYP*%A!5ab#X$ z&oR3%S$`W}uwVBsm~%{S>n~S48%L&YCcftVhuG&%g?P4~1$Q;C&+^20VeOL?Gy8nB zEb;6&P&#YHgA=Z*nK3Trfom_tle)$yXQlhUbe0t^!J-{#V3DSzkS*o ze{m-TY`eh=?0)`1dt1LNoBPdkviwDx=VVokee{+wG`=S({^{mdy>3l0&DvF1W9Mo8 zZ}xmqa+qB|r_bB`Cmm+&x1DZb=RN*fn}<2y*>#>Nqm8d>43o!~Ust7{MN7V}`IFth zw*c3j;L+I4)7eV=pt%tAd}o%FVOey@4)1ziQ}3maaDCRt)(%J+xy}DME&`n z#TG5MyUKohKXM%9c9$<~@0b7Gf4oV|oqflD?LSts>*a;>B1G@ZTlI_R`O?#7h~5~q zWS;-X(05{4ZDg z^QGtJvi51-urzHF<=3D8#0R1|=T;plecFDP6Ump~bXYNsJH>;*- zuKh(ODc-zKW7f9qvZu3kQvPwUe;fY?u3ZzGpR+9`XPuYfuCw=B|I>3vj_cNL>p#{A z>Ccy*=ha&(*>COIdW-dwvb*lzev!+Jk9nMn$gfn$+uq+5B;a@cd40+H?8D6mF5qVoj?DD_iS9Z&H7+nnETYp-eA5> z-QF~IjI+1wlP&E?ohhy{SLKiVJdwPm;Z0jl2UEwlcJ0er|5tX(F6+G1zuP#nv5rl? 
zH#<3*wY!|k((~`SJWvl>&xI>ke;s}hC5}GUU&cpPPdp|McYm>R`?St)n4RyKA4s2a z$RqYQH0DAae3j^n@;W<)SL0V9jlIh8lXuy1Wq(z7 zW?Z))1>*oq0Yy zQ2%)<4XyXeUh&<`A$_j>>9?5vG;g+lo?ohu5^HonP3?7GySOg}`!VyKBZur$4!Pp( z<7S`FQ>krx*|A@r3HE3D$@r`F-zlf+=hgms^`iB2o_*P@t8y;ho;Lm~Deby?7TwC6 zXN;f%J^;w9B`#PdVg#3ER)<>(XCd zINhzgcF9BAv!70sf72g+m{0a;C!2Pg3)yjy5AK`iBzm4J)op*Pd48EZHI;n6^p~7( z%>Giz#(Vjxy`O!Wjg#lDnV*h-I(eDRBj*?ECz+ckvu~&EckNs2cWM>2W1W8C{F&#X zd9K>HT>S?3ZIfU2EB2qz_1l4UrTuB{DfvlR=l)vV&$Yimddc4UqhFqPZhvMx?7Ky5 zys9IYZ)nzenYLR$C#Tx@&Tk9#!@kM-Hv94r>CJqa_G_tE&0krQuRHTr|LXjj{g!dD z?|boRKWtOOw$H}?taY`D^O~b({qfv2`FZ%E%ikzF&I!&h{n@yC+jvsDbH?SjB zir#*aC$_VnWaBN6(b*rgey4LzG5&tB{&)^rf9_n$^Tx#Y`xM#TImr1@bl&Q*va{y| zGyjXEKVN$KRBIQO#nQZ)EzNPrQl5v5pXc}(+n#;d_zlbN?9N%rzG(DCZyGvpwW%xC zbu}C3wPQBUHfEihd=t;kzBW&09_c6RythlK{pE%0IhX4Gyj@n_?Dl_+sqZ-dk8ItSbLZ4618hC&bCszVQ}1@2sScKv zlz&6X3+sn-j`B5zb!_t1#Kx7~JmGy?FE5<-x2?;&`z*b`QhMgeoG&>)mRG-9ySBP)$Db)6%D%b6`kiX>P98t?@7?u7 zj{*~VSvsVGXv4D&Pl$Hz952ktGb77&wnUce*SB`{6Ij|ft)+E$gxd8eFnpb4sPW&U zfbGxg$D=(8tdC~xRxS-~@5Kc*@iaVJ=ThkXSM9f&ej1*Q97m4=Q%u}F3e5h(za#0f zdD(hr_3wQrRr=dEONwTlT>cHwG@JWnc436QAETUT z<|IEo)O}xrGjCgZVWKF0mx?Am`cZ39+MjZ9^>z1`*!v9*$?yCNBl26jR1JgojsNLo ztX*W!7B>Ei0oSfgFdoHmv~3Y< zPkowk4E)5!mBGa|(fV)nr}X4)oBWu1Gj%kpgYkQ`dI#N4J@Sn;l9|^P&fd_hlZ&3U z@ttU(d2Ml^UVdXg;^i1o@^U;hF5}t@=goBL|tv2o3{ ze%t+N?Nim3{o#Z$ed?FGt?A-fA=;^H|IRk9p7&($YjE~OaJ=-JImwsZt=*>!ZTm-d zo{!d9YU4PZTlVAD&T3)hLyeu?I!7DX@m&2{G;@*z+pNF7+eKR(s8!U)`;m!@c-qJh z`^%TsuIE1I*X=9yGFrb+TARF#Oh6sT@8W~<6MyVQ6Q4Ivow^R{Cj0iu$v1ZCZ{MzD z`)!cc>cgJf{u>Q<u_YWsbV&hLYH=gm6Zl;;<76%&4 zbnUxY`$hkR=9PLKVaIi%zKy5GZBgcDPOv|-UZ^WGPeY5?`;XSYx^L=!h4pi+t84$# z+7B?}NqV$;JKO&kzu5TZ)C|U9;%7fL{y%l=p++j_XSryL0}WHCUHq{pdRe=n*G1bW z`}2++e{r{O%r(!6)FTt8apP`g`9$etRCb@)5H?`x>18 z(Z)S}nEajF(sqjOryd!jp7mqv(98?|*tc%kJRG@a`?GZwf9%0T*8k9(T2JPAh<)NI zVON}=c8W9clq4g9@!I-hA9VIs?&R_?R5YX>OrF{&_kCsiKfclS*JGabn{2v`e*|@< zePZKa>3t1&U&VN`POjnB<)bFDqwbqY&%V>h`T5(nADKn%8=kFSSNitJ!=A`b(sXmG 
zNp5j~`myz$(CW#DjdQ8==DA|}Yg6CGdDuKZ&@a~g)FYn%DEq`yQZKgmkFS($p1b%r z>&VDwlZEDm=VKFBUnOk&%=cY?;}lojvro)=-tiavzx6j;)a1{`^=jWnPyh3S_f5Zj zcgz30Y(uZO{FwbM{@AmlCA0p_czNE{3bw;-Z|QOFc3+=RnnOy)fAyP8g$)8~6Km zTuBSt{Pi{SYR;D?pKW{F_M8{Z`dRnLj&C6615?+gewptM(wn@R=h1@Ix}SA&vyVmD z_c^C3j>IWMv0q~Ic6OwVW0Bd{=Vcq%-QFiZk^C(*&yUo4-Vhh@Es|a4B-8VWW}V#p zr^xnm&r8qxGI`6KWJO17zw0|oS3k1!fSK2%>E><Ksdh$I-{uqzR6Z2@EV@Z!TGUurDw{P{d_lJ4U#e~*x^VQ0WH~+@JnTLi+bl;q3 zO#l9kw*99SWzYWmqvpL+SDriEr>=_I`+e$JzvW(v;%|i2_s%Ps{oJ%S=S4GbU)uB6 z$QWiH#QqD}n|U>UZ+#+p+}gPp)IRs)E9)E`Z2k88!QQuih~uj5=M9s0^02|gd$hqp zXCKgg`eB~jes9Jxw4|+%y#t*8fx-L1c(k8yHTffswYGkFUV8UEia&E6K%SSa=L6gC zr`1ecBVLvX+I#(*ab0xd;(Tn@jXu|zI7eH#(KFY+v?$Mu#;z{EbabxtoNtdlZ{E zUOuj3;HQNRuT|ffT=eev$xEfbx$@#b?9vv$zU89YwMqNgVfi_-|FbWmMY;T@-ISIA z)9!dgK<%#mn$d6CO}*qd?S@YhJ+*I7QR({>czh4LG)dMZ9in!Jmk+*bY1!pST*uTHNXH!1@I{-pKi5i9q*TZ;zX?;*p%D3JC}10lYV6KU2CyR8#Cv|_toyrm}5$+|L12r?1<`L{a%V2?A&H_Zh3Et{u8^wx6(hY*z!;8(jMLEAE$O*#+~XUKc~N~as1!> z-?^MGzwAaP|2d}G#mx2Y2K*4$lzlF)zEH;5`I_`^YYpHHveSawT$evzJbwb#+R>bahqE9Y7&MW=h=WQ2bNB---t#;$j zB#JBl4-0%axtwRbjel=6>Q@TYc+Yzp1-D^`O-8l{V53xSK2scGU6l5o-6i z!`AimuR7*$ha+F=_s->9BW?UYjI{C3E@I;!#Xe=~qAOH&cGt##FsF%s#@r${{+Tsw z{MY!K+Q^sm7j|7*B)b83-}+nr-@1SASNssyri3o8b)t-OjP#cqw*DSF*4O42YPYuf z_V4h+`r50%C606{z9$!+W(v^m?(;S`0%qPaz2&Ff_%n%5`OUml`csrT$S3{Mp3`?@ zM;#QJt#;JGYW06BV(+p@&Jh&C0smd5xW-KDzqzo3EjzdKuG@BD)uwEHJ^qb~!*^|a#X zbOFYFbXSp+e&Xsf?$S=b`v1071JO%yQxBAWQ29JbuuHQy!<9N}cXM*a4C?=3x19RB z`bd}hy%aae1;sI_e1@uue{Y5hsTAkU$;r;B|A*Z&_qB1fi7EZlil1e`j{WE7ylU5F z+#d_%=k&L&*F|!UVD#BaWtCme6Wj90f41FDN_Pp|oD*Cy@AH~qBZxmQ&5 z)V>`pr0-KOYijIxjxMOAc88Zwt{cG5wd&9Hck_`h_4|B!jkt{c_{nH^-0nXxSMJQcI+cxOj5hY9d_1I9PDqgBRNN~@#p>9#{bhA z6MwF(Wo-OYwyA&OPX#6ZE~#z&fsal6UuOT!#y|0ZjsN_asQ%Ud$%Vh-Dh~3RlDNog z!nTTY{F$WN6*td~v7(#N^&dN2?YfLRQ&WCUe_QRxsQwlI z-SNZQDvp~gZ#*C__O0z^pKO!1-*<}p$o}z(MNjS9l0@Kg~*mwkQbZ}NX(7vCSEA8Dta*hz7-JQ#M0~+VZiM1~U#ZXad}n{?d|LjmK539! 
z^y#Cqo1}j*b68XPuRpqdJ+-TS7TbFKQ(BaxgV1oo$IUtsE2 z-v%}eel%}PT)%(n%JhA!G`{HHxbMZKmDgqm_D-*|D8{8~qEkLku}u0C2l+gNU9+KI z_f@<86Hm32pLhP5Gv+n_KOf39p!1g-{p(M?n387D#lVD3!}=UPS>L}scCu?d;+^sT z@KKlkkDF}w?>qAMyXThH^lywA_taY#E(ChiTe@n}n*ILG&q`-5*kVaw@2Dx45-e*J z)xX*|?@{MS0DpbbmcM}iA%!z98Wf`zVe@BLv8+@lE_zp_AQG(^eaE}&aCK<|Dih`CJrb+ zC&OCB*7$mD8M#gQ$#-M*F6C!mjG=z*U;4a@!KkGTTheRHRG ziF%)wO!oKX&%1A25 zm7{a?z?KY$V=Xy-D6l(!yB6R4GRnWNbBBbfdt?oKpR&)WpPo($412Tum2NBE3T%qL z>GI+=9|eA#^u^Sv{q^s*M7r#{S^P|O{9xwrz0@`9=FHcs^V&DAR9D^aJG?KW=;>_5 z$4lSy#NPI*|9qtm_gA~qPkN?P{a<}jb7~~#2+Dtlwh6vh{#ShT^Y3s3}%KzA} z8*K`Z|3;5S{;K?^__$Y5<^TBq!`F8Q)cpPblcYqFJ&H)fC?lILWzUipGAmihDrH4V zLs1!th6XCq5S8j04T>b2NXffsn8_%9&&N5vKA-Qq>+^g6bKduM@6+=>&f_`G^E}Ua z6^(sK_(y)u&W#}aC;!>w?hE{%I|ThTe=f;-&Emw)8b#!-y*hrBN1ji9E2RbW`mujP z4~*mo5qHQBQlAp@gHHp*^m~%#i_6JBAitGZKLY=7?bU05|GC?1cIjy!?W{5}{x0x$ zs9os}{Ev@$wIBGml$!lSMEILue&|m4&yemS=Ro+U4P6r49{4BipYY*t`0QMNY69@_ z?|IOX@FDws;{bf#Zt7Ug?RQlq@KJvm`48}U+VO>01pD>tatQX;f8>&*z^Al*%%|q^ z!O!+{zSn-vuR)H!I_$UP`m8IkU-!u8 ziLl=a^*!rgPoskR^``kfyX?f6p0MAbSH&A)zf!ZSpZ!ZXy$}(;QOlqChzM_|t1(HL zL)(uD_UGGnCq{%kUziIlP-|IF0OYmiw| zg|V3LZO3E`G5^XY?^-sO55L_fRCXc!xOXz`=aFC6JeidaKmWpbtOor4(d|}N7(M3p z)57}!7e61N&-GE?Z9sbL`Fg&~)G3fNz43IAgnYu+DSL$&{JB*Y%lva*9`#Yud>m-e z$qw^lJnl^h_@mHCa{~BtF>vy9@Tb_hoj3TSVmqn}_)~m4SLtu^ZLNozGW%WE`2e%u z%yv=Seh;c~`}MrS?RSMIx8G5tNq*$#td8qRKH0BzKW@LCQ=7|&eO>yT=nVV1{(SLf zvMR@j$=N3m4dp%zdbW^z6<~MGJNJf*q6_GPf7pA zGzjNx-M0!HieH$4*|i0pHjB zCwVmRId~-`4*2912F(CIHQTNY20s2L&Pm!Ai;o%hJU&_&P<(tcu44Ws9v@Ex^7xq2 zkH^QmeR+JGmqGFIrtzXdC-Hiv>O~J8AMc7K@FD%fwb$8Ue$U-rEkpC$;)YR|8PMOM z)|S>)@Xs&O#iV~rsTnJPhxz5FPe}h6(w)D&MEuP< z4*zg6tED>Rw~j1m1NqBiUJe013O&{(viZ#WDFb}oZ2UM1_>@H?J41dzz;4`g_pkq8 z*8}_Mwt<~TDhEfxKQuHre}NwejL%;Mzu@9Ns3%6B7VAp>uT=zD%i z>trCOx0g=rzl75Z*w?FMwWWN1U+<^+HOe3T<20Y&DGT}h4%y7-H^r)He$)BdGPs7E#mX*D`{Vh|LJQu|5q3j|DPR+jJd)2f9e?L|JCE1 z|GB1||5N;l|JMd}_|hKncctptrJVoS))M*?{+V-1dIA4Xg_h3=f9cBIb@sqNVyad=lAAA z|L-a)H%R{(t;JJS!Jk!$);qX9 
zqMHq;*N8Kh?ilFyRY$Mu{BCN}JCBIywYpY%wut_o7?W() zT(0mM`iB<&z6^ebw^P_d`OnzPYkNF`{-Nf7YM_6Z_}zT^{kC$cKaW8F15=OIlm62? ze$VU&{U7bg_qu)(_^*rWQv&>h zizgi;{L@Nl>pXe>Z~^yJ{_8*JAGQ3GH1v16dg&nHGw{On5yzo_ z`f)=a=$|q4?E>iU?&Y)z{Hc*^w*mSuys&u|@N&+Vvj=|i5f0V=5>79Ge~N{@Jn&DJ zQB5U&>OLuWrwaUC*IH2BceqWo)|M!Iztf|~`jF?Xo$J5v1=n9$i|e1ipX)E*2KdWIxJv9#Auj0K zs23qlxJs$TB5vrLl~Ntx*6-qUnZbxFDd(MAV2o>f`83|o%D*)gpF3{-D+k~6Pclh_ zoVmYFPLt4|tuH=Kg@4GgT5Ch=3(pUScaq=5`r@)G`3G8GC|bck=vKb|1Ankv*=>}b z_C5K#ve)~;zpWA3ZGeAJ&rUOLE}zFoRedpwkB5J;_}JLWg2%_D-*|jfoXq2+@(>mu zZw-6M;^W}=U!cEVl3RNoA7@`|E+6w7TKMWJ<~O`u2Uo&>(3E&hKg@5a`IFJyzTQ*< zU%9k4pG3^Q#FuD($-d%#V16I%(A$Rj-Ltsk6$$yoZQDruRmkNDN$)84iezo@?P_ypotzRs$Sh+kCSaFvkH`14D|`Qs1% z3=VqMUXJr;Mg!+h(NxYKuaBHRU6*tI9NmTA`sx!}zf*nHGEf2^`rfya4+diT z9@U3Y_oe!9)iu;He4fj0MP1{I?wr;bX@83C3(!9CL;4)mhyOW??@@i&9rc6v*D~`Y zU=VO_y2>hAx{qZgd`HVmHmpOkz*Mk4UZ@ulw`7`Sw=g)IB&Y%6x zoIf2EIe(ne@Ozx|ySC;0*}l5DeE5Gi6e_B7DwH)6A&F{9gKe%?0x- zZPM+-zl75Z*hQ&qw-MaF2JGSX^=luuFJ)=)`pZW3=NKDgmZfp~@^awz)olj1uc}gR zUsegt*J&0pLG{~h!9bI``Y{1ZNUHbiZ7>uzsmmHFbVlAzEo2jMSM|vPWGkKzr-yK z{0ud(&gAjs%R<`N_HNDPbNh1K!|hAU9QtQ! 
zT&?8xCEb?W*O5uwzD^(D_7(Jw*;m=wa{L|{HD%g=fPI}=*jzs5SMg{;9L?_%Io13Q zdMkDg>F;rC1?G2j?zp!!zXhkaBp$;2cAD(A3Gv)ep^rFI^g*iEr@P>p!7|Z=5(jeRL4N3=VI<$?X>wli$Vh5nFS7?j>=2rkvvVbd=aX zCH)H;Z(Ljser6>c{z&VWioDD@jGyVFJ}rX&r3uk*p?~+^xgDVI^iW6p%xU9$u9fb& z6|emYms7vvw!Zq)jEW`jA^(~CLDy77^GW&hMC8x6bhdPY|D4||M+W|r@@ML+M*cim zjr=F&&$7&aQvRHa-$VKH1pGc_lbUN1@`<0zHklm(KezXq><0fiy8o$H;OG6^Ol9!X zui>K|_*qeX;REKg+ssT8@Kbd{_7Lzh&gbqI@Kbh4o@D)&`K{WwV&=E%`-++0I@IL1 zEboYz-#W4qV~>$le_&7RPAM*>&ygQYe~0hYs?D1&Vt!CcqJIGXIkfO&UpBw;Wy@)P z$J{)hr^)BHbO@i{d~3%4)Gt~h#{a-*KEDp@`22P<zFYDukKjr^jGDIx@FE9oE&UqCb@bAk;S9GM$QGZ1bcYM#d z;(SNQxm9@Wp#(n6|DX1Q{x9xUey99jF{1m~eCS_3qq{rlPyRn+6ZF?G+bjirEseX` zlmEBt`}$sph+dQbp9lSCK3o1%LO$`I@}CLde`fI67Ko1%`<;vd|C24QJqG_blqXW3 zGxDEnJBa_3|15GP{}z5{*vG}-|HrbnRHtD1&ua#6>5>W>Tf6M4>*`uI_Q*w7jL`L0|!m-sWIb!LV+__``_=5OK;)gNX{)F%;l^ldaY z!ERio)aM{B>6_JDL7bxXS03V5%K4oN7~|Te7~uV^{M?TC+;JOuR^RhaGL?gzzplSX z|1{s~U(mlQF<%Gy&o0ZFd(dA`DX|9nuQcC4=VwqKwrBcNeRwGNZ=B_B3;i8zoLT*? zbwzK9{0+RQ@i{0>Jf{8eClkQy32z@90ne>^`4oZo)K}FO+BJ@w;>bVx}R!hj_LgiQnxsXL~GgD82A{0dNU#^*A1*|G|EV zc>htsetUdw>E6Ot_}-2~FM=RPt-)WpxqQUGWt$e<*o*j2@$q^k#m7NCPcJqB|3~Le=m7gY zy(Kyc{O>e*`wZ~iP(itGrl_`VZmE$X?7`EpAP4*(Kc@3v?U(ukFT)Pqy4-j|c1eCE z@HN>f`4#sIz?J+8$!Gn6$#|dofA+zCy^s4v;d|B#PPBskK3e%!LO$>@>HFv?@Cm6Z z?2GskT_@AD{~r7T_-uXP(H8iWw9E=m`n(41kNmkf%5oL_Kh@`z;s2>Ve-i$;rPRze@W)i2pG^Lr>ht%)$^TP*{=_0N z@i%F|$fvn{%=-k}fwADhDXU&D!HfI$Iv2r{*f8B`j5oFKMqu=cc67!2HySOU;d6VG z^6tSdPevz1LCzrSi1p3oBYr(vw)-aZ|MdJ?7RC1@N5`O4=-)c>=@;m~JmzT+G3l?+ z!=BY+-19!Tk^WTw&zOYxRThyn0Qv_6Y?G+Ji3wk)?4{IKi~YMRlflotJpFkhnvVl5 zx=j|-{1}gWrzRqN6*{Ti6%pRFf2Rxli=A>NVm?%Chx8WH{1o5LANM!?X@4yY`d@#( zr~}o9J1o6&qYnBzR>hxy{uiYzz5;*RUwaRK;Hz|U9`t`^w&6VVr~S1b(BEgh$KB@g zp}(zGo;CDOx8MH|{#iTwOn>Nq#m@5?^tasb;tupz+2vIR{YxtEOe6msaqb^Y>I?ok zxnAtW_V*t~bs5-Pev|!%G}*5n_&K_Na7~l_W^nuc){5J2z%i0X_M2u(^3i|d$?Z2; zgWGRTu!MXTe=qnVeq|*b{6z7$cBSj>@vz_YQSS_R{EhL0{dWJIvkmq-J#^P}ioYtm z6(Xj>p8X1~+5WixG?TyZCsnSqedRvXhg~ddJW(Ik98jo-`f%mKBzM$*pLLQNh5E2b 
z|J^I~hAzKw`23Zfs1J{kH#S23xWF`fN^`x1*PK7A$RDu%-;SI=Ewea(l=gA{l*MuW zXf^HsiuMvdwEw%{2=G#|9sii~=cYt`9`bjtKRpWa{d?}WC;8N$@Hr9k-)`zU0rRt? zZaLL|(VuX71LpT>M>;1!^5YJFqjLi2PguAD@~OW_vi?SP>C-|kSWI?#DYsEuOm=x< z$lRB(XTPLviXyVh-d=;HF%In4(i`>~sqDN1pKECB;$=nRzpUu%)m+3!FI~$oh>yO(lh%re-@y*qbrj!ee>VCe#YftoHT({H zaBuZT3-;0N+$$?Vt}u$I{@?lO@S~XD10J;>FuwzwN8HBzmg!n%V1A=5>rY^Q+tr;6 z&Ybk)YLZEzKIYdTwebS}es|_liFM0=#w(y>wnXh z>wo?!*Z*J~*S}#c*T1y2AXgX>-|N5bnu9p~@x)#kirdux)9(%9xN${7PsH_Gg*oh; z2lX)<6^Bc?5)u5vTkk635BUerb>Pp#sGfg`FR5}0Q@&3E{<~w>E(QKWGprSW zf5qH72jKf~Vf`lHuamc{O{PJEvx@l&Kj80h_fjAHz1E}nxu9?J5&Ewf_q_u8hdfW3 z0R69j{yrM|Kk2(Z82VHHSXbyz{bQ}6|E<{(lfeHQi+@u+5&dKDCqe(u6RYrSt$+Oo z{*eAVxc-~xa{X5YbNxU6Vfvf28q%cycGBOaQ*VvmT>opDT>rO~{}TFtUcfH=4-RMx zJ9$*FOAmIlbnjCg*catZOJG-O4L)iZbptL$z`jB}WwzjRPILBZ;d`YQ%ISO_^01;i z|3@_YU#|Z*7q0*Mja>hA$GHAqXK?+cE4cn%Nu>Way9dhaxc*m8i5dP?5`BBZzXSVC z^L?EI{#PZIucQ2S!I(*pD!_j|r5IJ7-#W7O-AP|ddE!6yKRARE|EC+@Y&eK~!okK$ z9r3w!g@$DR7@OZujeLG%%|w{rFS3SwewQ6>nqO}|zX@IV{GPu}{G|CkMt!ubKX4zP z-x|sN9m)@7G_R*oe4+lp6_?Qepu4>8HR6ltN`wsJOYH0Pv4}6)-?KvyUmPdSkEH$I zlD1~*YIwa;^_)HO!#*x|ZVI>vBj@MGvt0jZ9j?DpORm4APLuwHP5O(t{uiw{Kb=Q% z{lA!T{cG_&xqtnKyz!Z)-W24KTOZvBqr8&Vk8P;m2zI}{1pcG^+x$H4M`DNK{f{U1 z|AYL~%wrroKV@9;zUllz?$&<^rx#+fpILdQRmEgac3sST$-Z0+vMW1_$=*t?k7M;u z_sG&`BC?+f^=&7i@2DVM>QK9A!e38F}iL*aJKe^Pp9nkl{)X+_0 z(!b;PElu^4?heS`_AHi>sJ~%;OJx;KVt#i|Q^C1F+J8Boj(M!~YsdPU>3j#(_fQ|U z8jttuXY9I9pQHZv?RNOy2JcUu8BfkHeIltp_Q#j5hJT>_@dWsXu|e-r;U8*B2V8}J zxY-b~9R6XT+m|@nA6HABT)!Irf%eDO!9OI%T#jolS9lG49xdBw34A_1zww3U_mXc& zYqmetI`TU03n4!*Ig0qF&||d@@Nv&qt|NS=y|M4a_Fu{(Qdf?Q#Q|H=I@ z`YY$p{A=Kk^RT2_oIm+?oIfgbuK@UCfc<6SkMtbQpY;nxj6X7t`1h4fsz(WOg^~C} z{h1$)!Jizf)!I})G5F!om?``AN6cc$^LehpA7Q@{s%njRsw&@PbLHZGTr5@{~+4(hZOK{R~J-G`ANpj9o{a$ z-ypT&IOgZ>%;lp6eVdO>{{JrI?~Yx!7xIT@tlit>{}b^2hbApsK)z1ifqMA=z!OHs zkneCeIRJmJ^{DLMT(0mM@_U~lze93uLR)Nxd`;8&=8%6+#?u7+h)=(>0`j{U91vwr z505Jdy!jCF9VZ@W3;9oC7ikIdnvY@@_rjGC4=L_tiCEmT*ehaj@8ue-8^c?bAHnE< 
zu$wO4e^e0C8J}CaH|7D4qt7=(j#|TM$^4V}N$aoe;OF({3sWiolrtJ1Y$#^yuk$Y8 z=S3;=K=7k|!4KA-=BpHH1b#jn}sbPg;MiXs$ova$MVZBgE;f{JedL+sCbQ zm7te@lJRuJ^|`--#$cpA0NN)*9jgBvd`{o&!bNsU7^0?-_oi7gyP5N2cr|ciB3H3;#g z`gFen_=i#BRA0kCQ2$jC^c8@BE+4$5{^b7Pi+6IBFLyTxLR9&-m!9~=6Uzm(ADs6Tn$67cDKmsz2ZGrci5zq$M-{j0eC zO4e9k+bJ(;gntXSb6U>zr{@PT{rk@3`cFSa`ul%AHYS4W@9y2Cza&4Ie;eYB`90wA zClB*Gz+90jP1I+K;nN|{cx$rwU zf7Z6-{28gm_;a!a)m6bC4PVZm=wqBe)wRSQ)B2m*gE@cbTsq@VT9zQM`Pj6+@aFkj zd@=0n`WLx3JbzpFqbYxL=K0&jhdh6a^5pqjWz+hiit3|meIapv0rG9FyG(@q*tHF7 zA-`XSbSmU~om)H)@)vz|F@pTAn>RT^{=*`j-(VS`?shJ<}A^Y73e8_%dfKL|#@4K+yjw=^y10TnU`+MN;o@XuutX$d@k zI66{NeyVI;qfck(?=Zzo5&CzJ|Na*E)LqPE@v*>dV<7Zjx;e^BZ^V**ZI!h{p})3* z?PD?NpIUagyt!QAHQ5)fFH$VYzG!`6@d^EWC;0(Hw{ZvSpJhfst)}F$e(vW zf2zNIfWFg1H%U?c+`}T*V-sHc68nKL;HLZ^$qY-Zjglm@Y!NJ z@+a^a8}y9oGVl+B1^}O%4Ix*7&p@}@4Dt`HPPTEd13v0nsloVrVoah$Uy-0c>~mJ$ z`I&ruw9J^Tk8*VH^YxKYTf|@Y$YOK8KHAj+@pn{^nm_pmS|6>RTqXO{DE^*tT_ub78=`pWFy&XR&3uh+BR}`jU0Q_r>l-Y(!18m4EGvq? zRNspYqxehpJ;!g%o?A7t{>^UZUQ6Ep!1#Z#H|M`a4d?&Q%bfq$$8!GLP38P&_N#W} z1f7dy{I@!dzo(X^NbLLm7avytnF{&4W49DQ{?H67Rv)gI``H`ce`wN%*_Te%%%@e*JqcOeX$N{q_&#_o&~>yukeKsGD^k z^Q-Tww(NJKL8lL4Oy^>LTc`IY8_Q zd@2`SVD+hIomwdZACvz3#pIvE);`br1O3Oy+n!_indXfU@cUaz&4~pb=9eF! 
zznachs)z}H+8+$*3jFDOrDT1Z_|yB$)d}E_O~{Y6jGv~nNg z9{fKV(`z32|6Xr9?@0&$ql2eS0{{=VyA?B=6|ZAFuyKK^}^Zw4%l#x`=2N^*#9%7#?hExlm1Klk^j-@vv2QR%_}06#gCHPyKI|;J=*etxdGQKX|5Lw{qZ5`}=O-|LH9kS$(F{)!B#sBJUC_5)f)V{Z?9Pjp2UU?qjL{bcc^{a z5~EkN8=a%W{`JZ#d~T1@i-Y*y$>P&&to9i_aamt`5NW{F99IAZPBcKuP)Jf9U*v68uj{)vL|$ zKhbrYo-_aR;na8dpREt*`3mHJN?P7|fc&JxCzsQRFN0f46(c{%*IBzC{)hV8CkuKD zBjJzpNAN$i{*{hEeyFUd9|8Qs?KVXK|LW6yUV#6j#%V4A-Y&9+{b~IeI$Cx7JiNA8 z?LQv)4`}4@c1+W@&`C;vGrq_?xG!H;%BtwPx9YbpGKXa^=aCt6@O+xzCl{s zeUN{5=Cc32zr*~e?f54D*^T^Xhb4R74&nZD)o$)TUs9hYJD;Ax{pX`L+5%^YHzf4QGBr z{y?|NLzJJ->ft+p_Ro=@C*Q{WQ2pnEfJgIDME>oJ>+%fvw-CkDAx-hi1^&%TcS$My zn{TjwDdIbwf5iC<%0JI_C;vwI=fuO94|nMntp9?}KQ3=BU(D?5;R0@75BhQY(w^I7 zUs~M0yhm~S%JoEi@9@dhm)lqBQf^%?-#h)4TpTAVN#ccr-#+kZdlYxW)Si|TuIG{4ke*U1a<(!pjitM5{M?}#8*7`gwa zb1cmNs}AG-U;iNY|HJjT|9{W?vsd&M74HAdwsZggwyA$9?mPGYk=24+VFW)<)lXW) z;&1d9F~#4mY3f%idHn6Th{xZFMd0V*^SgfV_}fl}#ox^Q!94zMtmW}{iMAkD81e58 z?APKD;zXqKt}f84q2a|T=ouKlUkUoVxGU|#c<|4+qj>*PZgM6*cVftH7ktkzX-P8V z^!8Hn732yd@e}!vKlqtrwbPsSN5;ONulN=GEO?b%P5i|ENG|-p?z=XH;Nx!P#a5L6 z9QE1y&vE$wH6pL>;HP@_lmFp=4vpgcSrEnfQYQL452s zGpic;x$4BA3fOO)58b~<@lke3PJ@JevM;$oEApWK7TfX9p#RvQib2r7rc^sbO#G(v zr`MqWK(~K1$iHbG_##}OPE?0O>_C@jW%{j<-S=vGe@-+tp67R9T zJ2M^fX?;igx5%&Du2Fp4*sa6q8pt0bf4Ca*3rve7>W2T7&*}qoP6PFU;kSvO%KO)s z4@dlRU3)1#~q@X=f6us+2vst-(N=j&f~nsp2DYxkkUy9K$z2>pAX z$z$txn^0*3roZW;bm)Ii#-lUzi%-9&4E?(p>@lGHKi*`g!EESH^+`7WPhw5a3i6td zkWc*^8-Wk)f6at^TE7p4eCj`$3;EQ4k`4Jfc{`%9K27T*MfYd0{jbCLJN2LZZ+sa4 zKat&o{~PxZ|6PJwjxgo?PZ`bmuQ-qKzw(0<_`X|thYRulOTFKL&z%38Uvd85mfT-p z{$VUTzkf9P%RS(~$>*Xj_YcwsxqlGt=KjISkNF4XKPR|lpE4Ktbeils0eDdV zZ84osX&D=Lwj8fL4NEM5&-gJN>m}xw;bYi}`OlbvjfBtGkvH!>;rJ-0aeP)8a(p&k zilnai1W6Y*0-V2taV{HL@@tGN>L znST&B)dzlVV)&aHvicR(2igOFst?2gpDqS|Ij9f(^ZUyk;Nv*aZ7cA361ya>xm@8j ztIs5A8!`RY#rI_T2kw;P`j?f%|66#rjpq6lP_1i8Y9 z_?|hZ*dP21ZT}Pd%NOGFG<^jpAMhe zvi(OXy`h%`xxy%-xJ`M-Ix)rV8;y&Wiz#mJQF>&HeC1^H#c3jn+k>nlmtmY>t3mtR zSjYC!z~}DUk4?q*V#7vkf*k+a_mcjZ?6=*V1E=BNUfg}Mm;9TR&xh_su;20-z1P9N 
zDSC$M!G7ud$b8tVrEzC%IzO`3TlEgRALj7noer?yna`F<)LDW5th^|S-{>#0Xb1me zko}dN?<={kKMnZ1N4`GE;;*^`-Ls7PIo*fM>gR>mfq&4e=hXj${-RH91YCp>_SN~R z)^*s|0guK<;LiZ(!K+|jWx6Y#gCEhB4ST_#c6AW}l;39lC~#~5e+<%On)V-8N$lJG zS3c@v>s!FS&fT`j!u>*?Eowd>K2rbud)OEC&(DQ@(fu|HFrVg^pA9AZqW<}-Kgqsu zzl}BQi|)5E6XXgb>^HQqZaw6Ox9c;S>OaHhwx}|P{801yDUcr~{>18Ua%ru`!F~@+ z4R1yAss2+v4fgwJ$FQl8PyIzzf?Qz~vGx0CH_Y!V=XS}ozIb=Wg!)J6b-3NOWccUm z)2iv1-%;b#0x-WWva|ANeQ|7G%5c{2VzK%lJy(Y9U;jmaPjdg(?>YIm z{%RNYv;#h_YZqB@|F%z&{5$!#yV}6tTPJh{_ivl>xqmZDu$RD z^e5KgQCsP`x$Io~!$Vl7KO4J3j`qE9uH76Xos(zx=G8RXZpAv^X#AhA_#WL)OZOnK zbMkKmxxxtlllwt$Ec{Qsk*NdrPp`zRWar1{_j+gy|FbTx*LwJ$;NqF6Z`1v0KW*TD z))-Cd3%sbmj-CIb`_m-$r(j=lgVsm^AL`!-1U_R=witlc*S_B6kAe*f_<^l!{M*ZC{vx1+)2-G));-w2P8kRPp#A-h2>LrtG%lw7 zg$E9&oXYT;?gyiLV6lIFU$VcB<}Ygb`vCCM>1wJH>f46@yr=$WQ#v1NG!y*H82XOw z54wA8M*lpW4^_AgelEOl*bwvWoY%M!^CKUzL!!6w#4s`PtQIF{ks?(-G%(kCVpul^mm--&g!?czaz0O z`j7tHKcGK^{KNBV_=k=DnKIlz==b3MLF+5`52?l6KZLOTLGlk*xqrylZSoIS1i8Wp z{$``ewyp*~{v&z5jO^~|txR@5>h_TXrC`5GmnP-&`t7mSVzS$}hBE6h*7)pO#_PAGc)kGL zLp{I%_G{J4qmt*A6AMepOaA=I2P;2*GCej2zm=6_)qmoZ}8`U zr;IQ7Gr(D=J@`|mYx4^HiMDKf2>!IIJ3W^2x9jN*XFGvE25HjF{_f7SSt7`5K4N}l zG_zjP{E~lLc@X|hYNK*f|NmL4%d-Ce_n2Sp?^$g9?l^JbGV*UVzT-Ci;r{I$_1D9{ z72cANkM)uEWf$bPp%c8G1D^()KFU~M6nPD7M1C8V@Ol{X+e+(`uYiZ{qDEF;4|~t3W@v^ye-KpZ@M42vj=$H@k32}@Vw6UT3^I1y0^Xx<3B22 zAK`s^&c_n^9Nm+C+7kSy=X}g1Idng8_9sDaVFZ7uKcUHg+V2BC)c?%(FDmAKX#xH` zG?AYRe01`>lF5HMS(`7@1AiRuQjDX&Q-4CaAg}qz^M^-u)cht>Y4Et4odFmeQm(C9!q4gK_pA=pNU;B?Vih%u6 z|A~a1!T+49UttXYb2LVM9P;ylo%c1u|3n9is^Nbk&n>0rF`~Zh%+3dWSLwEu{15Ff zM_&P6s}l7`kpH3mWn+mr2K>XdZO#J!bGO%aB>&dUz}Pkf_)~wvBH&N`3BQ3q^(Ul5 z9`z@zCH((ye}We9Puf4eQNTqQu|75F`;gghNL8Uc>^Hiu2i?a__WR*f7uYYIzkUz< zrTf9yeJ!-V^Az|DZe5WD`_0#}=?43y^VbsR*dTw|rbXWH58Hc<4Tb#C{dajn{{7tS zru#!a#6W&Ub#emm?lv8!SSJeABKP@Y8r4`A_=&3Y{oUki3h5lQJQ{*m>qa;H6?a;U)N4eEVj! 
zAXgZ{AG%)`_cv1h@Ny#M4~7Ak6#Icc9Sx>cfj@TggFYbsY5tRv1^&?eV0OeGT3?(n z=lMfcEAYqv#97JtC48wryaxEca+WdS_-~H}{^53;je&pl>At;zKkbkA0{$+t^T*Np zI(X!Pwd{Nu?T@qkiuT6~1igh3_C@!LI>Ej|6{L!Y|E=zu`#y(#MNBnn3Hze>{u1^@ z_ls`k@!fqg*%!t45ji6IJr18*{)68qrKjaC$Q4HT&)#RUS^uO>s1%F8nx;l2@IO?a z9}NE!pI$Hp{-=w9YbVs_+n3Dh3jag-+b{h6NvyfVy`}$=&-8y~!}M?eXENz8H?{jA zSFZmYeXhS(d#=CwW3IpFX0Cs?#a#c^qqzQqyal*`weec=WWmAxUy}y7SW>C8%-x zI+8^5`=+PXKU=tc)wgW2FZw>SFJ*3D_H|@m-CykZsKM>avdO;YJ`nU4Mqc0Bbd=Zk z%zp9uUdd`+-@B01RNqVH^}PWbczsWG9HgIod0Q!od4$~_Nic>B*w4oTj4ZL!+-0sJSO?-(v2pYW&i zN7D&^-2bo@`T51uE}wxv-T!bE{)g^=r~>}&3%*W8DEy5A$2i@XkV+F~_{T|OD zpYHcq-CTe8XIrh@E%49j_U;ulzjVLE&NcAQSM1!EAU;@bC@z72ru!v=;h#$?Z{d6^ z-7jHlBIf7EUErS|Mk)QZK0xdD?eGsFifOnXpxgKfa@*h^X#Ktw{=qkx&h?Rg!4A2( z~AJp4XdzlSx~AO17IL;UvIR)jOt^T5|U30g8cctZjFQd zb#clqnf&5uvmw9LuNJ>ZKGnyJ&4JgpV{)wjj?NFB{F{90f4<1|Pr6L{w@4eC+>z_Q zOqJ`ecZchrWy1B3ea7{7e8lu`)u_Ssf2|=QpLkx^cxW{E?=WTAD)7I1eBB!GzwY97 zwtr0L7j}aG)L*d}`{SM|A$8!tw!#K`@ISRIX?%0J!fP?@8|VgH=>dNe;@K`*MEeF# zb9S-&{Yo#?2a9RnAiULmc2BMU!M+znw4d;(V0#Kaw{-8Vcko9$4n4XLIcg2=5_vS) z7p=ciQ^~$){k1F}{i}Mj=CbnzqAP*vurGQ(ODyb5`}?iouqVfflSb0|E6FJP*e<-L z`?nQfUwvF|NcbNS&0mVewiV!ivW%Jx_6OC*-iQbPUDqxz0{^46Hbes7PLG}>U_QNd zcF7X|Y5zLgl=x5kUn>@Z|GN(zlvuw&e>xwk2>(FmLmRL^GQc?G1pGt6tMjp>KlML! z@P+=mm4(&Ncek<&)u9Ti249L|_ld6&?VJPs)w54@6>t|u&QE<7AM1@wE~0*`9kQG! 
z-&*|ptt)yZ2y%sy+gH~2Ci}Wh_Vs9^e!U_1DY_CxeM79i(UaR(b~d*!GZnJ04~zB= z=qqCO6%)(ttEfznD~!kwX@AY0#a~aE?mYeu*}~(mO?w`He~;(!H)1pDGd_ATQ+WJs zW6tBRjgug+`N-|7u06M}z0s&Ybnxvnn%h@YIk&HN=FGm{NpI)&rDsX@rTk(|{B68m zBf7}?+tsr#1xUze_`iF{@xK#|{Qpq>5Ot1!oGZtF&2Nss6YC!!{Mr2j##zg9Fy9U~ z-p4uqDiVE0e16}u`dd|Ei5ldWSP^m%l`c%U6@Q z|AFv9|Ltbr^UAq3`UB~FR*x3Iht?PD`5m;r*Z_P+jZ-fK9xk%;Zc}~|qGPCh7_Ti> zABx8O()vOozH|O3YIFXZ!9TRSw_uwJx39QF&VP%6VrF08nZNN>3U0^wZ$E(B*Ze+B z{C_HeKjqc+UpFTr&;EGg-~-CL>0XLbE#%?G6(<)XKc{;Q7h!y+Inf61Z+&!U67u+A z`fg4vKmX>ag#4VI(`nIMKKzfH@@7T&pR{+Qwj=+{)bM)`|Kr#COFaC~>e7=Bk)J3g zhqLoj850xykbf$aw`AwFWYh*Vy4@67SH4CDCEInso`t%Q7vPwnPJ(0K^dx8DR({2KeJN~Z(( zQ$9mk4Es{_y!sCOq5XYp@Ws-&$7-tYtvf%`$PTX$Pj;OR{?Pr966b#r_v!p{ALIev z$+ee}7ts0TM&t=Ej7QPECOE%rNzXUJ`DL{Yc%RNMMiBqse@n~r;rw^hA^maw+L`me-$>5?S#7X?{rLUSRh<8xP5tLzyGh6= zek|K$O8w33e%JBf&*=WneZ<74`?=R@uzvAtsGEiLOGS0iD#WX9GjCK7f2cmQ(*o<4 zIG-z&2cSNaE#W^oKOO0L+TiE#Wt^WrRKEj1f3f>vR+k>%&-ob|j`)}{F_1kUP~lDX zOZY8r<)Kcmx|>kr%p_FG^iCLAM`6aN7JhK5Kh;2ao#RUNpyxDVIC=+i=@7VlrW zMfbYX=S~d1dk*;fCGFn>IlaBiCF|Sd|LOS}gW&(S*p8hB|35b9`78K;>R;Lp|4!#C zS^asS+xr~a|80A_$9tB4Qvdc@{GHBM{?Gki&L24&=$ZzTX@Ew#_virlOAm8NrKFR(Z<{w@yLwpHs|K|zWZ=2H2y>4>{ z@R{H1#!To>>#q#xA6z^olJuwbSF7*Pe~r=NLg-KHFFNOg{I+(8g#IM|RJ}zSM6!H(B-`p4Yd{>daM)Ilt>|~DLvnp{^4E;XbA1e{x zVZXN4%Jf`oI={cJEBc4&{C??V*ss^QtE&)S7JZGQ{wVC*`-1pKk=7 zC;O%I`wJ3azt0`c{k6WGVzDa~_$SK@8$|kdoqsZ%#b4L8uB`tdTFZs*Z^ZiLS_JU$ z);V<*_?KJsoeKP4cAC){`0qY+T+)9MK2gidS^uQdRWA|kUyO~9daMb2(vMG;0zMf- zpRni4xO;7DdVXL3yTE^8oHTo$igVuAb--Uf!bjp9C+tEuAo(!tB*e2VyT8I|j@wSy zQR#)x(XgxVR`;f2^gr0Y3hzHE*rp3RT)Ov37krPNuR0TQX#E)6e15tA+(OSKhX2&5 z6S4Rn*T2bs%JTRgn85vKLdxpe0Z_2M|efJgp8??UL z{Tlb@56p>uiugk3`(7hHcJyjT&#}b%?gz`4cPonmDF5uL{Ii_h_pwHFX)5BQdUp6< z&OgGwc0IuSetLd02K*n<<-}NSUk^RGeLeqx`Bms)w}aVN-p47J-!~hdMsWMOw20f6 zL;c_ISuySh^$lVDm4x_0>#yI`AAt3j-7Ya-e@y^B!zwOj10T8{PYd|mSkkfs@S*$h z=$ru7U+*RN*Xa9J+VQqxk{3E5Fc0yo!Ny|0nEt&YuddFBk6{V5E+YCqop1dHdAf@# zl8OIxKfmc8G5veDSJW;L)4#8>|E5=S{eeIA&)g;asek6$Q}oZgkWM&NThH?1GH!@hi#PUC)iT0dIuW%d1B 
z(eu|%ko*qIy^qd-ebN1vr(j>c!IQ7S|IqoPj^ux+eic22?2GDGhL!mD=zhz``1k34 z%h}E4WB#nPW1~3!$KG=MSJJ)Cz+ax7Um*N%a{PVPiW&Zut2zFy%sKua3Y+k6)m%Q- zcM4PLeGuR2{>HA3z{GIww z{`dJHrhf|E4+8%9;(l7reZ_ z)L%E3?Gs0BJyIh=UElwgzlhcKm%CzrfSxNph3^ymIEwdc8dq1*=cvCqonfW6UXQM z^Co;)e=_wi-Q@V}YESqm?|8oRCtg$k(p-*@$v{D_Fd}};$~$`y{Iu(`xRr?bX^VcpJ>qVHpVXgh0Del%{@@|V6-Mx?B?sUm{NzQQY>Jt1(4PaXaARrwTD!x2-TmK23S|3K7Na>5YNcG4=_S z(?dM>POex_pQCkP!DD>yg|T)zE{O4+a5%H1sr&hy1=M`QW2>)EOjn;$2U;h&kVS-#?1U~KN9I*yI zFYZ3FCw#m%E|YQxKIJp|e*->>p0O=~5A}Dc0uM{$?#1Zu(wIAz?cW`qyh|PUP=EXX zh+lWSNIu1{oZ%F|hV&owb0XxA&K+e>@+p4BTR=WNpCv>@@(mUG1yTI^v-Nt$0{nZP zhNagafBcxPDT3a@$n|%ldOTad&&K@&nzvKubNv-Ja{VJaasBP{xc(jLN&k(Bqs?OQ z`tW3z3tayd5_X3Cip~dZ6A|BUm8FNnKj-BwC>4=C9B9!!0`_4%t|~`F{8s4HzoUrw zPWPX8BmUF*py;89_bRqp^n6t8zu%S2pNaoZZ(U`5sM|+&8Uw$plsrxsJTHu<=SUFm z-_HM1iLu6q`iSWL$_tgsu#*Y@RE2^6R=u3_U`O}w9DFR`B8=db|3T_wV0q<%3EKq4wV5P%MC9F0UwoJ9=m`K?SK74_|W~yuk{EYxi0{GDV z>aK!ZVZ`_C>~@`i|H%xF$N4OJzQ*1{_@88pTzmMR4ds_#!~eYeoM`}gUvi8iNj^Pa z%!1t?^0BO?KKu_oUreGuL`=V1-)6vU;NdFOubqf~x4zkpTO#`1#p$iziRpKzoZrUQ z*KutV$&cXvf&8KP+;Qtig}{rRKNtWx^!y7|L2qHi`x9&jFNXY6R^8dX{`c*Nxj;^A z*ih=rVD~57#^@F8YJ>N0G%jQ7ggr`?tswto^wqhLL-!|C3vz`K_$=FG!OkDi`AzIU zQhg?HJMf|Uj0^DbYxuB$`RD4S2Jo@l%xltw58XdscM$%W`e*I{AL^fx$R8lzN;}j8 z_=ir|e^12nlbN-UU*!4K67s_m{#gY1mDUIPh*^I1Nd)=V-kDRMDDvAM{UKjvf6jD4 zZ()S|S$P*GK)zj<<$BQHAZO}C$S=9Rpa}BaBVVxozzX#}ve182kZM;Ei!YNSAfMt( z1LRB1uG%cf6-LgV-7=g%$s0L;0$BV?wzwtD`IE}(S1&(jpr4EQv!IsqXNd>o6My=0 z{ydU6AC3GcYWXWeo$CheV`#BG&mZiJk^j`lsj0I3Ax?qa z*XEp8r3C*iAK`mn&|4TW?^Hi@1b@7f>(_vHnkVyForj*!z688Ex_z|~Mvu9w8^Irn zLlN{jsvk~03jUn$GUYbp(DT_P@*lk4V`RMsK1cN*_WVlOwl`uSr&evzX2=a*EWHh* z>!Gi7KQrn-B`%P^zRG7NzL&qll*u=_en4`6klXJYj(-Z~m+aSbA-CVW!5n|OkBZrE z#t4qT(H4%sBi$QH_?P|;X~JJ(exW}-zoyB4)!4lw`ere-&Sm$9(7pcb-ca%{xIg3~ z)wf~4w+?gr%{u`7>Hd&&kTdt!i7^81!ie|j{*!1ieU9!wxr6WBw^ye=K-ll_!;nk& zpNKJfMelor_ir?=a>nQODBUl^_fAGre+bDT`<)}m6-F`f+bMgQhKTrntL$b+*jHZO z)Cv*V1I3pZ@Yi@;je&^xt%-ppQ(!NAbT<4SA089GXF&UKAKm} 
z<8R75ioXMsjB@gt;_q%AeOXOl1zdztOy8&ez?tCB_Fm&uNIv;DM_useer{GY_(RVZW6vL~sE$7l|Ilq_Zhw+b z_2;y&;7^>-ol5XWcFFBwf?Qz~(Yzh}^Ml2?OSwrFVw$%TL$mNU-V@V&2gZlhL5_=iZ;AMV_r0R`?8oQmxi0S^pZxk1&Wn^y zkW1$i+5PtPoETQGJ!RGNB|dlGeq=PhNB8V>gB<_bf4&NO3nTEQ=i@j4pOC5-(ZDCV zZpsAUQ}iM92JqSX;LuyxS4qp;tRJw$r(MrQ#Gk>f^=<&4d>yMNz(?t_(Q84jFhagn zqERczKfCVX4$N<6;A(rw_mjT-1oACBo+m*5dzl_>u7B_D z0xrUc_;RX#G`rv9XiS?HRR7col9KgBe2G5w!xHQ3$a8a9f8*ivcD0Bv-<99pq4+}2 z{|Z})_35g_g;}(|ruwa6KS8cA!Y>U}{n;9Pj8xth1i#hL*kK6#SYSNuTQXkLIeQup z{`r%M_b=s^+`#8f4B4EA@A)OI%ZHraUdj^vKOBF#XCnBAwGEaG|MXwoIR4JPIsQRo zIR1`PIQ|x08U9(xP5xnvE64wgg#YLG&tJvy|DDV5KViY{pYfCa#Gb!q;n{vA$KPf+ z$N#7j!(XExi!YZx$FJr1OMeq^5k{`R7Tp^J|0XXhV*V{Nkn6wAnCri9CfDD3I@f>5 zFRuR;70#c{=eYi!3k12sC}#X0d70~m{&;p|`P+LNQw6Sn@Hnpj z;Q^dKr_#9oZ6^tGg%SGG{x6*;VEb$PNIvcVo-2X=bUsB7|NiRIAXVtEm>fZI0{d%; z$e-zaiaDNdPW54RRp|fvNLEilt}rruv~IEZMbB4a_-kkTvHKgT|1Avo(DRkPK)%W@ z?{1J^Qkhpq`cwa#=@ybt`9GZtA^8uZI!V-*z$>~RtT%W@_k*C)PRS@>GIy- zWy*O6ihqdj2@ZIl?gu-G&mFgZohPF2`6n4RJs&)9gP^xCLO$i^%pa5AZH@2Uw;x3H zCiq?5VUX)zTR8-ySM&~byifUgH9ogTDWCc4lhK!+LJr0E?Sfok1U^pLE2@D1t+Lcv zz&|ffneLZIe5d_3!pC@A5bHw29{`}qo?!w6NuNukmSJLA6=UxHM zwQ93ZGW-{}F6Q`Gv-~r-_vcw0|G*w1hX3L?j=#Ue{w(md(mvh5^*^Y|^F-JB zahd+_Yq^G86Ml{UNM=o~hy9wEzCa z68}E+hb+MSQh$gg<~L*Fg%+4!h4LQkyoZe1kY1SI*GH~N)_*8Ig=^b>q52i|*V&H3 z{_OSvgWSYazjCOx`ic70@i8yi{nag{X0^k7nqPj5{tfD{dm~5nEA-cWMg5BU>y8R~ z3nTQm)w<34>!|)PgZN4Hhy7vTCq3V72lS`iuP-Tr&!YC&FsJ@Yb`OOTzY)kV?>m%3hnBQcJyn&eC4dtnLdB!KRusU9sE(9*y;o1$N5~^3;EQ)@llW~jEw)IWV&$vcUjB%fBFdL{}wCm ze|{-5{vU4HlKY=$<2nB~mvR2n{Zq{U?2+96W%-ZgwEx%Mm;YnAeSd4BK}TsgQaLm@ z(ww1Ex{D&wq$r|Ug(Jfe8c*}2(x6e)F+@U9N<|tBr9mk&gbXJQXqKGHv)+4M_xJPt z9Ixjec%Jjq`?cPCt+n@Fd+p)6?(4oT8Rg5tXq7>9{`tB8@{WhZ`6pPG!sj2Wl~^D4 z#%wkd=O2xJE5!LHb<6L_r$q_#jcI+L^Us_s@^;G+zwh)om05=S^Gia01mgD{9osqL z{S4WPJ5S>N{JG>T@qU9YrrW#I|2ty7_gGqk=l6V#H;Mlj)Bih~#QlH#Q~VbXidW_N zyY(s0-=4R{{N4GBINzR(FE!!$dw3d+FV-Jb#Q*y1m3MjmeosOCH8y9}$o%)LqX#cfe6K-LC?Nd~cp3gZb_Ago=#Q817_)iTP)`#*3$B<8oeSYc6 
z>qBgl=bw50<_v@XWRERfk^h6zX`K}F_h1m_2b~|xrTIG@>rd&+Y(?@<_YZxt$UmJQ zw$<|dRX-q)iyS#W6<(vCuE#spaDE={!};kxp7WEwFC+HHAJLqjl?ypPJ*qfA?W#CG zw=_AwVSY`@3O$7JUEamM6XIiQ>G?g}zZz6V?Wf;I z4j;w%9xHkF0OQ--@`I+l-EzeJ|0KID(O5qd6$iWG{J;4(hb~w@H@R%k#rk==pDR5d zCEoAhi1WVx;FS|;{iNp~Hm#-glb(OD9*6al-cSBP?pKcJ7yT|F&OiQ%)m;%EouIs{ zh(png!7mXP`o2a7AbnqBFL=7YWQq7_uX7O3HNHEKHz|(uIMndlo)r#ZrPv8GdMIO?9 zqOtP0$dU6WGMe+p2iMnhKJS<;^2f8gE$7di9L}F$GtQrEC(fTg(`Y;>f2?#ke@+_9 z{mL=*5Sm@|gkukeK1Z~chdA2EvCUvA9pA11yJ+uOj2 z#s~R-GFt9ej(mMlwGHQoHwpRp{dC%9n**MFeQ{0==Lh=!kq=*AEO5m6;lt5Y#e97+ zX*^$FIBnqT3oUQCUpZo2^beek!uW>lXm<XC?;3`24Q=skuNdcDKhy7D z*nb0lf!^E=QO+1&`W@~H{Pg)2I8p9bj)>2)e%aj-A3EQP=a&YhC!a-pE-l~hh4|3_ z%RV7K^!u7J;#2za&IXDG^?vH!N%2YB+vJTxZr@0lzjeDn_|d)ZTi`dvMf995zOUyThuvuk1 z-EAk%S0|d+^7+b97tcS?|7UCYe04tu&p&iAU4M(tS7+xxIJ8HADLpT`Gw^%*Pr&U2NRHo^!w&b`A`3!ibej16rL6D zC%o}d`8VYMQ_W>tF+Q~Z`y>Bp{nteP-?ohWi1p#-ocr}+ebcGgiTtPazsdQQpC4FH zzo+5)u0$8lhh#pz-wVHwR8H*V$k%rv;{UC`&hqTR*LNyw>H5y?<5zlLt9X83?RI{C zVE)m6#s_iJ?PchIIEMEODMnnseXCGG{EwXT&O+SRZ|&v?+^wK97W{?Vrq7^vbU@Z` zu-kWjcUSo7v2#w7>m&AWc0nfmM~n0Aw{O0)*nh4&`yWR4*Wmx>hr)km-EQ_z-@g_9 zy~XuckDcS1yk`jWx3kAz-k87Z=XMp}FVpF}Q5Ey|<;?J2n7;*`nk8ZW+Vo!il-|EF z)3eH*-k*W@Zv+j&{C!~ir2jwTBA3zrZib5bXou_Z&SG6`-S(J1+F9GjI~(n${a2jR zH|?eWu@WEL<5@RqKkdKE%V2kR^=x1GvAOX_li#nw>mT^jHI?+}`bheIqGNOTDT^&! 
z1^=NfDk6aUcd0dk&wX&*5AmVrBnHAieTO&P+t&yAXn0I-8s#VLkBM5mKNcHeeQ0j^=~ua5Iig|sug~kl>ET$PZ+z@BoY#lB-K6!QjMs4XwWsx&)`vCt zJ`_FQ`*AF<54EOpzjDO-M$etS!8%oLx8!G9x70Lu7ieM~J3lowh}JcF?kv+5>sx7B zSv2@HH62@%9zDm_brS48FS~pd@uTP1nw$^OKKecdy+0Sf-#yDl{+#~2d^X~f^ET)Q z;^X~T{(gXafoQMF`;9ti|A1C%HOQa4gIz5VANoE;w!GbP#JH5Ey&Q&dT2rI$iE-=r zmd-b#T^YgLE+a9{FEr?Tx-4;(_cYf+6`kyEInbzd`0`aHoyVi*R?eYlm zei*vGb3y#KDX7jv|LFZB^#1`|-|5ox1LFB8+@GZDJKYPI4;ssi=sO>{zDw7aw_A?z zzvj22(*5V1M<_n@eW=1#@c)-*`#Shv{du7t{A(AV@`L~Uj?cTnf85@Nsqo+1U_li8 zm$i?6CHE^wtpD`;cml2e^nBW775aU=zuM0?0Nq@2CCL zTLHh1f48bx40*@22Lt){arGwG7wrH381|oX6~Dj7Z^$~u{zr-TWBmDhr1bpT3Rm{u zvnBa|UzNY}jZFB@vSt5Wnx4-oK3jVF(YXNkZ~JA?{G$6a7X~3d`xIM*BR)&Z0$U@w z&VI;A71yVBZLw~}beYmjM(Y;cR}jC$({nI%A4L2Q>LBUS_35qEuq&D|bS3=I|Dc-u z{)qEo?mV~2xPS7#FdgqVNc`o-s?J#oDVBJ zhIhvO6Z(GQa-0tj90;nHw_A?5zQ}w#E&E*W6B>o-?j_F_`WM|Jssmq=i4Uv zBMTj49$o1C37GhBv|58ViieBt#PczR@A72`8A z#+<2)FKijVtpHNJUl`qv0u$d2^afJ>gIT8eBKRS*nD?q?dFWnXbm8yy5_aJ7r6;*u z^C#vVk{Rc2WnL?svEM3S%67+vVpj&a%V%=hz^WLK%YCG*<90?A&qYnG=z;_@QVKBLTi%^@3Q9<_yDk=amV$Rk2xCNLg-%e>ki z$Od70!hT5Trz_yIa*9_mKYTK9Na&B{%$Hj*miaONeY~XCNwO318WQ^X74!No;IG+6 zo`S4fZqXK~w<19bj#O0PBfJT|&zc7XxK(EM5KMwMUkPAB*lP)no%3I)s zk3r0PIx^DxD`7XG-U+f%rne5GR|1#&z2owRY0R$@<873=u9E#K*|Xl3QH=hx8BeNk zd7r=7Zto(-ZRO0jc?`_TDHr{uxV}R<#lwsF5;3lnA2*q&aTofWH2IM`6Tj- zcp6`0@I_{$HbZYjNYXh*8ehTdUxI9u89EwBdD9X+1vKQyvLk$2Ujx zdq^ndCGlA~Wj@d|2-E)!NaOH^`Ql9GhpV!^r(~ae5xj2sh!Nng*&cCa+0X^3<9o#v ze9?r4<&Z~&_~kKfnIL#GB{2?^N6)|;Wv&wQlj=`}JS3F<&oA0lstlR(pab)Ni+Mur zRX};6kFW5n<9qWp%5}>}e*=HbHpqnK5k){9-xO8gh!B4#e|#2=P$^>i8zdaeF)uvV29%N8NHmvF^|~ID@D5 z>B%CLdZmTRD4&&jT{D(x0HYsIr~W{m;Lqh2d3!PriIAuNa z6P_>*w_|<)jT7{Sp&$KtnrP28<59HUK!11(Mmn#Ne9d^A=ucYc zxRcBqiuy@E4>H+n0;#{P8OuaHsYfCPvbo)+?Li^IZ2mZR5d0K}^ zj($9e)={DN^}l695kJzG;@|f^>kXIptC(f(C-u7!a$4vBD_QtZt>qftr+}_sA_ueV zb%1fzJLWUeS@w9u==YTQYnhBeYZwDnx%|KF|Cj&Kte26y9fF6)GLbY=bj-j<4WgY;H%w>>Lpw+c<&wdkl&6xFZ{h78Dh)wZbe3) 
zTGn&ZX4&@z%kyut>=DR#d?E9GBU#>@%ottHcGWQsfTgZfbsFQF>)PB&qf=C{cea!qrryxKi?2;-CH;4`kY!T6H?AV=0qmh9q2K<`J% z4CFJlC;KzX)BHRnevT8%zw2@NV*?=7ca9MCcVU02KTCT3Q2wK2&SFVllQH`R^E1T! zBL9YvKTnMt%=V%^XrIWRYWK&Y|I`m1lvDdP1bWZE4!gG_eJdF$KT+=OHjrhX_l&zS zUq~-x1|!{v5PCa;825H$K1q%7OfN>tYoX_-&obwInxBL4_oHM&2Kbk%$q|exEr35t z%tT(2-h9{>E5*o&r~dS+MfvBc;nMuP>jAmi{jQOy=knjkQz{SJ$@*Gi{uL{o5$g*1 z8;)}FFRxeho7!;(_Gx}|UZb4ab4aqk1DX8!aXZc&hfMYTF4ZUUk>o+F-%HB>m;r1T zF7ig?r|`ch7yL`rGzHezKFoTNd)R-17G%o*DZ>91g=Cf=h~}^V zu4gXU!@v1X;EJm^9-=%jpz{%wCyi@&0{%J&oL&ah^va@n_cXcALP-PVw*q?%gnrch zGGXtcTX>G`+S57@e`&MW-_O0-&zULg=k5W?&ja?;JxcP^19mRD1;)&)FJn2^fy+zl z*w495Y~N-(`+LxV>+PV+_4ORV_1|7D`ORT}JwC9%XXDvVWH9?t9K(LHjMz`>ME28x z@)P|??u&9ghcdE9yb~6({Fe#zf)jnpp*P3Ra|G<8;@2F9?74650hDh{S!@RWlFl`tT5x2klFn_Qk>gnIU#Tc|-GgC1fvex*ko#1DW((!M?u5a`==od_O z?#bvN@qwpVkIuou?&vAuC*$*6;aB;w9`tVos>`4kb-z@Kb8ct&U2*lQHTb{)l@riQ z8mA=2QPbl2 zUB{o;U&>bY*QNuv`}th(@^r0#P+ArZtPQ{f3%N_VL@$ zqkVlZ{L{Wq`(6}zd5)h=9m<0f?K^^xieDHAJ3WVM3gid- zzu3R@9O(X(D0eDh{RI?P*v}O_?GMqAX}{Qqa@s!z!B2nX$F}gJneo{Lvex*c3Gh=e znSQS)y*)}kI>N4RporrhrQohy?yS%J8BO7*eT&g-f3=zLm#G*9y{zU3)w#a!bY4XL zIbuDCieGaQGVPn=xV=7NU!Z;VBmC$&{DpDAe!GVIwJw15R?|KPJD-U_&wUFwg5Q`z z`zO^$`({4uX#$xENxSA!_`=oIl#C|FED`8(g7WLc;>?zi5+TUp2!4L0uwEu~9g7!se zf4e#p`m{eb0BL<*0{^rwzeIhsPM<>kTH`aqfV6IQKzuYa)$O5QFnO&S^!6x)KM{6) zujeo(w`M*ll=W7vV|#@K>@T^N{dC#I^@hiAea&f~NBw2T!PCA(*HdDjQbjrKTOqJl zP_4>Gzi40k#PW+|E?+?F8}uEW*xp0rRa88#yD@$qqMqQy70R%qeT~Ky<9%9z<)Vr3 zL;D`(0rZ!Nc|`kS9{eSZQ+Wn{#nsy)K7j!}#5$+xb)VKxFC_U9E>Q?{c#FrcF{`)xz>8}^DL(2mXlks3f6uMGA>=LF%WSj%&5 zbz=R{a|k`e?T--S9~Eya*0bQmsXe(LlQn>z`^GB5-^LWHB^;lbUa(V8eWHo_zv?Gl zkmP(Ww-)>S#*|57{r229dNli+7zIDUiI(f37ZpFV5_bKSBcG#Pv`>lgrhRKR^jG(t zC+1(lfMn7cLa~*6}v5 z&uZ?T#OQL6`PfbH6LI^|5XdgNPeq>6I;#Pm#$z-16<1TkA=7oo5ZEV;YkLj#(Yj0P zD#k(SXUKFN@*3rI-7$sj%fxy@*ByJ=UR|6&lE(cc&Os}#rq5?T_f^?X>$B`9ZVXV* z;p{8)r-kO0>nxv^^3%~3{(=){(LRT~vmFbh>!c^(H>OM%{k&vkzZiB3s!yptj8}32 z%lY+a=Ov?A;yk!9W%3BNm-S)4Q}t0E6>m3<{adDk-=nnf8u)_AR?We$?z_$j{BNc 
zbAjvKlgafRUe5K~iTM!~?>GfK^}j#2d)719={cMh=PZg_0?TKMSkE#J_S7#U)~9?Y z`I3?C50rcEqx<0^ev{?#%7Z<{Pmht}#_^^8ig=524aIpjc*>WJ@JIP0<`L!7Y}j=U zh@*LcJZUZFAI*Pp4xv040lUBeb(){Zvvhl)JYVR#Rh&yGpC+*%$}i40b;y)|L)k7> ztecdVSJ+=?F@AI&wM03cPsMfl9wo~wu+tj9ydTH;+Gfc8mH!muzuG!f4eifrK3A+G z5x4J1`{VrIgdgj$@$gUQ)MU25agY7|{(=3R@#OmKI>ArD~6+5T0UNyxNcBXZ#|Dqi*Qe}g!@N3$+4dfWZ zeKfw{>HFKL&s2@Z9sF@xC&2HV!E#6zmusAW9BuhZPhh(FL$0ru?w_C?Wp)zMxfkv6 zr|UMf`wgw5XwUd0ftFPxff2JV`Y}GH{cB*m^XG4#XW7n%(Pu34i)kN6`Bd6RU^j!t z0Z6|G30z707RonwVEOP&mh)&`fZlTvkBC{>ze1*dPe*yGY3G)ZDJ~9>!z!x#Lyoq5 z@sq%G(@#K(?=!|yHRw^CEm^ zkOw)&@Q*dXNEK&V?;vlb^AqCITI6k{itjbH8#7IyWtnJSSVa}xhY)&VyvZ(|{rFyF zbf)!L__vbk<2+3_p!*rfixN7YV_cq@K&Ez!yrX`&fv5J@hoSL zW_&LCbN+l@CzcP7XT8lLKO$zWoXGN2Z7#Q<>kpCdLCh}}`5Q6IN6PEj-@%7fv<`Tz zVv~r^cKyd6P`+LNiI|5}-(IQy9>UJ@v6$DS_x8W-Bz?;#biIT2l3%Vz%v-8|C}e7n zXh)>V?A0tgsY0LHT>_ctKQkuUW#Pv}Lb4-)IucKxSxeS`Mp#dH1i{$f#|l(*@_ z(ojzMJBj^!?P0%*#CWBe)GTJ+U91PGCY7R}G(X~HJF5poc5pEPZohEh6ytf?zpDov zY<$&=u{IgFsU)og7};UjG~o6N@h2J2IWW?FXTjq>81y`>fQJ;iga9{{e5C7l$g0D* ze92ni_6u=Z5=!-7p>q@Lmk(wB3f)(Pow5<5k}hylN!4ZG!Nx+m4uyPvH_Oi_u^i>c za!v$euCUvF;T&D3iS~2DZh>bF#`Jc{+J!hwerlsa?yl?Dl=cyT;7l|+g79&F6D zM)|?UoERYWe=p008jKW&w~dXJC)L|Ac3lH(Y^)pi4}K-feVQ@$TrQy>u(9!TD9bwf z5)NVP{aN;^SvFl9i6~cX7$>k(Hx=EW<95;%r~er?`F#U(g7?x_7LTrI@B@me4F`s zN11QcOp*^v<(bTP>&U!gcUfgav)q`0xxngk-){(vSNO{Ou%*D77RFhO`bo?i=mD$a zTTEnZZo$0X1z>~LwPeQ9JjN#r858<3(tb&LxiQ0BfYtHz{V$+Yzq~zE?`bY?F^TmJ z41hH)^bfMEcbUtZPi0wKoy(`*WLZZ;l%M;`^_;8MWckM~#=qCGo^C#{rp5TyED!oF z;Wy?@rFiH*fLwjZIIo^}t}n`L)QJtsb8 zoU{~Z)foPeF`^6e7K4B;Me&ar&$%~5B=M17{7bbTTG9$3T}=mWGV=(Lh?^akc*HZjJpWq#s1N&b^D zb~4b^(_9}&{mEf@vMtNpAD5zYZp=?|X0#Xzv}%k{W(+T7p5tay&~7X9dwK!o{WSID zep5Vpg8xrHx!=_OjcgYvwflgO<^47F%9$>U)@iS^@GNw^beQ!vqnarA2DV?`XT z8Yv!+e1>+upApJ?w{)W#O1pV z0yX+X8Ay7t`W-P2?-ut_dD?hYj{BnYM-_AmgEozi7l=&6Z|L#3p%C%*TT_se1#qz+8 zj0Pitu9e;mK&RNkzZeT1GB35K@&m;k<HF=JjaW5IdG{A8eOI&_m)aSzb=#j`rjyF&4xFUCIqcF%Im+ zsQ!jg-hXWq$gY*nQazPY++9R`oMQ7t|D0lT??aF9o`gTx&PBw_sj^Z_^5f6t^8P$^ 
zV%|-(!>O|B9m|Nv*0U->6TBJCCIfp+xVM)v^SxBw4%k^ajPg_9ehDvgxy4pSQyOpB z=X)~do|DR712sdwY!-N;-BRXF&46;hDnX_Rl6;oSCzeV$5ZGhFBN|6w?pL7Z3B_Q6 zA#XP^PtTEx^6TH2e-$M06mQsNrUR)S(N3yYv_~b#Lf$XYAL941o|zTn1ezDZUihW> z&^U>>{SMTW`a|uC5pu{EsoX-UH{T2V|LUjcH?=<;JlRQpA03CR5;Spugp`lMPKu)` zk#7l>Hy%i!h#AqS$YP7%nr(W_;`)z`2xu{o?f2990QKyneG2V3 zb_;0XJolX>mr3QKe#1-q2S|Leq$kDW*frMM-HpqGq;iLklDvz{{a4Fsw|C3dJ9bJ| zy|0CZ^2e7kk`(r4-6aDxjWTp0VgdOn*n5X;x%rKMG2Q{r4VYpF?kio z_&f=E1=^HX#J}nrAI11kg?XQPlBvBku7uRyj^L>uW0-fiAn{$9&rOhUhQ!-QdOIXL z4~e&9-hKf1Y`v4aA!}?KyA;@dih4EpLfbYbENf_T`2^9 zI5U4=;ziz2{DZ+${tRTE^G4(m^Jh^`d3g@y|H;##e}HdKe;Wc{X!Bl`Wy)L8zE@|! z7ijxDg8Zs)b|qtc81wVmvflG#LgZPI@MGH%dL(N-25M}xs0UAZx0PkHzkwouh^Kxh zK&JL~WuEd!@SIN~zeIcTe7IigO{7Qdi4bw5@!ie*Y;Evj{6)QEZMfc(0r0D_ZTt^l z`zadA;3}F7jf2SBLYtD7Y~OYcP-7c?AC>g7 z^$t#DSsQsn^|f9Cp7KUUGPRrXNMI%X{S~}_1LQX*4Ol-28=DhSeC$QuQy#pR>ThIz z&ORXV1~j=VMvEp2WDylzF?czzH8x)`KTMF6@V{CrI{cH|8zSXWFrBA;H4%*-UmGXOs*P*X^y~)7 zsweqg{EM!esJwQp4xKk#_tj-O&xGCTINBfI?2+A3xcflj>AC@YC|xh?kzIYUv^o6Q zPOh>BAMhw_8vGrD-L=hKDh) z6T$qEi$L4SFKGWYuDPq-(L!ps2lEy6tp95p<_lB7&r6(k8+>E7r7pK~_zH^4{x)52 z{RzI(ICUDATN|KW`{hsQKGh!CzEI-?*n5n;MDqsi*Rf~pDaOOS+3B64+~{X_v?ti1 z^>@^}cH7oJzUKPS`!zO6ORzOZRr+`u_gTdTW<4 zu2x~cR=*0nW;wqLztwfmxSiJTng1~bC?D6=aet0O`5lGyXz-48heVtUZ!JE~{DVJ{ zA8%W=r|U%IL1Yg67T#JU<;P!iKMwqvy`|8(F&9+?1}P5J$_Uf*)C|4BlE}Go5P?r_r>Zx$ak7^}PtZ{qoA;(7)wBN|DFs z-c|NvU&i&=xpKWWix9Vb182IQ?CpA?`H>oUZTtn{Nc-|`?CL& zmxR8NGM$%^ZweG2%qMf&w<(URjVD4cu14i1_tTNiX~s1vD_V(sq51U$_LK)%@Js82 zBgWx$!>%{X9~OD!@%8mwZjaG7ZqKl9+#VCU?nQfI(_t58kxu6-jHAaVl)E?kv5M<& z*_rFNDg^H_@<}@Q*u!S_9Je8=g8%wzFxr=pvkZ+UzlGU~}cB#6rDXZyvGc2V8UH`Q-UVm;+ z2+Q3B23zmH2RYdKP&3vqo(o)bEK!5?ciOPN8eOloH1<||O6MzJ-4^iCUk&XUJ5XFn zu7BkZj0icN2~0a0RmXnz%!f?vSO!cSN$-ClOi`x$!GiDp5_$_wI?_4}`=D~>)iS`l zuUoobl#hFw1U}f>hvEkR;WS_2C*dcQFFF=S;{`d2^1bWLvwi1Z049!{Sp|#;IZD?L z@aMUmd23_HE>#&bA%~?X4*`<>&)~h)p08)jp9MbptI}dd*(gSOE>_sJpzA1V|F~=~ z_LoBEXtd`V(srXod``Cxm-i^A?>P0s+xH5G1K 
z85igQ>zmzhW4&ZY#?#_=Tk=1W_2|6-qTb*HmSc;6_01lvW_jT}Mz<#KQE;NLv>|1%H%4~YwTqW}N^ literal 0 HcmV?d00001 From ec39fba68c889ea46e0e91106f891924e361a078 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 15 Feb 2019 15:27:25 -0700 Subject: [PATCH 091/180] Add local copy of netcdf-cxx-4.2 --- .../netcdf-cxx-4.2/COPYRIGHT | 42 + .../netcdf-cxx-4.2/README | 28 + .../netcdf-cxx-4.2/config.h | 93 + .../netcdf-cxx-4.2/ncvalues.cpp | 331 ++++ .../netcdf-cxx-4.2/ncvalues.h | 279 +++ .../netcdf-cxx-4.2/netcdf.cpp | 1658 +++++++++++++++++ .../netcdf-cxx-4.2/netcdf.hh | 1 + .../netcdf-cxx-4.2/netcdfcpp.h | 469 +++++ 8 files changed, 2901 insertions(+) create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh create mode 100644 mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT new file mode 100644 index 000000000..e21824767 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT @@ -0,0 +1,42 @@ +/*! \file +The NetCDF Copyright. + +\page copyright Copyright + +Copyright 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, +2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 University +Corporation for Atmospheric Research/Unidata. + +Portions of this software were developed by the Unidata Program at the +University Corporation for Atmospheric Research. 
+ +Access and use of this software shall impose the following obligations +and understandings on the user. The user is granted the right, without +any fee or cost, to use, copy, modify, alter, enhance and distribute +this software, and any derivative works thereof, and its supporting +documentation for any purpose whatsoever, provided that this entire +notice appears in all copies of the software, derivative works and +supporting documentation. Further, UCAR requests that the user credit +UCAR/Unidata in any publications that result from the use of this +software or in any product that includes this software, although this +is not an obligation. The names UCAR and/or Unidata, however, may not +be used in any advertising or publicity to endorse or promote any +products or commercial entity unless specific written permission is +obtained from UCAR/Unidata. The user also understands that +UCAR/Unidata is not obligated to provide the user with any support, +consulting, training or assistance of any kind with regard to the use, +operation and performance of this software nor to provide the user +with any updates, revisions, new versions or "bug fixes." + +THIS SOFTWARE IS PROVIDED BY UCAR/UNIDATA "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL UCAR/UNIDATA BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING +FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +WITH THE ACCESS, USE OR PERFORMANCE OF THIS SOFTWARE. 
+*/ + + + diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README new file mode 100644 index 000000000..e816934e9 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README @@ -0,0 +1,28 @@ +netcdfcpp.h the C++ interface + +netcdf.cpp the implementation of the interface, on top of the current + C library interface + +nctst.cpp a test program for the interface that creates a netCDF file + and then dumps out its contents in ASCII form to stdout. + This example may also be helpful in understanding how the + interface is intended to be used. + +example.c example of C code needed to create a small netCDF file + +example.cpp analogous example of C++ code needed to do the same thing + +Makefile makefile for building nctst + +ncvalues.cpp interface for auxilliary classes of typed arrays needed by + netCDF interface; fairly simple + +ncvalues.cpp implementation of auxilliary classes of typed arrays needed by + netCDF interface + +README this file + + + + + diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h new file mode 100644 index 000000000..b0f2b81ba --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h @@ -0,0 +1,93 @@ +/* config.h. Generated from config.h.in by configure. */ +/* config.h.in. Generated from configure.ac by autoheader. */ + +/* if true, run extra tests which may not work yet */ +/* #undef EXTRA_TESTS */ + +/* Define to 1 if you have the header file. */ +#define HAVE_DLFCN_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_INTTYPES_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_MEMORY_H 1 + +/* Define to 1 if you have the `nccreate' function. */ +/* #undef HAVE_NCCREATE */ + +/* Define to 1 if you have the `nc_def_opaque' function. */ +/* #undef HAVE_NC_DEF_OPAQUE */ + +/* Define to 1 if you have the `nc_set_log_level' function. 
*/ +/* #undef HAVE_NC_SET_LOG_LEVEL */ + +/* Define to 1 if you have the `nc_use_parallel_enabled' function. */ +/* #undef HAVE_NC_USE_PARALLEL_ENABLED */ + +/* Define to 1 if you have the header file. */ +#define HAVE_NETCDF_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STDINT_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STDLIB_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STRINGS_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STRING_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_SYS_STAT_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_SYS_TYPES_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_UNISTD_H 1 + +/* do large file tests */ +/* #undef LARGE_FILE_TESTS */ + +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define LT_OBJDIR ".libs/" + +/* Name of package */ +#define PACKAGE "netcdf-cxx" + +/* Define to the address where bug reports for this package should be sent. */ +#define PACKAGE_BUGREPORT "support-netcdf@unidata.ucar.edu" + +/* Define to the full name of this package. */ +#define PACKAGE_NAME "netCDF-cxx" + +/* Define to the full name and version of this package. */ +#define PACKAGE_STRING "netCDF-cxx 4.2" + +/* Define to the one symbol short name of this package. */ +#define PACKAGE_TARNAME "netcdf-cxx" + +/* Define to the home page for this package. */ +#define PACKAGE_URL "" + +/* Define to the version of this package. */ +#define PACKAGE_VERSION "4.2" + +/* Define to 1 if you have the ANSI C header files. */ +#define STDC_HEADERS 1 + +/* Place to put very large netCDF test files. */ +#define TEMP_LARGE "." + +/* Version number of package */ +#define VERSION "4.2" + +/* Number of bits in a file offset, on hosts where this is settable. */ +/* #undef _FILE_OFFSET_BITS */ + +/* Define for large files, on AIX-style hosts. 
*/ +/* #undef _LARGE_FILES */ diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp new file mode 100644 index 000000000..30c642965 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp @@ -0,0 +1,331 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. + * + * Purpose: implementation of classes of typed arrays for netCDF + * + * $Header: /upc/share/CVS/netcdf-3/cxx/ncvalues.cpp,v 1.12 2008/03/05 16:45:32 russ Exp $ + *********************************************************************/ + +#include "config.h" +#include +#include +#include + +#include "ncvalues.h" + +NcValues::NcValues( void ) : the_type(ncNoType), the_number(0) +{} + +NcValues::NcValues(NcType type, long num) + : the_type(type), the_number(num) +{} + +NcValues::~NcValues( void ) +{} + +long NcValues::num( void ) +{ + return the_number; +} + +std::ostream& operator<< (std::ostream& os, const NcValues& vals) +{ + return vals.print(os); +} + +implement(NcValues,ncbyte) +implement(NcValues,char) +implement(NcValues,short) +implement(NcValues,int) +implement(NcValues,nclong) +implement(NcValues,long) +implement(NcValues,float) +implement(NcValues,double) + +Ncbytes_for_one_implement(ncbyte) +Ncbytes_for_one_implement(char) +Ncbytes_for_one_implement(short) +Ncbytes_for_one_implement(int) +Ncbytes_for_one_implement(nclong) +Ncbytes_for_one_implement(long) +Ncbytes_for_one_implement(float) +Ncbytes_for_one_implement(double) + +as_ncbyte_implement(short) +as_ncbyte_implement(int) +as_ncbyte_implement(nclong) +as_ncbyte_implement(long) +as_ncbyte_implement(float) +as_ncbyte_implement(double) + +inline ncbyte NcValues_char::as_ncbyte( long n ) const +{ + return the_values[n]; +} + +inline ncbyte NcValues_ncbyte::as_ncbyte( long n ) const +{ + 
return the_values[n]; +} + +as_char_implement(short) +as_char_implement(int) +as_char_implement(nclong) +as_char_implement(long) +as_char_implement(float) +as_char_implement(double) + +inline char NcValues_ncbyte::as_char( long n ) const +{ + return the_values[n] > CHAR_MAX ? ncBad_char : (char) the_values[n]; +} + +inline char NcValues_char::as_char( long n ) const +{ + return the_values[n]; +} + +as_short_implement(int) +as_short_implement(nclong) +as_short_implement(long) +as_short_implement(float) +as_short_implement(double) + +inline short NcValues_ncbyte::as_short( long n ) const +{ + return the_values[n]; +} + +inline short NcValues_char::as_short( long n ) const +{ + return the_values[n]; +} + +inline short NcValues_short::as_short( long n ) const +{ + return the_values[n]; +} + + +as_int_implement(float) +as_int_implement(double) + +inline int NcValues_ncbyte::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_char::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_short::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_int::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_nclong::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_long::as_int( long n ) const +{ + return the_values[n]; +} + +as_nclong_implement(float) +as_nclong_implement(double) + +inline nclong NcValues_ncbyte::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_char::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_short::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_int::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_nclong::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_long::as_nclong( long n ) const +{ + return the_values[n]; +} + +as_long_implement(float) +as_long_implement(double) + 
+inline long NcValues_ncbyte::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_char::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_short::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_int::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_nclong::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_long::as_long( long n ) const +{ + return the_values[n]; +} + +as_float_implement(ncbyte) +as_float_implement(char) +as_float_implement(short) +as_float_implement(int) +as_float_implement(nclong) +as_float_implement(long) +as_float_implement(float) +as_float_implement(double) + +as_double_implement(ncbyte) +as_double_implement(char) +as_double_implement(short) +as_double_implement(int) +as_double_implement(nclong) +as_double_implement(long) +as_double_implement(float) +as_double_implement(double) + +as_string_implement(short) +as_string_implement(int) +as_string_implement(nclong) +as_string_implement(long) +as_string_implement(float) +as_string_implement(double) + +inline char* NcValues_ncbyte::as_string( long n ) const +{ + char* s = new char[the_number + 1]; + s[the_number] = '\0'; + strncpy(s, (const char*)the_values + n, (int)the_number); + return s; +} + +inline char* NcValues_char::as_string( long n ) const +{ + char* s = new char[the_number + 1]; + s[the_number] = '\0'; + strncpy(s, (const char*)the_values + n, (int)the_number); + return s; +} + +std::ostream& NcValues_short::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_int::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_nclong::print(std::ostream& os) const 
+{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_long::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_ncbyte::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_char::print(std::ostream& os) const +{ + os << '"'; + long len = the_number; + while (the_values[--len] == '\0') // don't output trailing null bytes + ; + for(int i = 0; i <= len; i++) + os << the_values[i] ; + os << '"'; + + return os; +} + +std::ostream& NcValues_float::print(std::ostream& os) const +{ + std::streamsize save=os.precision(); + os.precision(7); + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + os.precision(save); + return os; +} + +std::ostream& NcValues_double::print(std::ostream& os) const +{ + std::streamsize save=os.precision(); + os.precision(15); + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1]; + os.precision(save); + return os; +} diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h new file mode 100644 index 000000000..e7655e4bd --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h @@ -0,0 +1,279 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. 
+ * + * Purpose: interface for classes of typed arrays for netCDF + * + * $Header: /upc/share/CVS/netcdf-3/cxx/ncvalues.h,v 1.7 2006/07/26 21:12:06 russ Exp $ + *********************************************************************/ + +#ifndef Ncvalues_def +#define Ncvalues_def + +#include +#include +#include +#include "netcdf.h" + +// Documentation warned this might change and now it has, for +// consistency with C interface +typedef signed char ncbyte; + +#define NC_UNSPECIFIED ((nc_type)0) + +// C++ interface dates from before netcdf-3, still uses some netcdf-2 names +#ifdef NO_NETCDF_2 +#define NC_LONG NC_INT +#define FILL_LONG NC_FILL_INT +typedef int nclong; +#define NC_FATAL 1 +#define NC_VERBOSE 2 +#endif + +enum NcType +{ + ncNoType = NC_UNSPECIFIED, + ncByte = NC_BYTE, + ncChar = NC_CHAR, + ncShort = NC_SHORT, + ncInt = NC_INT, + ncLong = NC_LONG, // deprecated, someday want to use for 64-bit ints + ncFloat = NC_FLOAT, + ncDouble = NC_DOUBLE +}; + +#define ncBad_ncbyte ncBad_byte +static const ncbyte ncBad_byte = NC_FILL_BYTE; +static const char ncBad_char = NC_FILL_CHAR; +static const short ncBad_short = NC_FILL_SHORT; +static const nclong ncBad_nclong = FILL_LONG; // deprecated +static const int ncBad_int = NC_FILL_INT; +static const long ncBad_long = FILL_LONG; // deprecated +static const float ncBad_float = NC_FILL_FLOAT; +static const double ncBad_double = NC_FILL_DOUBLE; + +// macros to glue tokens together to form new names (used to be in generic.h) +#define name2(a,b) a ## b +#define declare(clas,t) name2(clas,declare)(t) +#define implement(clas,t) name2(clas,implement)(t) +// This is the same as the name2 macro, but we need to define our own +// version since rescanning something generated with the name2 macro +// won't necessarily cause name2 to be expanded again. 
+#define makename2(z, y) makename2_x(z, y) +#define makename2_x(z, y) z##y + +#define NcVal(TYPE) makename2(NcValues_,TYPE) + +#define NcValuesdeclare(TYPE) \ +class NcVal(TYPE) : public NcValues \ +{ \ + public: \ + NcVal(TYPE)( void ); \ + NcVal(TYPE)(long num); \ + NcVal(TYPE)(long num, const TYPE* vals); \ + NcVal(TYPE)(const NcVal(TYPE)&); \ + virtual NcVal(TYPE)& operator=(const NcVal(TYPE)&); \ + virtual ~NcVal(TYPE)( void ); \ + virtual void* base( void ) const; \ + virtual int bytes_for_one( void ) const; \ + virtual ncbyte as_ncbyte( long n ) const; \ + virtual char as_char( long n ) const; \ + virtual short as_short( long n ) const; \ + virtual int as_int( long n ) const; \ + virtual int as_nclong( long n ) const; \ + virtual long as_long( long n ) const; \ + virtual float as_float( long n ) const; \ + virtual double as_double( long n ) const; \ + virtual char* as_string( long n ) const; \ + virtual int invalid( void ) const; \ + private: \ + TYPE* the_values; \ + std::ostream& print(std::ostream&) const; \ +}; + +#define NcTypeEnum(TYPE) makename2(_nc__,TYPE) +#define _nc__ncbyte ncByte +#define _nc__char ncChar +#define _nc__short ncShort +#define _nc__int ncInt +#define _nc__nclong ncLong +#define _nc__long ncLong +#define _nc__float ncFloat +#define _nc__double ncDouble +#define NcValuesimplement(TYPE) \ +NcVal(TYPE)::NcVal(TYPE)( void ) \ + : NcValues(NcTypeEnum(TYPE), 0), the_values(0) \ +{} \ + \ +NcVal(TYPE)::NcVal(TYPE)(long num, const TYPE* vals) \ + : NcValues(NcTypeEnum(TYPE), num) \ +{ \ + the_values = new TYPE[num]; \ + for(int i = 0; i < num; i++) \ + the_values[i] = vals[i]; \ +} \ + \ +NcVal(TYPE)::NcVal(TYPE)(long num) \ + : NcValues(NcTypeEnum(TYPE), num), the_values(new TYPE[num]) \ +{} \ + \ +NcVal(TYPE)::NcVal(TYPE)(const NcVal(TYPE)& v) : \ + NcValues(v) \ +{ \ + delete[] the_values; \ + the_values = new TYPE[v.the_number]; \ + for(int i = 0; i < v.the_number; i++) \ + the_values[i] = v.the_values[i]; \ +} \ + \ +NcVal(TYPE)& 
NcVal(TYPE)::operator=(const NcVal(TYPE)& v) \ +{ \ + if ( &v != this) { \ + NcValues::operator=(v); \ + delete[] the_values; \ + the_values = new TYPE[v.the_number]; \ + for(int i = 0; i < v.the_number; i++) \ + the_values[i] = v.the_values[i]; \ + } \ + return *this; \ +} \ + \ +void* NcVal(TYPE)::base( void ) const \ +{ \ + return the_values; \ +} \ + \ +NcVal(TYPE)::~NcVal(TYPE)( void ) \ +{ \ + delete[] the_values; \ +} \ + \ +int NcVal(TYPE)::invalid( void ) const \ +{ \ + for(int i=0;i UCHAR_MAX) \ + return ncBad_byte; \ + return (ncbyte) the_values[n]; \ +} + +#define as_char_implement(TYPE) \ +char NcVal(TYPE)::as_char( long n ) const \ +{ \ + if (the_values[n] < CHAR_MIN || the_values[n] > CHAR_MAX) \ + return ncBad_char; \ + return (char) the_values[n]; \ +} + +#define as_short_implement(TYPE) \ +short NcVal(TYPE)::as_short( long n ) const \ +{ \ + if (the_values[n] < SHRT_MIN || the_values[n] > SHRT_MAX) \ + return ncBad_short; \ + return (short) the_values[n]; \ +} + +#define NCINT_MIN INT_MIN +#define NCINT_MAX INT_MAX +#define as_int_implement(TYPE) \ +int NcVal(TYPE)::as_int( long n ) const \ +{ \ + if (the_values[n] < NCINT_MIN || the_values[n] > NCINT_MAX) \ + return ncBad_int; \ + return (int) the_values[n]; \ +} + +#define NCLONG_MIN INT_MIN +#define NCLONG_MAX INT_MAX +#define as_nclong_implement(TYPE) \ +nclong NcVal(TYPE)::as_nclong( long n ) const \ +{ \ + if (the_values[n] < NCLONG_MIN || the_values[n] > NCLONG_MAX) \ + return ncBad_nclong; \ + return (nclong) the_values[n]; \ +} + +#define as_long_implement(TYPE) \ +long NcVal(TYPE)::as_long( long n ) const \ +{ \ + if (the_values[n] < LONG_MIN || the_values[n] > LONG_MAX) \ + return ncBad_long; \ + return (long) the_values[n]; \ +} + +#define as_float_implement(TYPE) \ +inline float NcVal(TYPE)::as_float( long n ) const \ +{ \ + return (float) the_values[n]; \ +} + +#define as_double_implement(TYPE) \ +inline double NcVal(TYPE)::as_double( long n ) const \ +{ \ + return (double) 
the_values[n]; \ +} + +#define as_string_implement(TYPE) \ +char* NcVal(TYPE)::as_string( long n ) const \ +{ \ + char* s = new char[32]; \ + std::ostringstream ostr; \ + ostr << the_values[n]; \ + ostr.str().copy(s, std::string::npos); \ + s[ostr.str().length()] = 0; \ + return s; \ +} + +class NcValues // ABC for value blocks +{ + public: + NcValues( void ); + NcValues(NcType, long); + virtual ~NcValues( void ); + virtual long num( void ); + virtual std::ostream& print(std::ostream&) const = 0; + virtual void* base( void ) const = 0; + virtual int bytes_for_one( void ) const = 0; + + // The following member functions provide conversions from the value + // type to a desired basic type. If the value is out of range, the + // default "fill-value" for the appropriate type is returned. + virtual ncbyte as_ncbyte( long n ) const = 0; // nth value as a byte + virtual char as_char( long n ) const = 0; // nth value as char + virtual short as_short( long n ) const = 0; // nth value as short + virtual int as_int( long n ) const = 0; // nth value as int + virtual int as_nclong( long n ) const = 0; // nth value as nclong + virtual long as_long( long n ) const = 0; // nth value as long + virtual float as_float( long n ) const = 0; // nth value as floating-point + virtual double as_double( long n ) const = 0; // nth value as double + virtual char* as_string( long n ) const = 0; // value as string + + protected: + NcType the_type; + long the_number; + friend std::ostream& operator<< (std::ostream&, const NcValues&); +}; + +declare(NcValues,ncbyte) +declare(NcValues,char) +declare(NcValues,short) +declare(NcValues,int) +declare(NcValues,nclong) +declare(NcValues,long) +declare(NcValues,float) +declare(NcValues,double) + +#endif diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp new file mode 100644 index 000000000..6cb0e748b --- /dev/null +++ 
b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp @@ -0,0 +1,1658 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. + * + * Purpose: Implements class interface for netCDF over C interface + * + * $Header: /upc/share/CVS/netcdf-3/cxx/netcdf.cpp,v 1.18 2009/03/10 15:20:54 russ Exp $ + *********************************************************************/ + +#include +#include +#include +#include +#include "netcdfcpp.h" + +#ifndef TRUE +#define TRUE 1 +#define FALSE 0 +#endif + +static const int ncGlobal = NC_GLOBAL; // psuedo-variable for global attributes + +static const int ncBad = -1; // failure return for netCDF C interface + +NcFile::~NcFile( void ) +{ + (void) close(); +} + +NcBool NcFile::is_valid( void ) const +{ + return the_id != ncBad; +} + +int NcFile::num_dims( void ) const +{ + int num = 0; + if (is_valid()) + NcError::set_err( + nc_inq_ndims(the_id, &num) + ); + return num; +} + +int NcFile::num_vars( void ) const +{ + int num = 0; + if (is_valid()) + NcError::set_err( + nc_inq_nvars(the_id, &num) + ); + return num; +} + +int NcFile::num_atts( void ) const +{ + int num = 0; + if (is_valid()) + NcError::set_err( + nc_inq_natts(the_id, &num) + ); + return num; +} + +NcDim* NcFile::get_dim( NcToken name ) const +{ + int dimid; + if(NcError::set_err( + nc_inq_dimid(the_id, name, &dimid) + ) != NC_NOERR) + return 0; + return get_dim(dimid); +} + +NcVar* NcFile::get_var( NcToken name ) const +{ + int varid; + if(NcError::set_err( + nc_inq_varid(the_id, name, &varid) + ) != NC_NOERR) + return 0; + return get_var(varid); +} + +NcAtt* NcFile::get_att( NcToken aname ) const +{ + return is_valid() ? globalv->get_att(aname) : 0; +} + +NcDim* NcFile::get_dim( int i ) const +{ + if (! 
is_valid() || i < 0 || i >= num_dims()) + return 0; + return dimensions[i]; +} + +NcVar* NcFile::get_var( int i ) const +{ + if (! is_valid() || i < 0 || i >= num_vars()) + return 0; + return variables[i]; +} + +NcAtt* NcFile::get_att( int n ) const +{ + return is_valid() ? globalv->get_att(n) : 0; +} + +NcDim* NcFile::rec_dim( ) const +{ + if (! is_valid()) + return 0; + int recdim; + if(NcError::set_err( + nc_inq_unlimdim(the_id, &recdim) + ) != NC_NOERR) + return 0; + return get_dim(recdim); +} + +NcDim* NcFile::add_dim(NcToken name, long size) +{ + if (!is_valid() || !define_mode()) + return 0; + int n = num_dims(); + NcDim* dimp = new NcDim(this, name, size); + dimensions[n] = dimp; // for garbage collection on close() + return dimp; +} + +NcDim* NcFile::add_dim(NcToken name) +{ + return add_dim(name, NC_UNLIMITED); +} + +// To create scalar, 1-dimensional, ..., 5-dimensional variables, just supply +// as many dimension arguments as necessary + +NcVar* NcFile::add_var(NcToken name, NcType type, // scalar to 5D var + const NcDim* dim0, + const NcDim* dim1, + const NcDim* dim2, + const NcDim* dim3, + const NcDim* dim4) +{ + if (!is_valid() || !define_mode()) + return 0; + int dims[5]; + int ndims = 0; + if (dim0) { + ndims++; + dims[0] = dim0->id(); + if (dim1) { + ndims++; + dims[1] = dim1->id(); + if (dim2) { + ndims++; + dims[2] = dim2->id(); + if (dim3) { + ndims++; + dims[3] = dim3->id(); + if (dim4) { + ndims++; + dims[4] = dim4->id(); + } + } + } + } + } + int n = num_vars(); + int varid; + if(NcError::set_err( + nc_def_var(the_id, name, (nc_type) type, ndims, dims, &varid) + ) != NC_NOERR) + return 0; + NcVar* varp = + new NcVar(this, varid); + variables[n] = varp; + return varp; +} + +// For variables with more than 5 dimensions, use n-dimensional interface +// with vector of dimensions. 
+ +NcVar* NcFile::add_var(NcToken name, NcType type, int ndims, const NcDim** dims) +{ + if (!is_valid() || !define_mode()) + return 0; + int* dimids = new int[ndims]; + for (int i=0; i < ndims; i++) + dimids[i] = dims[i]->id(); + int n = num_vars(); + int varid; + if(NcError::set_err( + nc_def_var(the_id, name, (nc_type) type, ndims, dimids, &varid) + ) != NC_NOERR) + return 0; + NcVar* varp = + new NcVar(this, varid); + variables[n] = varp; + delete [] dimids; + return varp; +} + +#define NcFile_add_scalar_att(TYPE) \ +NcBool NcFile::add_att(NcToken aname, TYPE val) \ +{ \ + return globalv->add_att(aname, val); \ +} + +NcFile_add_scalar_att(char) +NcFile_add_scalar_att(ncbyte) +NcFile_add_scalar_att(short) +NcFile_add_scalar_att(int) +NcFile_add_scalar_att(long) +NcFile_add_scalar_att(float) +NcFile_add_scalar_att(double) +NcFile_add_scalar_att(const char*) + +#define NcFile_add_vector_att(TYPE) \ +NcBool NcFile::add_att(NcToken aname, int n, const TYPE* val) \ +{ \ + return globalv->add_att(aname, n, val); \ +} + +NcFile_add_vector_att(char) +NcFile_add_vector_att(ncbyte) +NcFile_add_vector_att(short) +NcFile_add_vector_att(int) +NcFile_add_vector_att(long) +NcFile_add_vector_att(float) +NcFile_add_vector_att(double) + +NcBool NcFile::set_fill( FillMode a_mode ) +{ + int prev_mode; + if (NcError::set_err( + nc_set_fill(the_id, a_mode, &prev_mode) + ) == NC_NOERR) { + the_fill_mode = a_mode; + return TRUE; + } + return FALSE; +} + +NcFile::FillMode NcFile::get_fill( void ) const +{ + return the_fill_mode; +} + +NcFile::FileFormat NcFile::get_format( void ) const +{ + int the_format; + NcError::set_err( + nc_inq_format(the_id, &the_format) + ); + switch (the_format) { + case NC_FORMAT_CLASSIC: + return Classic; + case NC_FORMAT_64BIT: + return Offset64Bits; + case NC_FORMAT_NETCDF4: + return Netcdf4; + case NC_FORMAT_NETCDF4_CLASSIC: + return Netcdf4Classic; + default: + return BadFormat; + } +} + +NcBool NcFile::sync( void ) +{ + if (!data_mode()) + return 0; + 
if (NcError::set_err( + nc_sync(the_id) + ) != NC_NOERR) + return 0; + int i; + for (i = 0; i < num_dims(); i++) { + if (dimensions[i]->is_valid()) { + dimensions[i]->sync(); + } else { // someone else added a new dimension + dimensions[i] = new NcDim(this,i); + } + } + for (i = 0; i < num_vars(); i++) { + if (variables[i]->is_valid()) { + variables[i]->sync(); + } else { // someone else added a new variable + variables[i] = new NcVar(this,i); + } + } + return 1; +} + +NcBool NcFile::close( void ) +{ + int i; + + if (the_id == ncBad) + return 0; + for (i = 0; i < num_dims(); i++) + delete dimensions[i]; + for (i = 0; i < num_vars(); i++) + delete variables[i]; + delete [] dimensions; + delete [] variables; + delete globalv; + int old_id = the_id; + the_id = ncBad; + return NcError::set_err( + nc_close(old_id) + ) == NC_NOERR; +} + +NcBool NcFile::abort( void ) +{ + return NcError::set_err( + nc_abort(the_id) + ) == NC_NOERR; +} + +NcBool NcFile::define_mode( void ) +{ + if (! is_valid()) + return FALSE; + if (in_define_mode) + return TRUE; + if (NcError::set_err( + nc_redef(the_id) + ) != NC_NOERR) + return FALSE; + in_define_mode = 1; + return TRUE; +} + +NcBool NcFile::data_mode( void ) +{ + if (! is_valid()) + return FALSE; + if (! in_define_mode) + return TRUE; + if (NcError::set_err( + nc_enddef(the_id) + ) != NC_NOERR) + return FALSE; + in_define_mode = 0; + return TRUE; +} + +int NcFile::id( void ) const +{ + return the_id; +} + +NcFile::NcFile( const char* path, FileMode fmode, + size_t* bufrsizeptr, size_t initialsize, FileFormat fformat ) +{ + NcError err(NcError::silent_nonfatal); // constructor must not fail + + int mode = NC_NOWRITE; + the_fill_mode = Fill; + int status; + + // If the user wants a 64-bit offset format, set that flag. 
+ if (fformat == Offset64Bits) + mode |= NC_64BIT_OFFSET; +#ifdef USE_NETCDF4 + else if (fformat == Netcdf4) + mode |= NC_NETCDF4; + else if (fformat == Netcdf4Classic) + mode |= NC_NETCDF4|NC_CLASSIC_MODEL; +#endif + + switch (fmode) { + case Write: + mode |= NC_WRITE; + /*FALLTHRU*/ + case ReadOnly: + // use netcdf-3 interface to permit specifying tuning parameter + status = NcError::set_err( + nc__open(path, mode, bufrsizeptr, &the_id) + ); + if(status != NC_NOERR) + { + NcError::set_err(status); + the_id = -1; + } + in_define_mode = 0; + break; + case New: + mode |= NC_NOCLOBBER; + /*FALLTHRU*/ + case Replace: + // use netcdf-3 interface to permit specifying tuning parameters + status = NcError::set_err( + nc__create(path, mode, initialsize, + bufrsizeptr, &the_id) + ); + if(status != NC_NOERR) + { + NcError::set_err(status); + the_id = -1; + } + in_define_mode = 1; + break; + default: + the_id = ncBad; + in_define_mode = 0; + break; + } + if (is_valid()) { + dimensions = new NcDim*[NC_MAX_DIMS]; + variables = new NcVar*[NC_MAX_VARS]; + int i; + for (i = 0; i < num_dims(); i++) + dimensions[i] = new NcDim(this, i); + for (i = 0; i < num_vars(); i++) + variables[i] = new NcVar(this, i); + globalv = new NcVar(this, ncGlobal); + } else { + dimensions = 0; + variables = 0; + globalv = 0; + } +} + +NcToken NcDim::name( void ) const +{ + return the_name; +} + +long NcDim::size( void ) const +{ + size_t sz = 0; + if (the_file) + NcError::set_err( + nc_inq_dimlen(the_file->id(), the_id, &sz) + ); + return sz; +} + +NcBool NcDim::is_valid( void ) const +{ + return the_file->is_valid() && the_id != ncBad; +} + +NcBool NcDim::is_unlimited( void ) const +{ + if (!the_file) + return FALSE; + int recdim; + NcError::set_err( + nc_inq_unlimdim(the_file->id(), &recdim) + ); + return the_id == recdim; +} + +NcBool NcDim::rename(NcToken newname) +{ + if (strlen(newname) > strlen(the_name)) { + if (! 
the_file->define_mode()) + return FALSE; + } + NcBool ret = NcError::set_err( + nc_rename_dim(the_file->id(), the_id, newname) + ) == NC_NOERR; + if (ret) { + delete [] the_name; + the_name = new char[1 + strlen(newname)]; + strcpy(the_name, newname); + } + return ret; +} + +int NcDim::id( void ) const +{ + return the_id; +} + +NcBool NcDim::sync(void) +{ + char nam[NC_MAX_NAME]; + if (the_name) { + delete [] the_name; + } + if (the_file && NcError::set_err( + nc_inq_dimname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[strlen(nam) + 1]; + strcpy(the_name, nam); + return TRUE; + } + the_name = 0; + return FALSE; +} + +NcDim::NcDim(NcFile* nc, int id) + : the_file(nc), the_id(id) +{ + char nam[NC_MAX_NAME]; + if (the_file && NcError::set_err( + nc_inq_dimname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[strlen(nam) + 1]; + strcpy(the_name, nam); + } else { + the_name = 0; + } +} + +NcDim::NcDim(NcFile* nc, NcToken name, long sz) + : the_file(nc) +{ + size_t dimlen = sz; + if(NcError::set_err( + nc_def_dim(the_file->id(), name, dimlen, &the_id) + ) == NC_NOERR) { + the_name = new char[strlen(name) + 1]; + strcpy(the_name, name); + } else { + the_name = 0; + } +} + +NcDim::~NcDim( void ) +{ + delete [] the_name; +} + +#define Nc_as(TYPE) name2(as_,TYPE) +#define NcTypedComponent_as(TYPE) \ +TYPE NcTypedComponent::Nc_as(TYPE)( long n ) const \ +{ \ + NcValues* tmp = values(); \ + TYPE rval = tmp->Nc_as(TYPE)(n); \ + delete tmp; \ + return rval; \ +} +NcTypedComponent_as(ncbyte) +NcTypedComponent_as(char) +NcTypedComponent_as(short) +NcTypedComponent_as(int) +NcTypedComponent_as(nclong) +NcTypedComponent_as(long) +NcTypedComponent_as(float) +NcTypedComponent_as(double) + +char* NcTypedComponent::as_string( long n ) const +{ + NcValues* tmp = values(); + char* rval = tmp->as_string(n); + delete tmp; + return rval; +} + +NcTypedComponent::NcTypedComponent ( NcFile* nc ) + : the_file(nc) +{} + +NcValues* 
NcTypedComponent::get_space( long numVals ) const +{ + NcValues* valp; + if (numVals < 1) + numVals = num_vals(); + switch (type()) { + case ncFloat: + valp = new NcValues_float(numVals); + break; + case ncDouble: + valp = new NcValues_double(numVals); + break; + case ncInt: + valp = new NcValues_int(numVals); + break; + case ncShort: + valp = new NcValues_short(numVals); + break; + case ncByte: + case ncChar: + valp = new NcValues_char(numVals); + break; + case ncNoType: + default: + valp = 0; + } + return valp; +} + +NcVar::~NcVar( void ) +{ + delete[] the_cur; + delete[] cur_rec; + delete[] the_name; +} + +NcToken NcVar::name( void ) const +{ + return the_name; +} + +NcType NcVar::type( void ) const +{ + nc_type typ; + NcError::set_err( + nc_inq_vartype(the_file->id(), the_id, &typ) + ); + return (NcType) typ; +} + +NcBool NcVar::is_valid( void ) const +{ + return the_file->is_valid() && the_id != ncBad; +} + +int NcVar::num_dims( void ) const +{ + int ndim; + NcError::set_err( + nc_inq_varndims(the_file->id(), the_id, &ndim) + ); + return ndim; +} + +// The i-th dimension for this variable +NcDim* NcVar::get_dim( int i ) const +{ + int ndim; + int dims[NC_MAX_DIMS]; + if(NcError::set_err( + nc_inq_var(the_file->id(), the_id, 0, 0, &ndim, dims, 0) + ) != NC_NOERR || + i < 0 || i >= ndim) + return 0; + return the_file->get_dim(dims[i]); +} + +long* NcVar::edges( void ) const // edge lengths (dimension sizes) +{ + long* evec = new long[num_dims()]; + for(int i=0; i < num_dims(); i++) + evec[i] = get_dim(i)->size(); + return evec; +} + +int NcVar::num_atts( void ) const // handles variable and global atts +{ + int natt = 0; + if (the_file->is_valid()) + if (the_id == ncGlobal) + natt = the_file->num_atts(); + else + NcError::set_err( + nc_inq_varnatts(the_file->id(), the_id, &natt) + ); + return natt; +} + +NcAtt* NcVar::get_att( NcToken aname ) const +{ + NcAtt* att = new NcAtt(the_file, this, aname); + if (! 
att->is_valid()) { + delete att; + return 0; + } + return att; +} + +NcAtt* NcVar::get_att( int n ) const +{ + if (n < 0 || n >= num_atts()) + return 0; + NcToken aname = attname(n); + NcAtt* ap = get_att(aname); + delete [] (char*)aname; + return ap; +} + +long NcVar::num_vals( void ) const +{ + long prod = 1; + for (int d = 0; d < num_dims(); d++) + prod *= get_dim(d)->size(); + return prod; +} + +NcValues* NcVar::values( void ) const +{ + int ndims = num_dims(); + size_t crnr[NC_MAX_DIMS]; + size_t edgs[NC_MAX_DIMS]; + for (int i = 0; i < ndims; i++) { + crnr[i] = 0; + edgs[i] = get_dim(i)->size(); + } + NcValues* valp = get_space(); + int status; + switch (type()) { + case ncFloat: + status = NcError::set_err( + nc_get_vara_float(the_file->id(), the_id, crnr, edgs, + (float *)valp->base()) + ); + break; + case ncDouble: + status = NcError::set_err( + nc_get_vara_double(the_file->id(), the_id, crnr, edgs, + (double *)valp->base()) + ); + break; + case ncInt: + status = NcError::set_err( + nc_get_vara_int(the_file->id(), the_id, crnr, edgs, + (int *)valp->base()) + ); + break; + case ncShort: + status = NcError::set_err( + nc_get_vara_short(the_file->id(), the_id, crnr, edgs, + (short *)valp->base()) + ); + break; + case ncByte: + status = NcError::set_err( + nc_get_vara_schar(the_file->id(), the_id, crnr, edgs, + (signed char *)valp->base()) + ); + break; + case ncChar: + status = NcError::set_err( + nc_get_vara_text(the_file->id(), the_id, crnr, edgs, + (char *)valp->base()) + ); + break; + case ncNoType: + default: + return 0; + } + if (status != NC_NOERR) + return 0; + return valp; +} + +int NcVar::dim_to_index(NcDim *rdim) +{ + for (int i=0; i < num_dims() ; i++) { + if (strcmp(get_dim(i)->name(),rdim->name()) == 0) { + return i; + } + } + // we should fail and gripe about it here.... + return -1; +} + +void NcVar::set_rec(NcDim *rdim, long slice) +{ + int i = dim_to_index(rdim); + // we should fail and gripe about it here.... 
+ if (slice >= get_dim(i)->size() && ! get_dim(i)->is_unlimited()) + return; + cur_rec[i] = slice; + return; +} + +void NcVar::set_rec(long rec) +{ + // Since we can't ask for the record dimension here + // just assume [0] is it..... + set_rec(get_dim(0),rec); + return; +} + +NcValues* NcVar::get_rec(void) +{ + return get_rec(get_dim(0), cur_rec[0]); +} + +NcValues* NcVar::get_rec(long rec) +{ + return get_rec(get_dim(0), rec); +} + +NcValues* NcVar::get_rec(NcDim* rdim, long slice) +{ + int idx = dim_to_index(rdim); + long size = num_dims(); + size_t* start = new size_t[size]; + long* startl = new long[size]; + for (int i=1; i < size ; i++) { + start[i] = 0; + startl[i] = 0; + } + start[idx] = slice; + startl[idx] = slice; + NcBool result = set_cur(startl); + if (! result ) { + delete [] start; + delete [] startl; + return 0; + } + + long* edgel = edges(); + size_t* edge = new size_t[size]; + for (int i=1; i < size ; i++) { + edge[i] = edgel[i]; + } + edge[idx] = 1; + edgel[idx] = 1; + NcValues* valp = get_space(rec_size(rdim)); + int status; + switch (type()) { + case ncFloat: + status = NcError::set_err( + nc_get_vara_float(the_file->id(), the_id, start, edge, + (float *)valp->base()) + ); + break; + case ncDouble: + status = NcError::set_err( + nc_get_vara_double(the_file->id(), the_id, start, edge, + (double *)valp->base()) + ); + break; + case ncInt: + status = NcError::set_err( + nc_get_vara_int(the_file->id(), the_id, start, edge, + (int *)valp->base()) + ); + break; + case ncShort: + status = NcError::set_err( + nc_get_vara_short(the_file->id(), the_id, start, edge, + (short *)valp->base()) + ); + break; + case ncByte: + status = NcError::set_err( + nc_get_vara_schar(the_file->id(), the_id, start, edge, + (signed char *)valp->base()) + ); + break; + case ncChar: + status = NcError::set_err( + nc_get_vara_text(the_file->id(), the_id, start, edge, + (char *)valp->base()) + ); + break; + case ncNoType: + default: + return 0; + } + delete [] start; + delete [] 
startl; + delete [] edge; + delete [] edgel; + if (status != NC_NOERR) { + delete valp; + return 0; + } + return valp; +} + + +#define NcVar_put_rec(TYPE) \ +NcBool NcVar::put_rec( const TYPE* vals) \ +{ \ + return put_rec(get_dim(0), vals, cur_rec[0]); \ +} \ + \ +NcBool NcVar::put_rec( NcDim *rdim, const TYPE* vals) \ +{ \ + int idx = dim_to_index(rdim); \ + return put_rec(rdim, vals, cur_rec[idx]); \ +} \ + \ +NcBool NcVar::put_rec( const TYPE* vals, \ + long rec) \ +{ \ + return put_rec(get_dim(0), vals, rec); \ +} \ + \ +NcBool NcVar::put_rec( NcDim* rdim, const TYPE* vals, \ + long slice) \ +{ \ + int idx = dim_to_index(rdim); \ + long size = num_dims(); \ + long* start = new long[size]; \ + for (int i=1; i < size ; i++) start[i] = 0; \ + start[idx] = slice; \ + NcBool result = set_cur(start); \ + delete [] start; \ + if (! result ) \ + return FALSE; \ + \ + long* edge = edges(); \ + edge[idx] = 1; \ + result = put(vals, edge); \ + delete [] edge; \ + return result; \ +} + +NcVar_put_rec(ncbyte) +NcVar_put_rec(char) +NcVar_put_rec(short) +NcVar_put_rec(int) +NcVar_put_rec(long) +NcVar_put_rec(float) +NcVar_put_rec(double) + +long NcVar::rec_size(void) { + return rec_size(get_dim(0)); +} + +long NcVar::rec_size(NcDim *rdim) { + int idx = dim_to_index(rdim); + long size = 1; + long* edge = edges(); + for( int i = 0 ; idata_mode()) \ + return -1; \ +int idx = dim_to_index(rdim); \ +long maxrec = get_dim(idx)->size(); \ +long maxvals = rec_size(rdim); \ +NcValues* val; \ +int validx; \ +for (long j=0; jas_ ## TYPE(validx)) break; \ + } \ + delete val; \ + if (validx == maxvals) return j; \ + } \ +return -1; \ +} + + +NcVar_get_index(ncbyte) +NcVar_get_index(char) +NcVar_get_index(short) +NcVar_get_index(nclong) +NcVar_get_index(long) +NcVar_get_index(float) +NcVar_get_index(double) + +// Macros below work for short, nclong, long, float, and double, but for ncbyte +// and char, we must use corresponding schar, uchar, or text C functions, so in +// these cases 
macros are expanded manually. +#define NcVar_put_array(TYPE) \ +NcBool NcVar::put( const TYPE* vals, \ + long edge0, \ + long edge1, \ + long edge2, \ + long edge3, \ + long edge4) \ +{ \ + /* no need to check type() vs. TYPE, invoked C function will do that */ \ + if (! the_file->data_mode()) \ + return FALSE; \ + size_t count[5]; \ + count[0] = edge0; \ + count[1] = edge1; \ + count[2] = edge2; \ + count[3] = edge3; \ + count[4] = edge4; \ + for (int i = 0; i < 5; i++) { \ + if (count[i]) { \ + if (num_dims() < i) \ + return FALSE; \ + } else \ + break; \ + } \ + size_t start[5]; \ + for (int j = 0; j < 5; j++) { \ + start[j] = the_cur[j]; \ + } \ + return NcError::set_err( \ + makename2(nc_put_vara_,TYPE) (the_file->id(), the_id, start, count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::put( const ncbyte* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_put_vara_schar (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcBool NcVar::put( const char* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! 
the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_put_vara_text (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcVar_put_array(short) +NcVar_put_array(int) +NcVar_put_array(long) +NcVar_put_array(float) +NcVar_put_array(double) + +#define NcVar_put_nd_array(TYPE) \ +NcBool NcVar::put( const TYPE* vals, const long* count ) \ +{ \ + /* no need to check type() vs. TYPE, invoked C function will do that */ \ + if (! the_file->data_mode()) \ + return FALSE; \ + size_t start[NC_MAX_DIMS]; \ + for (int i = 0; i < num_dims(); i++) \ + start[i] = the_cur[i]; \ + return NcError::set_err( \ + makename2(nc_put_vara_,TYPE) (the_file->id(), the_id, start, (const size_t *) count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::put( const ncbyte* vals, const long* count ) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return NcError::set_err( + nc_put_vara_schar (the_file->id(), the_id, start, (const size_t *)count, vals) + ) == NC_NOERR; +} + +NcBool NcVar::put( const char* vals, const long* count ) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! 
the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return NcError::set_err( + nc_put_vara_text (the_file->id(), the_id, start, (const size_t *)count, vals) + ) == NC_NOERR; +} + +NcVar_put_nd_array(short) +NcVar_put_nd_array(int) +NcVar_put_nd_array(long) +NcVar_put_nd_array(float) +NcVar_put_nd_array(double) + +#define NcVar_get_array(TYPE) \ +NcBool NcVar::get( TYPE* vals, \ + long edge0, \ + long edge1, \ + long edge2, \ + long edge3, \ + long edge4) const \ +{ \ + if (! the_file->data_mode()) \ + return FALSE; \ + size_t count[5]; \ + count[0] = edge0; \ + count[1] = edge1; \ + count[2] = edge2; \ + count[3] = edge3; \ + count[4] = edge4; \ + for (int i = 0; i < 5; i++) { \ + if (count[i]) { \ + if (num_dims() < i) \ + return FALSE; \ + } else \ + break; \ + } \ + size_t start[5]; \ + for (int j = 0; j < 5; j++) { \ + start[j] = the_cur[j]; \ + } \ + return NcError::set_err( \ + makename2(nc_get_vara_,TYPE) (the_file->id(), the_id, start, count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::get( ncbyte* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_get_vara_schar (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcBool NcVar::get( char* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) const +{ + if (! 
the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_get_vara_text (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcVar_get_array(short) +NcVar_get_array(int) +NcVar_get_array(long) +NcVar_get_array(float) +NcVar_get_array(double) + +#define NcVar_get_nd_array(TYPE) \ +NcBool NcVar::get( TYPE* vals, const long* count ) const \ +{ \ + if (! the_file->data_mode()) \ + return FALSE; \ + size_t start[NC_MAX_DIMS]; \ + for (int i = 0; i < num_dims(); i++) \ + start[i] = the_cur[i]; \ + return NcError::set_err( \ + makename2(nc_get_vara_,TYPE) (the_file->id(), the_id, start, (const size_t *) count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::get( ncbyte* vals, const long* count ) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return nc_get_vara_schar (the_file->id(), the_id, start, (const size_t *) count, vals) == NC_NOERR; +} + +NcBool NcVar::get( char* vals, const long* count ) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return nc_get_vara_text (the_file->id(), the_id, start, (const size_t*) count, vals) == NC_NOERR; +} + +NcVar_get_nd_array(short) +NcVar_get_nd_array(int) +NcVar_get_nd_array(long) +NcVar_get_nd_array(float) +NcVar_get_nd_array(double) + +// If no args, set cursor to all zeros. Else set initial elements of cursor +// to args provided, rest to zeros. 
+NcBool NcVar::set_cur(long c0, long c1, long c2, long c3, long c4) +{ + long t[6]; + t[0] = c0; + t[1] = c1; + t[2] = c2; + t[3] = c3; + t[4] = c4; + t[5] = -1; + for(int j = 0; j < 6; j++) { // find how many parameters were used + int i; + if (t[j] == -1) { + if (num_dims() < j) + return FALSE; // too many for variable's dimensionality + for (i = 0; i < j; i++) { + if (t[i] >= get_dim(i)->size() && ! get_dim(i)->is_unlimited()) + return FALSE; // too big for dimension + the_cur[i] = t[i]; + } + for(i = j; i < num_dims(); i++) + the_cur[i] = 0; + return TRUE; + } + } + return TRUE; +} + +NcBool NcVar::set_cur(long* cur) +{ + for(int i = 0; i < num_dims(); i++) { + if (cur[i] >= get_dim(i)->size() && ! get_dim(i)->is_unlimited()) + return FALSE; + the_cur[i] = cur[i]; + } + return TRUE; +} + +#define NcVar_add_scalar_att(TYPE) \ +NcBool NcVar::add_att(NcToken aname, TYPE val) \ +{ \ + if (! the_file->define_mode()) \ + return FALSE; \ + if (NcError::set_err( \ + makename2(nc_put_att_,TYPE) (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(TYPE), \ + 1, &val) \ + ) != NC_NOERR) \ + return FALSE; \ + return TRUE; \ +} + +NcBool NcVar::add_att(NcToken aname, ncbyte val) +{ + if (! the_file->define_mode()) + return FALSE; + if (nc_put_att_schar (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(ncbyte), + 1, &val) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcBool NcVar::add_att(NcToken aname, char val) +{ + if (! the_file->define_mode()) + return FALSE; + if (nc_put_att_text (the_file->id(), the_id, aname, + 1, &val) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcVar_add_scalar_att(short) +NcVar_add_scalar_att(int) +NcVar_add_scalar_att(long) +NcVar_add_scalar_att(double) + +NcBool NcVar::add_att(NcToken aname, float val) +{ + if (! the_file->define_mode()) + return FALSE; + float fval = (float) val; // workaround for bug, val passed as double?? 
+ if (nc_put_att_float(the_file->id(), the_id, aname, (nc_type) ncFloat, + 1, &fval) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcBool NcVar::add_att(NcToken aname, const char* val) +{ + if (! the_file->define_mode()) + return FALSE; + if (nc_put_att_text(the_file->id(), the_id, aname, + strlen(val), val) != NC_NOERR) + return FALSE; + return TRUE; +} + +#define NcVar_add_vector_att(TYPE) \ +NcBool NcVar::add_att(NcToken aname, int len, const TYPE* vals) \ +{ \ + if (! the_file->define_mode()) \ + return FALSE; \ + if (NcError::set_err( \ + makename2(nc_put_att_,TYPE) (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(TYPE), \ + len, vals) \ + ) != NC_NOERR) \ + return FALSE; \ + return TRUE; \ +} + +NcBool NcVar::add_att(NcToken aname, int len, const ncbyte* vals) +{ + if (! the_file->define_mode()) + return FALSE; + if (NcError::set_err( + nc_put_att_schar (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(ncbyte), + len, vals) + ) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcBool NcVar::add_att(NcToken aname, int len, const char* vals) +{ + if (! the_file->define_mode()) + return FALSE; + if (NcError::set_err( + nc_put_att_text (the_file->id(), the_id, aname, + len, vals) + ) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcVar_add_vector_att(short) +NcVar_add_vector_att(int) +NcVar_add_vector_att(long) +NcVar_add_vector_att(float) +NcVar_add_vector_att(double) + +NcBool NcVar::rename(NcToken newname) +{ + if (strlen(newname) > strlen(the_name)) { + if (! 
the_file->define_mode()) + return FALSE; + } + NcBool ret = NcError::set_err( + nc_rename_var(the_file->id(), the_id, newname) + ) == NC_NOERR; + if (ret) { + delete [] the_name; + the_name = new char [1 + strlen(newname)]; + strcpy(the_name, newname); + } + return ret; +} + +int NcVar::id( void ) const +{ + return the_id; +} + +NcBool NcVar::sync(void) +{ + if (the_name) { + delete [] the_name; + } + if (the_cur) { + delete [] the_cur; + } + if (cur_rec) { + delete [] cur_rec; + } + char nam[NC_MAX_NAME]; + if (the_file + && NcError::set_err( + nc_inq_varname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[1 + strlen(nam)]; + strcpy(the_name, nam); + } else { + the_name = 0; + return FALSE; + } + init_cur(); + return TRUE; +} + + +NcVar::NcVar(NcFile* nc, int id) + : NcTypedComponent(nc), the_id(id) +{ + char nam[NC_MAX_NAME]; + if (the_file + && NcError::set_err( + nc_inq_varname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[1 + strlen(nam)]; + strcpy(the_name, nam); + } else { + the_name = 0; + } + init_cur(); +} + +int NcVar::attnum( NcToken attrname ) const +{ + int num; + for(num=0; num < num_atts(); num++) { + char aname[NC_MAX_NAME]; + NcError::set_err( + nc_inq_attname(the_file->id(), the_id, num, aname) + ); + if (strcmp(aname, attrname) == 0) + break; + } + return num; // num_atts() if no such attribute +} + +NcToken NcVar::attname( int attnum ) const // caller must delete[] +{ + if (attnum < 0 || attnum >= num_atts()) + return 0; + char aname[NC_MAX_NAME]; + if (NcError::set_err( + nc_inq_attname(the_file->id(), the_id, attnum, aname) + ) != NC_NOERR) + return 0; + char* rname = new char[1 + strlen(aname)]; + strcpy(rname, aname); + return rname; +} + +void NcVar::init_cur( void ) +{ + the_cur = new long[NC_MAX_DIMS]; // *** don't know num_dims() yet? + cur_rec = new long[NC_MAX_DIMS]; // *** don't know num_dims() yet? 
+ for(int i = 0; i < NC_MAX_DIMS; i++) { + the_cur[i] = 0; cur_rec[i] = 0; } +} + +NcAtt::NcAtt(NcFile* nc, const NcVar* var, NcToken name) + : NcTypedComponent(nc), the_variable(var) +{ + the_name = new char[1 + strlen(name)]; + strcpy(the_name, name); +} + +NcAtt::NcAtt(NcFile* nc, NcToken name) + : NcTypedComponent(nc), the_variable(NULL) +{ + the_name = new char[1 + strlen(name)]; + strcpy(the_name, name); +} + +NcAtt::~NcAtt( void ) +{ + delete [] the_name; +} + +NcToken NcAtt::name( void ) const +{ + return the_name; +} + +NcType NcAtt::type( void ) const +{ + nc_type typ; + NcError::set_err( + nc_inq_atttype(the_file->id(), the_variable->id(), the_name, &typ) + ); + return (NcType) typ; +} + +long NcAtt::num_vals( void ) const +{ + size_t len; + NcError::set_err( + nc_inq_attlen(the_file->id(), the_variable->id(), the_name, &len) + ); + return len; +} + +NcBool NcAtt::is_valid( void ) const +{ + int num; + return the_file->is_valid() && + (the_variable->id() == NC_GLOBAL || the_variable->is_valid()) && + NcError::set_err( + nc_inq_attid(the_file->id(), the_variable->id(), the_name, &num) + ) == NC_NOERR; +} + +NcValues* NcAtt::values( void ) const +{ + NcValues* valp = get_space(); + int status; + switch (type()) { + case ncFloat: + status = NcError::set_err( + nc_get_att_float(the_file->id(), the_variable->id(), the_name, + (float *)valp->base()) + ); + break; + case ncDouble: + status = NcError::set_err( + nc_get_att_double(the_file->id(), the_variable->id(), the_name, + (double *)valp->base()) + ); + break; + case ncInt: + status = NcError::set_err( + nc_get_att_int(the_file->id(), the_variable->id(), the_name, + (int *)valp->base()) + ); + break; + case ncShort: + status = NcError::set_err( + nc_get_att_short(the_file->id(), the_variable->id(), the_name, + (short *)valp->base()) + ); + break; + case ncByte: + status = NcError::set_err( + nc_get_att_schar(the_file->id(), the_variable->id(), the_name, + (signed char *)valp->base()) + ); + break; + case 
ncChar: + status = NcError::set_err( + nc_get_att_text(the_file->id(), the_variable->id(), the_name, + (char *)valp->base()) + ); + break; + case ncNoType: + default: + return 0; + } + if (status != NC_NOERR) { + delete valp; + return 0; + } + return valp; +} + +NcBool NcAtt::rename(NcToken newname) +{ + if (strlen(newname) > strlen(the_name)) { + if (! the_file->define_mode()) + return FALSE; + } + return NcError::set_err( + nc_rename_att(the_file->id(), the_variable->id(), + the_name, newname) + ) == NC_NOERR; +} + +NcBool NcAtt::remove( void ) +{ + if (! the_file->define_mode()) + return FALSE; + return NcError::set_err( + nc_del_att(the_file->id(), the_variable->id(), the_name) + ) == NC_NOERR; +} + +NcError::NcError( Behavior b ) +{ + the_old_state = ncopts; // global variable in version 2 C interface + the_old_err = ncerr; // global variable in version 2 C interface + ncopts = (int) b; +} + +NcError::~NcError( void ) +{ + ncopts = the_old_state; + ncerr = the_old_err; +} + +int NcError::get_err( void ) // returns most recent error +{ + return ncerr; +} + +int NcError::set_err (int err) +{ + ncerr = err; + // Check ncopts and handle appropriately + if(err != NC_NOERR) { + if(ncopts == verbose_nonfatal || ncopts == verbose_fatal) { + std::cout << nc_strerror(err) << std::endl; + } + if(ncopts == silent_fatal || ncopts == verbose_fatal) { + exit(ncopts); + } + } + return err; +} + +int NcError::ncerr = NC_NOERR; +int NcError::ncopts = NcError::verbose_fatal ; // for backward compatibility diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh new file mode 100644 index 000000000..c93d8886e --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh @@ -0,0 +1 @@ +#include diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h new file mode 100644 index 000000000..2f828e0b8 --- /dev/null +++ 
b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h @@ -0,0 +1,469 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. + * + * Purpose: C++ class interface for netCDF + * + * $Header: /upc/share/CVS/netcdf-3/cxx/netcdfcpp.h,v 1.15 2009/03/10 15:20:54 russ Exp $ + *********************************************************************/ + +#ifndef NETCDF_HH +#define NETCDF_HH + +#include "ncvalues.h" // arrays that know their element type + +typedef const char* NcToken; // names for netCDF objects +typedef unsigned int NcBool; // many members return 0 on failure + +class NcDim; // dimensions +class NcVar; // variables +class NcAtt; // attributes + +/* + * *********************************************************************** + * A netCDF file. + * *********************************************************************** + */ +class NcFile +{ + public: + + virtual ~NcFile( void ); + + enum FileMode { + ReadOnly, // file exists, open read-only + Write, // file exists, open for writing + Replace, // create new file, even if already exists + New // create new file, fail if already exists + }; + + enum FileFormat { + Classic, // netCDF classic format (i.e. 
version 1 format) + Offset64Bits, // netCDF 64-bit offset format + Netcdf4, // netCDF-4 using HDF5 format + Netcdf4Classic, // netCDF-4 using HDF5 format using only netCDF-3 calls + BadFormat + }; + + NcFile( const char * path, FileMode = ReadOnly , + size_t *bufrsizeptr = NULL, // optional tuning parameters + size_t initialsize = 0, + FileFormat = Classic ); + + NcBool is_valid( void ) const; // opened OK in ctr, still valid + + int num_dims( void ) const; // number of dimensions + int num_vars( void ) const; // number of variables + int num_atts( void ) const; // number of (global) attributes + + NcDim* get_dim( NcToken ) const; // dimension by name + NcVar* get_var( NcToken ) const; // variable by name + NcAtt* get_att( NcToken ) const; // global attribute by name + + NcDim* get_dim( int ) const; // n-th dimension + NcVar* get_var( int ) const; // n-th variable + NcAtt* get_att( int ) const; // n-th global attribute + NcDim* rec_dim( void ) const; // unlimited dimension, if any + + // Add new dimensions, variables, global attributes. + // These put the file in "define" mode, so could be expensive. 
+ virtual NcDim* add_dim( NcToken dimname, long dimsize ); + virtual NcDim* add_dim( NcToken dimname ); // unlimited + + virtual NcVar* add_var( NcToken varname, NcType type, // scalar + const NcDim* dim0=0, // 1-dim + const NcDim* dim1=0, // 2-dim + const NcDim* dim2=0, // 3-dim + const NcDim* dim3=0, // 4-dim + const NcDim* dim4=0 ); // 5-dim + virtual NcVar* add_var( NcToken varname, NcType type, // n-dim + int ndims, const NcDim** dims ); + + NcBool add_att( NcToken attname, char ); // scalar attributes + NcBool add_att( NcToken attname, ncbyte ); + NcBool add_att( NcToken attname, short ); + NcBool add_att( NcToken attname, long ); + NcBool add_att( NcToken attname, int ); + NcBool add_att( NcToken attname, float ); + NcBool add_att( NcToken attname, double ); + NcBool add_att( NcToken attname, const char*); // string attribute + NcBool add_att( NcToken attname, int, const char* ); // vector attributes + NcBool add_att( NcToken attname, int, const ncbyte* ); + NcBool add_att( NcToken attname, int, const short* ); + NcBool add_att( NcToken attname, int, const long* ); + NcBool add_att( NcToken attname, int, const int* ); + NcBool add_att( NcToken attname, int, const float* ); + NcBool add_att( NcToken attname, int, const double* ); + + enum FillMode { + Fill = NC_FILL, // prefill (default) + NoFill = NC_NOFILL, // don't prefill + Bad + }; + + NcBool set_fill( FillMode = Fill ); // set fill-mode + FillMode get_fill( void ) const; // get fill-mode + FileFormat get_format( void ) const; // get format version + + NcBool sync( void ); // synchronize to disk + NcBool close( void ); // to close earlier than dtr + NcBool abort( void ); // back out of bad defines + + // Needed by other Nc classes, but users will not need them + NcBool define_mode( void ); // leaves in define mode, if possible + NcBool data_mode( void ); // leaves in data mode, if possible + int id( void ) const; // id used by C interface + + protected: + int the_id; + int in_define_mode; + FillMode 
the_fill_mode; + NcDim** dimensions; + NcVar** variables; + NcVar* globalv; // "variable" for global attributes +}; + +/* + * For backward compatibility. We used to derive NcOldFile and NcNewFile + * from NcFile, but that was over-zealous inheritance. + */ +#define NcOldFile NcFile +#define NcNewFile NcFile +#define Clobber Replace +#define NoClobber New + +/* + * ********************************************************************** + * A netCDF dimension, with a name and a size. These are only created + * by NcFile member functions, because they cannot exist independently + * of an open netCDF file. + * ********************************************************************** + */ +class NcDim +{ + public: + NcToken name( void ) const; + long size( void ) const; + NcBool is_valid( void ) const; + NcBool is_unlimited( void ) const; + NcBool rename( NcToken newname ); + int id( void ) const; + NcBool sync( void ); + + private: + NcFile *the_file; // not const because of rename + int the_id; + char *the_name; + + NcDim(NcFile*, int num); // existing dimension + NcDim(NcFile*, NcToken name, long sz); // defines a new dim + virtual ~NcDim( void ); + + // to construct dimensions, since constructor is private + friend class NcFile; +}; + + +/* + * ********************************************************************** + * Abstract base class for a netCDF variable or attribute, both of which + * have a name, a type, and associated values. These only exist as + * components of an open netCDF file. 
+ * ********************************************************************** + */ +class NcTypedComponent +{ + public: + virtual ~NcTypedComponent( void ) {} + virtual NcToken name( void ) const = 0; + virtual NcType type( void ) const = 0; + virtual NcBool is_valid( void ) const = 0; + virtual long num_vals( void ) const = 0; + virtual NcBool rename( NcToken newname ) = 0; + virtual NcValues* values( void ) const = 0; // block of all values + + // The following member functions provide conversions from the value + // type to a desired basic type. If the value is out of range, + // the default "fill-value" for the appropriate type is returned. + + virtual ncbyte as_ncbyte( long n ) const; // nth value as an unsgnd char + virtual char as_char( long n ) const; // nth value as char + virtual short as_short( long n ) const; // nth value as short + virtual int as_int( long n ) const; // nth value as int + virtual int as_nclong( long n ) const; // nth value as nclong (deprecated) + virtual long as_long( long n ) const; // nth value as long + virtual float as_float( long n ) const; // nth value as floating-point + virtual double as_double( long n ) const; // nth value as double + virtual char* as_string( long n ) const; // nth value as string + + protected: + NcFile *the_file; + NcTypedComponent( NcFile* ); + virtual NcValues* get_space( long numVals = 0 ) const; // to hold values +}; + + +/* + * ********************************************************************** + * netCDF variables. 
In addition to a name and a type, these also have + * a shape, given by a list of dimensions + * ********************************************************************** + */ +class NcVar : public NcTypedComponent +{ + public: + virtual ~NcVar( void ); + NcToken name( void ) const; + NcType type( void ) const; + NcBool is_valid( void ) const; + int num_dims( void ) const; // dimensionality of variable + NcDim* get_dim( int ) const; // n-th dimension + long* edges( void ) const; // dimension sizes + int num_atts( void ) const; // number of attributes + NcAtt* get_att( NcToken ) const; // attribute by name + NcAtt* get_att( int ) const; // n-th attribute + long num_vals( void ) const; // product of dimension sizes + NcValues* values( void ) const; // all values + + // Put scalar or 1, ..., 5 dimensional arrays by providing enough + // arguments. Arguments are edge lengths, and their number must not + // exceed variable's dimensionality. Start corner is [0,0,..., 0] by + // default, but may be reset using the set_cur() member. FALSE is + // returned if type of values does not match type for variable. + NcBool put( const ncbyte* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const char* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const short* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const int* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const long* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const float* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const double* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + + // Put n-dimensional arrays, starting at [0, 0, ..., 0] by default, + // may be reset with set_cur(). 
+ NcBool put( const ncbyte* vals, const long* counts ); + NcBool put( const char* vals, const long* counts ); + NcBool put( const short* vals, const long* counts ); + NcBool put( const int* vals, const long* counts ); + NcBool put( const long* vals, const long* counts ); + NcBool put( const float* vals, const long* counts ); + NcBool put( const double* vals, const long* counts ); + + // Get scalar or 1, ..., 5 dimensional arrays by providing enough + // arguments. Arguments are edge lengths, and their number must not + // exceed variable's dimensionality. Start corner is [0,0,..., 0] by + // default, but may be reset using the set_cur() member. + NcBool get( ncbyte* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( char* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( short* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( int* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( long* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( float* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( double* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + + // Get n-dimensional arrays, starting at [0, 0, ..., 0] by default, + // may be reset with set_cur(). 
+ NcBool get( ncbyte* vals, const long* counts ) const; + NcBool get( char* vals, const long* counts ) const; + NcBool get( short* vals, const long* counts ) const; + NcBool get( int* vals, const long* counts ) const; + NcBool get( long* vals, const long* counts ) const; + NcBool get( float* vals, const long* counts ) const; + NcBool get( double* vals, const long* counts ) const; + + NcBool set_cur(long c0=-1, long c1=-1, long c2=-1, + long c3=-1, long c4=-1); + NcBool set_cur(long* cur); + + // these put file in define mode, so could be expensive + NcBool add_att( NcToken, char ); // add scalar attributes + NcBool add_att( NcToken, ncbyte ); + NcBool add_att( NcToken, short ); + NcBool add_att( NcToken, int ); + NcBool add_att( NcToken, long ); + NcBool add_att( NcToken, float ); + NcBool add_att( NcToken, double ); + NcBool add_att( NcToken, const char* ); // string attribute + NcBool add_att( NcToken, int, const char* ); // vector attributes + NcBool add_att( NcToken, int, const ncbyte* ); + NcBool add_att( NcToken, int, const short* ); + NcBool add_att( NcToken, int, const int* ); + NcBool add_att( NcToken, int, const long* ); + NcBool add_att( NcToken, int, const float* ); + NcBool add_att( NcToken, int, const double* ); + + NcBool rename( NcToken newname ); + + long rec_size ( void ); // number of values per record + long rec_size ( NcDim* ); // number of values per dimension slice + + // Though following are intended for record variables, they also work + // for other variables, using first dimension as record dimension. 
+ + // Get a record's worth of data + NcValues *get_rec(void); // get current record + NcValues *get_rec(long rec); // get specified record + NcValues *get_rec(NcDim* d); // get current dimension slice + NcValues *get_rec(NcDim* d, long slice); // get specified dimension slice + + // Put a record's worth of data in current record + NcBool put_rec( const ncbyte* vals ); + NcBool put_rec( const char* vals ); + NcBool put_rec( const short* vals ); + NcBool put_rec( const int* vals ); + NcBool put_rec( const long* vals ); + NcBool put_rec( const float* vals ); + NcBool put_rec( const double* vals ); + + // Put a dimension slice worth of data in current dimension slice + NcBool put_rec( NcDim* d, const ncbyte* vals ); + NcBool put_rec( NcDim* d, const char* vals ); + NcBool put_rec( NcDim* d, const short* vals ); + NcBool put_rec( NcDim* d, const int* vals ); + NcBool put_rec( NcDim* d, const long* vals ); + NcBool put_rec( NcDim* d, const float* vals ); + NcBool put_rec( NcDim* d, const double* vals ); + + // Put a record's worth of data in specified record + NcBool put_rec( const ncbyte* vals, long rec ); + NcBool put_rec( const char* vals, long rec ); + NcBool put_rec( const short* vals, long rec ); + NcBool put_rec( const int* vals, long rec ); + NcBool put_rec( const long* vals, long rec ); + NcBool put_rec( const float* vals, long rec ); + NcBool put_rec( const double* vals, long rec ); + + // Put a dimension slice worth of data in specified dimension slice + NcBool put_rec( NcDim* d, const ncbyte* vals, long slice ); + NcBool put_rec( NcDim* d, const char* vals, long slice ); + NcBool put_rec( NcDim* d, const short* vals, long slice ); + NcBool put_rec( NcDim* d, const int* vals, long slice ); + NcBool put_rec( NcDim* d, const long* vals, long slice ); + NcBool put_rec( NcDim* d, const float* vals, long slice ); + NcBool put_rec( NcDim* d, const double* vals, long slice ); + + // Get first record index corresponding to specified key value(s) + long get_index( 
const ncbyte* vals ); + long get_index( const char* vals ); + long get_index( const short* vals ); + long get_index( const int* vals ); + long get_index( const long* vals ); + long get_index( const float* vals ); + long get_index( const double* vals ); + + // Get first index of specified dimension corresponding to key values + long get_index( NcDim* d, const ncbyte* vals ); + long get_index( NcDim* d, const char* vals ); + long get_index( NcDim* d, const short* vals ); + long get_index( NcDim* d, const int* vals ); + long get_index( NcDim* d, const long* vals ); + long get_index( NcDim* d, const float* vals ); + long get_index( NcDim* d, const double* vals ); + + // Set current record + void set_rec ( long rec ); + // Set current dimension slice + void set_rec ( NcDim* d, long slice ); + + int id( void ) const; // rarely needed, C interface id + NcBool sync( void ); + + private: + int dim_to_index(NcDim* rdim); + int the_id; + long* the_cur; + char* the_name; + long* cur_rec; + + // private constructors because only an NcFile creates these + NcVar( void ); + NcVar(NcFile*, int); + + int attnum( NcToken attname ) const; + NcToken attname( int attnum ) const; + void init_cur( void ); + + // to make variables, since constructor is private + friend class NcFile; +}; + + +/* + * ********************************************************************** + * netCDF attributes. In addition to a name and a type, these are each + * associated with a specific variable, or are global to the file. 
+ * ********************************************************************** + */ +class NcAtt : public NcTypedComponent +{ + public: + virtual ~NcAtt( void ); + NcToken name( void ) const; + NcType type( void ) const; + NcBool is_valid( void ) const; + long num_vals( void ) const; + NcValues* values( void ) const; + NcBool rename( NcToken newname ); + NcBool remove( void ); + + private: + const NcVar* the_variable; + char* the_name; + // protected constructors because only NcVars and NcFiles create + // attributes + NcAtt( NcFile*, const NcVar*, NcToken); + NcAtt( NcFile*, NcToken); // global attribute + + // To make attributes, since constructor is private + friend class NcFile; + friend NcAtt* NcVar::get_att( NcToken ) const; +}; + + +/* + * ********************************************************************** + * To control error handling. Declaring an NcError object temporarily + * changes the error-handling behavior until the object is destroyed, at + * which time the previous error-handling behavior is restored. 
+ * ********************************************************************** + */ +class NcError { + public: + enum Behavior { + silent_nonfatal = 0, + silent_fatal = 1, + verbose_nonfatal = 2, + verbose_fatal = 3 + }; + + // constructor saves previous error state, sets new state + NcError( Behavior b = verbose_fatal ); + + // destructor restores previous error state + virtual ~NcError( void ); + + int get_err( void ); // returns most recent error number + const char* get_errmsg( void ) {return nc_strerror(get_err());} + static int set_err( int err ); + + private: + int the_old_state; + int the_old_err; + static int ncopts; + static int ncerr; +}; + +#endif /* NETCDF_HH */ From 93655c5dfa9a83e7eae4a1eaaf9a3e06af3c0edb Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 15 Feb 2019 15:28:03 -0700 Subject: [PATCH 092/180] Update build to use local netcdf-cxx-4.2 --- mesh_tools/mesh_conversion_tools/Makefile | 36 +++++++++++-------- .../mesh_conversion_tools/conda/build.sh | 11 +++++- .../mesh_conversion_tools/conda/meta.yaml | 11 +++--- 3 files changed, 39 insertions(+), 19 deletions(-) diff --git a/mesh_tools/mesh_conversion_tools/Makefile b/mesh_tools/mesh_conversion_tools/Makefile index 3028c1e48..24cad329d 100644 --- a/mesh_tools/mesh_conversion_tools/Makefile +++ b/mesh_tools/mesh_conversion_tools/Makefile @@ -23,10 +23,15 @@ # make # gnu -CC=g++ -CFLAGS= -O3 -std=c++0x -fopenmp -lstdc++ -DFLAGS= -g -std=c++0x -D_DEBUG -fopenmp -lstdc++ - +ifeq (${CC}, ) + CC=g++ +endif +ifeq (${CFLAGS}, ) + CFLAGS= -O3 -std=c++0x -fopenmp -lstdc++ +endif +ifeq (${DFLAGS}, ) + DFLAGS= -g -std=c++0x -D_DEBUG -fopenmp -lstdc++ +endif # intel # CC=icpc # CFLAGS= -O3 -std=c++0x -qopenmp -lstdc++ @@ -38,31 +43,34 @@ MASK_EXECUTABLE= MpasMaskCreator.x ifneq (${NETCDF}, ) ifneq ($(shell which ${NETCDF}/bin/nc-config 2> /dev/null), ) - LIBS = $(shell ${NETCDF}/bin/nc-config --libs) -lnetcdf_c++ + LIBS = $(shell ${NETCDF}/bin/nc-config --libs) INCS = $(shell ${NETCDF}/bin/nc-config 
--cflags) else LIBS= -L${NETCDF}/lib - LIBS += -lnetcdf_c++ -lnetcdf + LIBS += -lnetcdf INCS = -I${NETCDF}/include endif else ifneq ($(shell which nc-config 2> /dev/null), ) - LIBS = $(shell nc-config --libs) -lnetcdf_c++ + LIBS = $(shell nc-config --libs) INCS = $(shell nc-config --cflags) else LIBS= -L${NETCDF}/lib - LIBS += -lnetcdf_c++ -lnetcdf + LIBS += -lnetcdf INCS = -I${NETCDF}/include endif +INCS += -Inetcdf-cxx-4.2/ +SRC = netcdf_utils.cpp netcdf-cxx-4.2/ncvalues.cpp netcdf-cxx-4.2/netcdf.cpp + all: - ${CC} mpas_mesh_converter.cpp netcdf_utils.cpp ${CFLAGS} -o ${CONV_EXECUTABLE} -I. ${INCS} ${LIBS} - ${CC} mpas_cell_culler.cpp netcdf_utils.cpp ${CFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_mask_creator.cpp netcdf_utils.cpp jsoncpp.cpp ${CFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CC} mpas_mesh_converter.cpp ${SRC} ${CFLAGS} -o ${CONV_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CC} mpas_cell_culler.cpp ${SRC} ${CFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} + ${CC} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${CFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} debug: - ${CC} mpas_mesh_converter.cpp netcdf_utils.cpp ${DFLAGS} -o ${CONV_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_cell_culler.cpp netcdf_utils.cpp ${DFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_mask_creator.cpp netcdf_utils.cpp jsoncpp.cpp ${DFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CC} mpas_mesh_converter.cpp ${SRC} ${DFLAGS} -o ${CONV_EXECUTABLE} ${INCS} ${LIBS} + ${CC} mpas_cell_culler.cpp ${SRC} ${DFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} + ${CC} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${DFLAGS} -o ${MASK_EXECUTABLE} -I. 
${INCS} ${LIBS} clean: rm -f grid.nc diff --git a/mesh_tools/mesh_conversion_tools/conda/build.sh b/mesh_tools/mesh_conversion_tools/conda/build.sh index 3b60f3029..dd48fb700 100644 --- a/mesh_tools/mesh_conversion_tools/conda/build.sh +++ b/mesh_tools/mesh_conversion_tools/conda/build.sh @@ -3,6 +3,15 @@ set -x set -e -export NETCDF=$NETCDF_DIR cd mesh_tools/mesh_conversion_tools + +export CC=${GXX} +export CFLAGS="-O3 -std=c++0x -fopenmp -lstdc++" + make + +install -d ${PREFIX}/bin/ +for exec in MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x mark_horns_for_culling.py +do + install -m 755 ${exec} ${PREFIX}/bin/ +done \ No newline at end of file diff --git a/mesh_tools/mesh_conversion_tools/conda/meta.yaml b/mesh_tools/mesh_conversion_tools/conda/meta.yaml index a1644d2f0..e27ee0052 100644 --- a/mesh_tools/mesh_conversion_tools/conda/meta.yaml +++ b/mesh_tools/mesh_conversion_tools/conda/meta.yaml @@ -7,14 +7,13 @@ package: source: git_url: https://github.com/MPAS-Dev/MPAS-Tools.git - git_rev: eb772e863dd65dbff1a5cad948211dabd240285d + git_rev: 3501e5b8a060314cf2fd1e5493b3b98843e7fdc5 build: number: 0 requirements: build: - - {{ compiler('c') }} - {{ compiler('cxx') }} host: - netcdf4 =1.4.2 @@ -27,7 +26,9 @@ requirements: test: commands: - - cd mesh_tools/mesh_conversion_tools/test + - wget https://github.com/MPAS-Dev/MPAS-Tools/raw/master/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - wget https://github.com/MPAS-Dev/MPAS-Tools/raw/master/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc + - wget https://github.com/MPAS-Dev/MPAS-Tools/raw/master/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMeshConverter.x mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f Arctic_Ocean.geojson @@ -61,4 +62,6 @@ about: dev_url: 'https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools/mesh_conversion_tools' extra: - 
recipe-maintainers: 'xylar' + recipe-maintainers: + - xylar + - jhkennedy From 5c49f824a16ab32a1de5488e1402c6f2204f12c2 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 15 Feb 2019 16:18:07 -0700 Subject: [PATCH 093/180] Update commit hash to use for the package --- mesh_tools/mesh_conversion_tools/conda/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesh_tools/mesh_conversion_tools/conda/meta.yaml b/mesh_tools/mesh_conversion_tools/conda/meta.yaml index e27ee0052..12246821e 100644 --- a/mesh_tools/mesh_conversion_tools/conda/meta.yaml +++ b/mesh_tools/mesh_conversion_tools/conda/meta.yaml @@ -7,7 +7,7 @@ package: source: git_url: https://github.com/MPAS-Dev/MPAS-Tools.git - git_rev: 3501e5b8a060314cf2fd1e5493b3b98843e7fdc5 + git_rev: ef6345a552a543a239f900be23b121a4a84a056c build: number: 0 From de9b7338a88a3170efe9a89d27a0230cebe8f938 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 20 Feb 2019 13:29:59 -0700 Subject: [PATCH 094/180] Change CC to CXX in make file This is the expected env. 
variable for a c++ compiler --- mesh_tools/mesh_conversion_tools/Makefile | 29 ++++++++++------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/mesh_tools/mesh_conversion_tools/Makefile b/mesh_tools/mesh_conversion_tools/Makefile index 24cad329d..d7cc6486e 100644 --- a/mesh_tools/mesh_conversion_tools/Makefile +++ b/mesh_tools/mesh_conversion_tools/Makefile @@ -12,7 +12,7 @@ # In this file: # comment gnu, uncomment intel flags # change to: -# CC=CC +# CXX=CC # may need to unload parallel NetCDF and HDF5 libraries # to avoid g++ conflicts: # module unload python @@ -23,17 +23,12 @@ # make # gnu -ifeq (${CC}, ) - CC=g++ -endif -ifeq (${CFLAGS}, ) - CFLAGS= -O3 -std=c++0x -fopenmp -lstdc++ -endif -ifeq (${DFLAGS}, ) - DFLAGS= -g -std=c++0x -D_DEBUG -fopenmp -lstdc++ -endif +CXX ?= g++ +CFLAGS ?= -O3 -std=c++0x -fopenmp -lstdc++ +DFLAGS ?= -g -std=c++0x -D_DEBUG -fopenmp -lstdc++ + # intel -# CC=icpc +# CXX=icpc # CFLAGS= -O3 -std=c++0x -qopenmp -lstdc++ # DFLAGS= -g -std=c++0x -D_DEBUG -qopenmp -lstdc++ @@ -63,14 +58,14 @@ INCS += -Inetcdf-cxx-4.2/ SRC = netcdf_utils.cpp netcdf-cxx-4.2/ncvalues.cpp netcdf-cxx-4.2/netcdf.cpp all: - ${CC} mpas_mesh_converter.cpp ${SRC} ${CFLAGS} -o ${CONV_EXECUTABLE} -I. ${INCS} ${LIBS} - ${CC} mpas_cell_culler.cpp ${SRC} ${CFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${CFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CXX} mpas_mesh_converter.cpp ${SRC} ${CFLAGS} -o ${CONV_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CXX} mpas_cell_culler.cpp ${SRC} ${CFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} + ${CXX} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${CFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} debug: - ${CC} mpas_mesh_converter.cpp ${SRC} ${DFLAGS} -o ${CONV_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_cell_culler.cpp ${SRC} ${DFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${DFLAGS} -o ${MASK_EXECUTABLE} -I. 
${INCS} ${LIBS} + ${CXX} mpas_mesh_converter.cpp ${SRC} ${DFLAGS} -o ${CONV_EXECUTABLE} ${INCS} ${LIBS} + ${CXX} mpas_cell_culler.cpp ${SRC} ${DFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} + ${CXX} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${DFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} clean: rm -f grid.nc From 0f1d7cedbc8f33e082b677a3a01fc219f33b211c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 20 Feb 2019 13:40:30 -0700 Subject: [PATCH 095/180] Update conda recipe to switch to CXX build --- mesh_tools/mesh_conversion_tools/conda/build.sh | 4 ++-- mesh_tools/mesh_conversion_tools/conda/meta.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mesh_tools/mesh_conversion_tools/conda/build.sh b/mesh_tools/mesh_conversion_tools/conda/build.sh index dd48fb700..79940f0c1 100644 --- a/mesh_tools/mesh_conversion_tools/conda/build.sh +++ b/mesh_tools/mesh_conversion_tools/conda/build.sh @@ -5,7 +5,7 @@ set -e cd mesh_tools/mesh_conversion_tools -export CC=${GXX} +export CXX=${GXX} export CFLAGS="-O3 -std=c++0x -fopenmp -lstdc++" make @@ -14,4 +14,4 @@ install -d ${PREFIX}/bin/ for exec in MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x mark_horns_for_culling.py do install -m 755 ${exec} ${PREFIX}/bin/ -done \ No newline at end of file +done diff --git a/mesh_tools/mesh_conversion_tools/conda/meta.yaml b/mesh_tools/mesh_conversion_tools/conda/meta.yaml index 12246821e..377e1e439 100644 --- a/mesh_tools/mesh_conversion_tools/conda/meta.yaml +++ b/mesh_tools/mesh_conversion_tools/conda/meta.yaml @@ -7,7 +7,7 @@ package: source: git_url: https://github.com/MPAS-Dev/MPAS-Tools.git - git_rev: ef6345a552a543a239f900be23b121a4a84a056c + git_rev: 65ec4939c7419ff868ded1d1d0785ec6c7ec3e80 build: number: 0 From 9d479f1043cb6b4f63af35c7c789f2bda80ad492 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 16:19:39 -0700 Subject: [PATCH 096/180] Check that ny is even --- mesh_tools/planar_hex/planar_hex.py | 4 ++++ 1 file 
changed, 4 insertions(+) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index 282d1fb2f..936c5a55f 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -38,6 +38,10 @@ def parse_args(mesh): ny = args.ny dc = args.dc + if ny % 2 != 0: + raise ValueError('ny must be divisible by 2 for the grid\'s ' + 'periodicity to work properly.') + # non-periodic meshes aren't yet supported # if args.periodicX: # mesh.attrs['periodic_x'] = 'YES' From 772c0179d6cfbff8b33d2018625e18155332467a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 20:17:25 -0700 Subject: [PATCH 097/180] Make a main() function for later use as entry point This will help with incorporating planar_hex into a python package soon. --- mesh_tools/planar_hex/planar_hex.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index 936c5a55f..ea41e16f8 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -376,7 +376,7 @@ def make_diff(mesh, refMeshFileName, diffFileName): write_netcdf(diff, diffFileName) -if __name__ == '__main__': +def main(): mesh = xarray.Dataset() outFileName = parse_args(mesh) @@ -396,3 +396,7 @@ def make_diff(mesh, refMeshFileName, diffFileName): # used to make sure results are exactly identical to periodic_hex # make_diff(mesh, '../periodic_hex/grid.nc', 'diff.nc') + + +if __name__ == '__main__': + main() From 1ff7a8a77c4858456bc8358f08330f308ab8caba Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 26 Feb 2019 16:43:38 -0700 Subject: [PATCH 098/180] Restructure for better use as python module By restructuring the file, the function `make_periodic_planar_hex_mesh` can be called from another python script to generate a mesh object for further manipulation. 
Eventually, this should help with making a non-periodic version of the mesh, perhaps with another tool. It would also allow for culling cells or other operations without the need to write out and read back in the mesh or to call this script as a subprocess of another script. --- mesh_tools/planar_hex/planar_hex.py | 128 +++++++++++++++++----------- 1 file changed, 77 insertions(+), 51 deletions(-) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index ea41e16f8..f110b4077 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -9,52 +9,70 @@ import netCDF4 -def parse_args(mesh): +def make_periodic_planar_hex_mesh(nx, ny, dc, outFileName=None, + compareWithFileName=None): ''' - Parse the command-line arguments and put them into the mesh as dimensions - or attributes. + Builds an MPAS periodic, planar hexagonal mesh with the requested + dimensions, optionally saving it to a file, and returs it as an + ``xarray.Dataset``. + + Parameters + ---------- + nx : int + The number of cells in the x direction + + ny : even int + The number of cells in the y direction (must be an even number for + periodicity to work out) + + dc : float + The distance in meters between adjacent cell centers. + + outFileName : str, optional + The name of a file to save the mesh to. The mesh is not saved to a + file if no file name is supplied. + + compareWithFileName : str, optional + The name of a grid file to compare with to see if they are identical, + used for testing purposes + + Returns + ------- + mesh : ``xarray.Dataset`` + The mesh data set, available for further maniuplation such as culling + cells or removing periodicity. 
''' - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument('--nx', dest='nx', type=int, required=True, - help='Cells in x direction') - parser.add_argument('--ny', dest='ny', type=int, required=True, - help='Cells in y direction') - parser.add_argument('--dc', dest='dc', type=float, required=True, - help='Distance between cell centers in meters') - parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, - required=False, default='grid.nc', - help='The name of the output file') + mesh = initial_setup(nx, ny, dc) + compute_indices_on_cell(mesh) + compute_indices_on_edge(mesh) + compute_indices_on_vertex(mesh) + compute_weights_on_edge(mesh) + compute_coordinates(mesh) + add_one_to_indices(mesh) + + # drop some arrays that aren't stantard for MPAS but were used to compute + # the hex mesh + mesh = mesh.drop(['cellIdx', 'cellRow', 'cellCol']) + mesh.attrs.pop('dc') -# parser.add_argument('--periodicX', dest='periodicX', action='store_true', -# help='Make the mesh periodic in x') -# parser.add_argument('--periodicY', dest='periodicY', action='store_true', -# help='Make the mesh periodic in y') + if outFileName is not None: + write_netcdf(mesh, outFileName) - args = parser.parse_args() + if compareWithFileName is not None: + # used to make sure results are exactly identical to periodic_hex + make_diff(mesh, compareWithFileName, 'diff.nc') + + return mesh - nx = args.nx - ny = args.ny - dc = args.dc +def initial_setup(nx, ny, dc): + '''Setup the dimensions and add placeholders for some index variables''' if ny % 2 != 0: raise ValueError('ny must be divisible by 2 for the grid\'s ' 'periodicity to work properly.') - # non-periodic meshes aren't yet supported -# if args.periodicX: -# mesh.attrs['periodic_x'] = 'YES' -# else: -# mesh.attrs['periodic_x'] = 'NO' -# if args.periodicY: -# mesh.attrs['periodic_y'] = 'YES' -# else: -# mesh.attrs['periodic_y'] = 'NO' -# if args.periodicX or 
args.periodicY: -# mesh.attrs['is_periodic'] = 'YES' -# else: -# mesh.attrs['is_periodic'] = 'NO' + mesh = xarray.Dataset() mesh.attrs['is_periodic'] = 'YES' mesh.attrs['x_period'] = nx*dc @@ -109,7 +127,7 @@ def parse_args(mesh): mesh['edgesOnVertex'] = (('nVertices', 'vertexDegree'), numpy.zeros((nVertices, vertexDegree), 'i4')) - return args.outFileName + return mesh def compute_indices_on_cell(mesh): @@ -378,24 +396,32 @@ def make_diff(mesh, refMeshFileName, diffFileName): def main(): - mesh = xarray.Dataset() - outFileName = parse_args(mesh) - compute_indices_on_cell(mesh) - compute_indices_on_edge(mesh) - compute_indices_on_vertex(mesh) - compute_weights_on_edge(mesh) - compute_coordinates(mesh) - add_one_to_indices(mesh) + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('--nx', dest='nx', type=int, required=True, + help='Cells in x direction') + parser.add_argument('--ny', dest='ny', type=int, required=True, + help='Cells in y direction') + parser.add_argument('--dc', dest='dc', type=float, required=True, + help='Distance between cell centers in meters') + parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, + required=False, default='grid.nc', + help='The name of the output file') - # drop some arrays that aren't stantard for MPAS but were used to compute - # the hex mesh - mesh = mesh.drop(['cellIdx', 'cellRow', 'cellCol']) - mesh.attrs.pop('dc') + # parser.add_argument('--periodicX', dest='periodicX', action='store_true', + # help='Make the mesh periodic in x') + # parser.add_argument('--periodicY', dest='periodicY', action='store_true', + # help='Make the mesh periodic in y') + + args = parser.parse_args() - write_netcdf(mesh, outFileName) + make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.outFileName) - # used to make sure results are exactly identical to periodic_hex - # make_diff(mesh, '../periodic_hex/grid.nc', 'diff.nc') + # used this 
instead to make sure results are exactly identical to + # periodic_hex + # make_periodic_planar_hex_mesh( + # args.nx, args.ny, args.dc, args.outFileName, + # compareWithFileName='../periodic_hex/grid.nc') if __name__ == '__main__': From 4916a9e8491cea759ef913f3dcf7880d0a94b71c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 27 Feb 2019 18:56:59 -0700 Subject: [PATCH 099/180] Add a script to mark critical land blockages These are transects that must be land cells, the opposite of critical passages (transects that must be ocean). --- .../add_critical_land_blockages_to_mask.py | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100755 ocean/coastline_alteration/add_critical_land_blockages_to_mask.py diff --git a/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py b/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py new file mode 100755 index 000000000..9f98deb15 --- /dev/null +++ b/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +""" +Name: add_critical_land_blockages_to_mask.py +Author: Xylar Asay-Davis + +Add transects that identify critical regions where narrow strips of land block +ocean flow. These are, essentially, the opposite of critical passages, which +must remain open for ocean flow. 
+""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import os +import shutil +from netCDF4 import Dataset +import numpy as np +import argparse + + +def removeFile(fileName): + try: + os.remove(fileName) + except OSError: + pass + + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="INPUTMASKFILE", required=True) +parser.add_argument("-o", "--output_mask_file", dest="output_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="OUTPUTMASKFILE", required=True) +parser.add_argument("-b", "--blockage_file", dest="blockage_file", + help="Masks for each transect identifying critical land" + "blockage.", metavar="BLOCKFILE", + required=True) +args = parser.parse_args() + +removeFile(args.output_mask_filename) +shutil.copyfile(args.input_mask_filename, args.output_mask_filename) + +outMaskFile = Dataset(args.output_mask_filename, "r+") +nRegions = len(outMaskFile.dimensions["nRegions"]) +regionCellMasks = outMaskFile.variables["regionCellMasks"] + +blockageFile = Dataset(args.blockage_file, "r+") +nTransects = len(blockageFile.dimensions["nTransects"]) +transectCellMasks = blockageFile.variables["transectCellMasks"] +for transectIndex in range(nTransects): + # make sure the regionCellMasks for the first region is 1 anywhere a + # transectCellMask is 1 + regionCellMasks[:, 0] = np.maximum(transectCellMasks[:, transectIndex], + regionCellMasks[:, 0]) + +blockageFile.close() +outMaskFile.close() From 106b14c115f17af88179a78367e3828a3ec113f7 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 28 Feb 2019 15:16:58 -0700 Subject: [PATCH 100/180] Update framework/vector_reconstruction.py Change output_interval in docstring to initial_only --- framework/vector_reconstruction.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/framework/vector_reconstruction.py b/framework/vector_reconstruction.py index c1334c4d9..674b069fd 100755 --- a/framework/vector_reconstruction.py +++ b/framework/vector_reconstruction.py @@ -11,7 +11,7 @@ From 2428602c47c7d09820bfdb3ca4504d56751ed608 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 28 Feb 2019 15:31:33 -0700 Subject: [PATCH 101/180] Move vector reconstruciton to operators directory --- {framework => operators}/vector_reconstruction.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {framework => operators}/vector_reconstruction.py (100%) diff --git a/framework/vector_reconstruction.py b/operators/vector_reconstruction.py similarity index 100% rename from framework/vector_reconstruction.py rename to operators/vector_reconstruction.py From 0c0efb75ee5e3526862bb7a6ee98582fe959fa0c Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Tue, 26 Mar 2019 10:59:42 -0600 Subject: [PATCH 102/180] Add Tool to merge 2 MPAS non-contiguous meshes into a single file --- mesh_tools/merge_split_meshes/merge_grids.py | 191 +++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100755 mesh_tools/merge_split_meshes/merge_grids.py diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py new file mode 100755 index 000000000..12eebb91e --- /dev/null +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python +''' +Tool to merge 2 MPAS non-contiguous meshes together into a single file +''' + +import sys +import numpy as np +import netCDF4 +import argparse +import math +from collections import OrderedDict +import scipy.spatial +import time +from datetime import datetime + + +#print "== Gathering information. (Invoke with --help for more details. 
All arguments are optional)\n" +parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) +parser.description = __doc__ +parser.add_argument("-1", dest="file1", help="name of file 1", metavar="FILENAME") +parser.add_argument("-2", dest="file2", help="name of file 2", metavar="FILENAME") +parser.add_argument("-o", dest="outFile", help="name of output file", default="merged_mpas.nc", metavar="FILENAME") +#for option in parser.option_list: +# if option.default != ("NO", "DEFAULT"): +# option.help += (" " if option.help else "") + "[default: %default]" +options = parser.parse_args() + + + +f1 = netCDF4.Dataset(options.file1) +nCells1 = len(f1.dimensions['nCells']) +nEdges1 = len(f1.dimensions['nEdges']) +nVertices1 = len(f1.dimensions['nVertices']) +Time1= len(f1.dimensions['Time']) + +f2 = netCDF4.Dataset(options.file2) +nCells2 = len(f2.dimensions['nCells']) +nEdges2 = len(f2.dimensions['nEdges']) +nVertices2 = len(f2.dimensions['nVertices']) +Time2= len(f2.dimensions['Time']) + +if Time1 != Time2: + sys.exit("ERROR: The two files have different lengths of the Time dimension.") +if len(f1.dimensions['vertexDegree']) != len(f2.dimensions['vertexDegree']): + sys.exit("ERROR: The two files have different lengths of the vertexDegree dimension.") +if len(f1.dimensions['nVertLevels']) != len(f2.dimensions['nVertLevels']): + sys.exit("ERROR: The two files have different lengths of the nVertLevels dimension.") + + +# Create new file +fout = netCDF4.Dataset(options.outFile, "w", format="NETCDF3_CLASSIC") + +# add merged dimensions +print("Adding merged dimensions to new file.") +fout.createDimension('nCells', nCells1+nCells2) +fout.createDimension('nEdges', nEdges1+nEdges2) +fout.createDimension('nVertices', nVertices1+nVertices2) +fout.createDimension('TWO', 2) +fout.createDimension('vertexDegree', len(f1.dimensions['vertexDegree'])) +if 'StrLen' in f1.dimensions: + fout.createDimension('StrLen', len(f1.dimensions['StrLen'])) +maxEdges = 
max(len(f1.dimensions['maxEdges']), len(f2.dimensions['maxEdges'])) +fout.createDimension('maxEdges', maxEdges) +fout.createDimension('maxEdges2', maxEdges*2) +fout.createDimension('nVertLevels', len(f1.dimensions['nVertLevels'])) +fout.createDimension('nVertInterfaces', len(f1.dimensions['nVertInterfaces'])) + +fout.createDimension('Time', size=None) # make unlimited dimension + + +# compare list of variables +vars1 = f1.variables +vars2 = f2.variables + +# only copy variables common to both files +for varname in vars1: + if varname in vars2: + print("Merging variable {}".format(varname)) + if f1.variables[varname].dimensions != f2.variables[varname].dimensions: + sys.exit("Error: Variable {} has different dimensions in the two files.").format(varname) + + theVar = f1.variables[varname] + newVar = fout.createVariable(varname, theVar.dtype, theVar.dimensions) + # (Assuming here that nCells, nEdges, and nVertices are never both in a variable) + # now assign value + if 'nCells' in theVar.dimensions: + ind = theVar.dimensions.index('nCells') + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nCells': + tup1 += (slice(0,nCells1),) + tup2 += (slice(0,nCells2),) + tupMerge += (slice(nCells1, nCells1+nCells2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = f1.variables[varname][tup1] + newVar[tupMerge] = f2.variables[varname][tup2] + elif 'nEdges' in theVar.dimensions: + ind = theVar.dimensions.index('nEdges') + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nEdges': + tup1 += (slice(0,nEdges1),) + tup2 += (slice(0,nEdges2),) + tupMerge += (slice(nEdges1, nEdges1+nEdges2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = f1.variables[varname][tup1] + newVar[tupMerge] = f2.variables[varname][tup2] + elif 'nVertices' in 
theVar.dimensions: + ind = theVar.dimensions.index('nVertices') + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nVertices': + tup1 += (slice(0,nVertices1),) + tup2 += (slice(0,nVertices2),) + tupMerge += (slice(nVertices1, nVertices1+nVertices2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = f1.variables[varname][tup1] + newVar[tupMerge] = f2.variables[varname][tup2] + else: + # just take file 1's version + newVar[:] = theVar[:] + + # Indexes need adjusting: + if varname == "indexToCellID": + newVar[nCells1:] += nCells1 + elif varname == "indexToEdgeID": + newVar[nEdges1:] += nEdges1 + elif varname == "indexToVertexID": + newVar[nVertices1:] += nVertices1 + elif varname == "cellsOnEdge": + part2 = newVar[nEdges1:,:] + part2[part2>0] += nCells1 + newVar[nEdges1:,:] = part2 + elif varname == "edgesOnCell": + part2 = newVar[nCells1:,:] + part2[part2>0] += nEdges1 + newVar[nCells1:,:] = part2 + elif varname == "edgesOnEdge": + part2 = newVar[nEdges1:,:] + part2[part2>0] += nEdges1 + newVar[nEdges1:,:] = part2 + elif varname == "cellsOnCell": + part2 = newVar[nCells1:,:] + part2[part2>0] += nCells1 + newVar[nCells1:,:] = part2 + elif varname == "verticesOnCell": + part2 = newVar[nCells1:,:] + part2[part2>0] += nVertices1 + newVar[nCells1:,:] = part2 + elif varname == "verticesOnEdge": + part2 = newVar[nEdges1:,:] + part2[part2>0] += nVertices1 + newVar[nEdges1:,:] = part2 + elif varname == "edgesOnVertex": + part2 = newVar[nVertices1:,:] + part2[part2>0] += nEdges1 + newVar[nVertices1:,:] = part2 + elif varname == "cellsOnVertex": + part2 = newVar[nVertices1:,:] + part2[part2>0] += nCells1 + newVar[nVertices1:,:] = part2 + + +# add some needed attributes +fout.on_a_sphere = "NO" +fout.sphere_radius = 0.0 +fout.is_periodic = "NO" +# Update history attribute of netCDF file +thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " 
".join(sys.argv[:]) +setattr(fout, 'history', thiscommand ) +fout.close() +f1.close() +f2.close() + +print('\nMerge completed.') + From 426d9a4f80cbe76853d35c034df8c9abd0f348cc Mon Sep 17 00:00:00 2001 From: Amrapalli Garanaik Date: Wed, 1 May 2019 14:25:32 -0600 Subject: [PATCH 103/180] single np flag --- mesh_tools/planar_hex/planar_hex.py | 81 ++++++++++++++++++++++++++--- 1 file changed, 74 insertions(+), 7 deletions(-) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index f110b4077..374cc37e8 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -9,7 +9,7 @@ import netCDF4 -def make_periodic_planar_hex_mesh(nx, ny, dc, outFileName=None, +def make_periodic_planar_hex_mesh(nx, ny, dc, np, outFileName=None, compareWithFileName=None): ''' Builds an MPAS periodic, planar hexagonal mesh with the requested @@ -28,6 +28,9 @@ def make_periodic_planar_hex_mesh(nx, ny, dc, outFileName=None, dc : float The distance in meters between adjacent cell centers. + np : str + Direction of non peridicity, input 'x' or 'y' or 'xy' or 'none' + outFileName : str, optional The name of a file to save the mesh to. The mesh is not saved to a file if no file name is supplied. @@ -43,8 +46,14 @@ def make_periodic_planar_hex_mesh(nx, ny, dc, outFileName=None, cells or removing periodicity. 
''' - mesh = initial_setup(nx, ny, dc) + mesh = initial_setup(nx, ny, dc, np) compute_indices_on_cell(mesh) + if np=='x': + mark_cull_cell_nonperiodic_x(mesh) + elif np=='y': + mark_cull_cell_nonperiodic_y(mesh) + elif np=='xy': + mark_cull_cell_nonperiodic_xy(mesh) compute_indices_on_edge(mesh) compute_indices_on_vertex(mesh) compute_weights_on_edge(mesh) @@ -65,8 +74,8 @@ def make_periodic_planar_hex_mesh(nx, ny, dc, outFileName=None, return mesh - -def initial_setup(nx, ny, dc): +#ag: np is added +def initial_setup(nx, ny, dc, np): '''Setup the dimensions and add placeholders for some index variables''' if ny % 2 != 0: raise ValueError('ny must be divisible by 2 for the grid\'s ' @@ -82,6 +91,15 @@ def initial_setup(nx, ny, dc): mesh.attrs['on_a_sphere'] = 'NO' mesh.attrs['sphere_radius'] = 1. + + #ag:for non periodic, extra cells added in required direction that is going to be removed for culledcells + if np=='x': + nx=nx+2 + elif np=='y': + ny=ny+2 + elif np=='xy': + nx=nx+2 + ny=ny+2 nCells = nx * ny nEdges = 3 * nCells @@ -106,6 +124,8 @@ def initial_setup(nx, ny, dc): mesh['indexToEdgeID'] = (('nEdges'), indexToEdgeID) mesh['indexToVertexID'] = (('nVertices'), indexToVertexID) + mesh['cullCell'] = (('nCells'), numpy.zeros(nCells, 'i4')) + mesh['nEdgesOnCell'] = (('nCells',), 6*numpy.ones((nCells,), 'i4')) mesh['cellsOnCell'] = (('nCells', 'maxEdges'), numpy.zeros((nCells, maxEdges), 'i4')) @@ -130,6 +150,50 @@ def initial_setup(nx, ny, dc): return mesh +def mark_cull_cell_nonperiodic_y(mesh): + + cullCell = mesh.cullCell + + cellIdx = mesh.cellIdx + cellRow = mesh.cellRow + cellCol = mesh.cellCol + nCells = mesh.sizes['nCells'] + nx = mesh.sizes['nx'] + ny = mesh.sizes['ny'] + #print(nx,ny,nCells) + cullCell[0:nx] = 1 + cullCell[nCells-nx:nCells+1] = 1 + +def mark_cull_cell_nonperiodic_x(mesh): + + cullCell = mesh.cullCell + cellIdx = mesh.cellIdx + cellRow = mesh.cellRow + cellCol = mesh.cellCol + nCells = mesh.sizes['nCells'] + nx = mesh.sizes['nx'] + ny 
= mesh.sizes['ny'] + #print(nx,ny,nCells) + cullCell[::nx] = 1 + cullCell[nx-1:nCells+1:nx] = 1 + + +def mark_cull_cell_nonperiodic_xy(mesh): + + cullCell = mesh.cullCell + cellIdx = mesh.cellIdx + cellRow = mesh.cellRow + cellCol = mesh.cellCol + + nCells = mesh.sizes['nCells'] + nx = mesh.sizes['nx'] + ny = mesh.sizes['ny'] + #print(nx, ny, nCells) + cullCell[0:nx] = 1 + cullCell[nCells-nx:nCells+1] = 1 + cullCell[::nx] = 1 + cullCell[nx-1:nCells+1:nx] = 1 + def compute_indices_on_cell(mesh): cellIdx = mesh.cellIdx @@ -404,10 +468,13 @@ def main(): help='Cells in y direction') parser.add_argument('--dc', dest='dc', type=float, required=True, help='Distance between cell centers in meters') + #ag: to include non periodicity + parser.add_argument('--np', dest='np', type=str, required=True, + help='non peridic in x or y or both xy or none direction') parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, required=False, default='grid.nc', help='The name of the output file') - + # parser.add_argument('--periodicX', dest='periodicX', action='store_true', # help='Make the mesh periodic in x') # parser.add_argument('--periodicY', dest='periodicY', action='store_true', @@ -415,8 +482,8 @@ def main(): args = parser.parse_args() - make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.outFileName) - + make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.np, args.outFileName) + # used this instead to make sure results are exactly identical to # periodic_hex # make_periodic_planar_hex_mesh( From 354bf7e3fc071a0fb9f78c474fac6e02dbb19f03 Mon Sep 17 00:00:00 2001 From: Amrapalli Garanaik Date: Wed, 1 May 2019 16:23:18 -0600 Subject: [PATCH 104/180] Logical flag for npx, npy --- mesh_tools/planar_hex/planar_hex.py | 56 ++++++++++------------------- 1 file changed, 19 insertions(+), 37 deletions(-) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index 374cc37e8..de0dead5a 100755 --- 
a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -9,7 +9,7 @@ import netCDF4 -def make_periodic_planar_hex_mesh(nx, ny, dc, np, outFileName=None, +def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodicx,nonperiodicy, outFileName=None, compareWithFileName=None): ''' Builds an MPAS periodic, planar hexagonal mesh with the requested @@ -27,9 +27,8 @@ def make_periodic_planar_hex_mesh(nx, ny, dc, np, outFileName=None, dc : float The distance in meters between adjacent cell centers. - - np : str - Direction of non peridicity, input 'x' or 'y' or 'xy' or 'none' + nonperiodicx: true/false: non-periodic in x direction + nonperiodicy: true/false: non-periodic in y direction outFileName : str, optional The name of a file to save the mesh to. The mesh is not saved to a @@ -46,14 +45,12 @@ def make_periodic_planar_hex_mesh(nx, ny, dc, np, outFileName=None, cells or removing periodicity. ''' - mesh = initial_setup(nx, ny, dc, np) + mesh = initial_setup(nx, ny, dc, nonperiodicx, nonperiodicy) compute_indices_on_cell(mesh) - if np=='x': + if nonperiodicx: mark_cull_cell_nonperiodic_x(mesh) - elif np=='y': + if nonperiodicy: mark_cull_cell_nonperiodic_y(mesh) - elif np=='xy': - mark_cull_cell_nonperiodic_xy(mesh) compute_indices_on_edge(mesh) compute_indices_on_vertex(mesh) compute_weights_on_edge(mesh) @@ -75,7 +72,7 @@ def make_periodic_planar_hex_mesh(nx, ny, dc, np, outFileName=None, return mesh #ag: np is added -def initial_setup(nx, ny, dc, np): +def initial_setup(nx, ny, dc, nonperiodicx, nonperiodicy): '''Setup the dimensions and add placeholders for some index variables''' if ny % 2 != 0: raise ValueError('ny must be divisible by 2 for the grid\'s ' @@ -93,15 +90,12 @@ def initial_setup(nx, ny, dc, np): mesh.attrs['sphere_radius'] = 1. 
#ag:for non periodic, extra cells added in required direction that is going to be removed for culledcells - if np=='x': + if nonperiodicx: nx=nx+2 - elif np=='y': + if nonperiodicy: ny=ny+2 - elif np=='xy': - nx=nx+2 - ny=ny+2 - - nCells = nx * ny + + nCells=nx*ny nEdges = 3 * nCells nVertices = 2 * nCells vertexDegree = 3 @@ -178,22 +172,6 @@ def mark_cull_cell_nonperiodic_x(mesh): cullCell[nx-1:nCells+1:nx] = 1 -def mark_cull_cell_nonperiodic_xy(mesh): - - cullCell = mesh.cullCell - cellIdx = mesh.cellIdx - cellRow = mesh.cellRow - cellCol = mesh.cellCol - - nCells = mesh.sizes['nCells'] - nx = mesh.sizes['nx'] - ny = mesh.sizes['ny'] - #print(nx, ny, nCells) - cullCell[0:nx] = 1 - cullCell[nCells-nx:nCells+1] = 1 - cullCell[::nx] = 1 - cullCell[nx-1:nCells+1:nx] = 1 - def compute_indices_on_cell(mesh): cellIdx = mesh.cellIdx @@ -468,9 +446,11 @@ def main(): help='Cells in y direction') parser.add_argument('--dc', dest='dc', type=float, required=True, help='Distance between cell centers in meters') - #ag: to include non periodicity - parser.add_argument('--np', dest='np', type=str, required=True, - help='non peridic in x or y or both xy or none direction') + #ag: for non-periodic boundary + parser.add_argument('-npx', '--nonperiodicx',action="store_true", + help='non-periodic in x direction') + parser.add_argument('-npy','--nonperiodicy', action="store_true", + help='non-periodic in y direction') parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, required=False, default='grid.nc', help='The name of the output file') @@ -482,7 +462,8 @@ def main(): args = parser.parse_args() - make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.np, args.outFileName) + make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.nonperiodicx, args.nonperiodicy, args.outFileName) + # used this instead to make sure results are exactly identical to # periodic_hex @@ -493,3 +474,4 @@ def main(): if __name__ == '__main__': main() + From 
b35e1c18cc3d89282867c12583bd91a8a7f2630c Mon Sep 17 00:00:00 2001 From: Amrapalli Garanaik Date: Wed, 1 May 2019 16:58:21 -0600 Subject: [PATCH 105/180] cleanup --- mesh_tools/planar_hex/planar_hex.py | 36 ++++++++++++++--------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index de0dead5a..3a496f59b 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -9,7 +9,7 @@ import netCDF4 -def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodicx,nonperiodicy, outFileName=None, +def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodic_x,nonperiodic_y, outFileName=None, compareWithFileName=None): ''' Builds an MPAS periodic, planar hexagonal mesh with the requested @@ -27,8 +27,8 @@ def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodicx,nonperiodicy, outFileN dc : float The distance in meters between adjacent cell centers. - nonperiodicx: true/false: non-periodic in x direction - nonperiodicy: true/false: non-periodic in y direction + nonperiodic_x: true/false: non-periodic in x direction + nonperiodic_y: true/false: non-periodic in y direction outFileName : str, optional The name of a file to save the mesh to. The mesh is not saved to a @@ -45,11 +45,11 @@ def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodicx,nonperiodicy, outFileN cells or removing periodicity. 
''' - mesh = initial_setup(nx, ny, dc, nonperiodicx, nonperiodicy) + mesh = initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y) compute_indices_on_cell(mesh) - if nonperiodicx: + if nonperiodic_x: mark_cull_cell_nonperiodic_x(mesh) - if nonperiodicy: + if nonperiodic_y: mark_cull_cell_nonperiodic_y(mesh) compute_indices_on_edge(mesh) compute_indices_on_vertex(mesh) @@ -72,7 +72,7 @@ def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodicx,nonperiodicy, outFileN return mesh #ag: np is added -def initial_setup(nx, ny, dc, nonperiodicx, nonperiodicy): +def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): '''Setup the dimensions and add placeholders for some index variables''' if ny % 2 != 0: raise ValueError('ny must be divisible by 2 for the grid\'s ' @@ -88,13 +88,13 @@ def initial_setup(nx, ny, dc, nonperiodicx, nonperiodicy): mesh.attrs['on_a_sphere'] = 'NO' mesh.attrs['sphere_radius'] = 1. - + #ag:for non periodic, extra cells added in required direction that is going to be removed for culledcells - if nonperiodicx: + if nonperiodic_x: nx=nx+2 - if nonperiodicy: + if nonperiodic_y: ny=ny+2 - + nCells=nx*ny nEdges = 3 * nCells nVertices = 2 * nCells @@ -447,14 +447,14 @@ def main(): parser.add_argument('--dc', dest='dc', type=float, required=True, help='Distance between cell centers in meters') #ag: for non-periodic boundary - parser.add_argument('-npx', '--nonperiodicx',action="store_true", + parser.add_argument('-npx', '--nonperiodic_x',action="store_true", help='non-periodic in x direction') - parser.add_argument('-npy','--nonperiodicy', action="store_true", + parser.add_argument('-npy','--nonperiodic_y', action="store_true", help='non-periodic in y direction') parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, required=False, default='grid.nc', help='The name of the output file') - + # parser.add_argument('--periodicX', dest='periodicX', action='store_true', # help='Make the mesh periodic in x') # 
parser.add_argument('--periodicY', dest='periodicY', action='store_true', @@ -462,9 +462,9 @@ def main(): args = parser.parse_args() - make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.nonperiodicx, args.nonperiodicy, args.outFileName) - - + make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.nonperiodic_x, args.nonperiodic_y, args.outFileName) + + # used this instead to make sure results are exactly identical to # periodic_hex # make_periodic_planar_hex_mesh( @@ -474,4 +474,4 @@ def main(): if __name__ == '__main__': main() - + From f11f76d6a6cc74086cc20ffefad5c57b18648fc9 Mon Sep 17 00:00:00 2001 From: Amrapalli Garanaik Date: Thu, 2 May 2019 11:23:23 -0600 Subject: [PATCH 106/180] formatted --- mesh_tools/planar_hex/planar_hex.py | 143 +++++++++++++--------------- 1 file changed, 66 insertions(+), 77 deletions(-) diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/planar_hex/planar_hex.py index 3a496f59b..ffd4deaf1 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/planar_hex/planar_hex.py @@ -9,7 +9,8 @@ import netCDF4 -def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodic_x,nonperiodic_y, outFileName=None, +def make_periodic_planar_hex_mesh(nx, ny, dc, nonperiodic_x, + nonperiodic_y, outFileName=None, compareWithFileName=None): ''' Builds an MPAS periodic, planar hexagonal mesh with the requested @@ -27,8 +28,9 @@ def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodic_x,nonperiodic_y, outFil dc : float The distance in meters between adjacent cell centers. - nonperiodic_x: true/false: non-periodic in x direction - nonperiodic_y: true/false: non-periodic in y direction + + nonperiodic_x : true/false: non-periodic in x direction + nonperiodic_y : true/false: non-periodic in y direction outFileName : str, optional The name of a file to save the mesh to. 
The mesh is not saved to a @@ -48,9 +50,9 @@ def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodic_x,nonperiodic_y, outFil mesh = initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y) compute_indices_on_cell(mesh) if nonperiodic_x: - mark_cull_cell_nonperiodic_x(mesh) + mark_cull_cell_nonperiodic_x(mesh) if nonperiodic_y: - mark_cull_cell_nonperiodic_y(mesh) + mark_cull_cell_nonperiodic_y(mesh) compute_indices_on_edge(mesh) compute_indices_on_vertex(mesh) compute_weights_on_edge(mesh) @@ -71,7 +73,7 @@ def make_periodic_planar_hex_mesh(nx, ny, dc,nonperiodic_x,nonperiodic_y, outFil return mesh -#ag: np is added + def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): '''Setup the dimensions and add placeholders for some index variables''' if ny % 2 != 0: @@ -81,21 +83,20 @@ def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): mesh = xarray.Dataset() mesh.attrs['is_periodic'] = 'YES' - mesh.attrs['x_period'] = nx*dc - mesh.attrs['y_period'] = ny*dc*numpy.sqrt(3.)/2. + mesh.attrs['x_period'] = nx * dc + mesh.attrs['y_period'] = ny * dc * numpy.sqrt(3.) / 2. mesh.attrs['dc'] = dc mesh.attrs['on_a_sphere'] = 'NO' mesh.attrs['sphere_radius'] = 1. 
- #ag:for non periodic, extra cells added in required direction that is going to be removed for culledcells if nonperiodic_x: - nx=nx+2 + nx = nx + 2 if nonperiodic_y: - ny=ny+2 + ny = ny + 2 - nCells=nx*ny + nCells = nx * ny nEdges = 3 * nCells nVertices = 2 * nCells vertexDegree = 3 @@ -120,7 +121,7 @@ def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): mesh['cullCell'] = (('nCells'), numpy.zeros(nCells, 'i4')) - mesh['nEdgesOnCell'] = (('nCells',), 6*numpy.ones((nCells,), 'i4')) + mesh['nEdgesOnCell'] = (('nCells',), 6 * numpy.ones((nCells,), 'i4')) mesh['cellsOnCell'] = (('nCells', 'maxEdges'), numpy.zeros((nCells, maxEdges), 'i4')) mesh['edgesOnCell'] = (('nCells', 'maxEdges'), @@ -128,11 +129,11 @@ def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): mesh['verticesOnCell'] = (('nCells', 'maxEdges'), numpy.zeros((nCells, maxEdges), 'i4')) - mesh['nEdgesOnEdge'] = (('nEdges',), 10*numpy.ones((nEdges,), 'i4')) + mesh['nEdgesOnEdge'] = (('nEdges',), 10 * numpy.ones((nEdges,), 'i4')) mesh['cellsOnEdge'] = (('nEdges', 'TWO'), numpy.zeros((nEdges, 2), 'i4')) mesh['edgesOnEdge'] = (('nEdges', 'maxEdges2'), - -1*numpy.ones((nEdges, 2*maxEdges), 'i4')) + -1 * numpy.ones((nEdges, 2 * maxEdges), 'i4')) mesh['verticesOnEdge'] = (('nEdges', 'TWO'), numpy.zeros((nEdges, 2), 'i4')) @@ -147,29 +148,19 @@ def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): def mark_cull_cell_nonperiodic_y(mesh): cullCell = mesh.cullCell - - cellIdx = mesh.cellIdx - cellRow = mesh.cellRow - cellCol = mesh.cellCol nCells = mesh.sizes['nCells'] nx = mesh.sizes['nx'] - ny = mesh.sizes['ny'] - #print(nx,ny,nCells) cullCell[0:nx] = 1 - cullCell[nCells-nx:nCells+1] = 1 + cullCell[nCells - nx:nCells + 1] = 1 + def mark_cull_cell_nonperiodic_x(mesh): cullCell = mesh.cullCell - cellIdx = mesh.cellIdx - cellRow = mesh.cellRow - cellCol = mesh.cellCol nCells = mesh.sizes['nCells'] nx = mesh.sizes['nx'] - ny = mesh.sizes['ny'] - #print(nx,ny,nCells) cullCell[::nx] = 1 - 
cullCell[nx-1:nCells+1:nx] = 1 + cullCell[nx - 1:nCells + 1:nx] = 1 def compute_indices_on_cell(mesh): @@ -199,20 +190,20 @@ def compute_indices_on_cell(mesh): cellsOnCell[:, 5] = cellIdx[py, mx].where(mask, cellIdx[py, cellCol]) edgesOnCell = mesh.edgesOnCell - edgesOnCell[:, 0] = 3*indexToCellID - edgesOnCell[:, 1] = 3*indexToCellID + 1 - edgesOnCell[:, 2] = 3*indexToCellID + 2 - edgesOnCell[:, 3] = 3*cellsOnCell[:, 3] - edgesOnCell[:, 4] = 3*cellsOnCell[:, 4] + 1 - edgesOnCell[:, 5] = 3*cellsOnCell[:, 5] + 2 + edgesOnCell[:, 0] = 3 * indexToCellID + edgesOnCell[:, 1] = 3 * indexToCellID + 1 + edgesOnCell[:, 2] = 3 * indexToCellID + 2 + edgesOnCell[:, 3] = 3 * cellsOnCell[:, 3] + edgesOnCell[:, 4] = 3 * cellsOnCell[:, 4] + 1 + edgesOnCell[:, 5] = 3 * cellsOnCell[:, 5] + 2 verticesOnCell = mesh.verticesOnCell - verticesOnCell[:, 0] = 2*indexToCellID - verticesOnCell[:, 1] = 2*indexToCellID + 1 - verticesOnCell[:, 2] = 2*cellsOnCell[:, 2] - verticesOnCell[:, 3] = 2*cellsOnCell[:, 3] + 1 - verticesOnCell[:, 4] = 2*cellsOnCell[:, 3] - verticesOnCell[:, 5] = 2*cellsOnCell[:, 4] + 1 + verticesOnCell[:, 0] = 2 * indexToCellID + verticesOnCell[:, 1] = 2 * indexToCellID + 1 + verticesOnCell[:, 2] = 2 * cellsOnCell[:, 2] + verticesOnCell[:, 3] = 2 * cellsOnCell[:, 3] + 1 + verticesOnCell[:, 4] = 2 * cellsOnCell[:, 3] + verticesOnCell[:, 5] = 2 * cellsOnCell[:, 4] + 1 def compute_indices_on_edge(mesh): @@ -303,16 +294,16 @@ def compute_weights_on_edge(mesh): numpy.zeros((nEdges, maxEdges2), 'f8')) weightsOnEdge = mesh.weightsOnEdge - weights = (1./numpy.sqrt(3.))*numpy.array( - [[1./3., 1./6., 0., 1./6., 1./3.], - [1./3., -1./6., 0., 1./6., -1./3.], - [-1./3., -1./6., 0., -1./6., -1./3.]]) + weights = (1. / numpy.sqrt(3.)) * numpy.array( + [[1. / 3., 1. / 6., 0., 1. / 6., 1. / 3.], + [1. / 3., -1. / 6., 0., 1. / 6., -1. / 3.], + [-1. / 3., -1. / 6., 0., -1. / 6., -1. 
/ 3.]]) for i in range(3): for j in range(5): - weightsOnEdge[edgesOnCell[:, i+3], j] = weights[i, j] + weightsOnEdge[edgesOnCell[:, i + 3], j] = weights[i, j] for i in range(3): for j in range(5): - weightsOnEdge[edgesOnCell[:, i], j+5] = weights[i, j] + weightsOnEdge[edgesOnCell[:, i], j + 5] = weights[i, j] def compute_coordinates(mesh): @@ -339,49 +330,54 @@ def compute_coordinates(mesh): cellCol = mesh.cellCol mask = numpy.mod(cellRow, 2) == 0 - mesh['xCell'] = (dc*(cellCol + 0.5)).where(mask, dc*(cellCol + 1)) - mesh['yCell'] = dc*(cellRow + 1)*numpy.sqrt(3.)/2. + mesh['xCell'] = (dc * (cellCol + 0.5)).where(mask, dc * (cellCol + 1)) + mesh['yCell'] = dc * (cellRow + 1) * numpy.sqrt(3.) / 2. mesh['zCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) mesh['xEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) mesh['yEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) mesh['zEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) - mesh.xEdge[edgesOnCell[:, 0]] = mesh.xCell - 0.5*dc + mesh.xEdge[edgesOnCell[:, 0]] = mesh.xCell - 0.5 * dc mesh.yEdge[edgesOnCell[:, 0]] = mesh.yCell - mesh.xEdge[edgesOnCell[:, 1]] = mesh.xCell - 0.5*dc*numpy.cos(numpy.pi/3.) - mesh.yEdge[edgesOnCell[:, 1]] = mesh.yCell - 0.5*dc*numpy.sin(numpy.pi/3.) + mesh.xEdge[edgesOnCell[:, 1]] = mesh.xCell - \ + 0.5 * dc * numpy.cos(numpy.pi / 3.) + mesh.yEdge[edgesOnCell[:, 1]] = mesh.yCell - \ + 0.5 * dc * numpy.sin(numpy.pi / 3.) - mesh.xEdge[edgesOnCell[:, 2]] = mesh.xCell + 0.5*dc*numpy.cos(numpy.pi/3.) - mesh.yEdge[edgesOnCell[:, 2]] = mesh.yCell - 0.5*dc*numpy.sin(numpy.pi/3.) + mesh.xEdge[edgesOnCell[:, 2]] = mesh.xCell + \ + 0.5 * dc * numpy.cos(numpy.pi / 3.) + mesh.yEdge[edgesOnCell[:, 2]] = mesh.yCell - \ + 0.5 * dc * numpy.sin(numpy.pi / 3.) 
mesh['xVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) mesh['yVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) mesh['zVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) - mesh.xVertex[verticesOnCell[:, 0]] = mesh.xCell - 0.5*dc - mesh.yVertex[verticesOnCell[:, 0]] = mesh.yCell + dc*numpy.sqrt(3.)/6. + mesh.xVertex[verticesOnCell[:, 0]] = mesh.xCell - 0.5 * dc + mesh.yVertex[verticesOnCell[:, 0]] = mesh.yCell + dc * numpy.sqrt(3.) / 6. - mesh.xVertex[verticesOnCell[:, 1]] = mesh.xCell - 0.5*dc - mesh.yVertex[verticesOnCell[:, 1]] = mesh.yCell - dc*numpy.sqrt(3.)/6. + mesh.xVertex[verticesOnCell[:, 1]] = mesh.xCell - 0.5 * dc + mesh.yVertex[verticesOnCell[:, 1]] = mesh.yCell - dc * numpy.sqrt(3.) / 6. mesh['angleEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) - mesh.angleEdge[edgesOnCell[:, 1]] = numpy.pi/3. - mesh.angleEdge[edgesOnCell[:, 2]] = 2.*numpy.pi/3. + mesh.angleEdge[edgesOnCell[:, 1]] = numpy.pi / 3. + mesh.angleEdge[edgesOnCell[:, 2]] = 2. * numpy.pi / 3. - mesh['dcEdge'] = (('nEdges'), dc*numpy.ones((nEdges,), 'f8')) - mesh['dvEdge'] = mesh.dcEdge*numpy.sqrt(3.)/3. + mesh['dcEdge'] = (('nEdges'), dc * numpy.ones((nEdges,), 'f8')) + mesh['dvEdge'] = mesh.dcEdge * numpy.sqrt(3.) / 3. mesh['areaCell'] = \ - (('nCells'), dc**2*numpy.sqrt(3.)/2.*numpy.ones((nCells,), 'f8')) + (('nCells'), dc**2 * numpy.sqrt(3.) / 2. * numpy.ones((nCells,), 'f8')) mesh['areaTriangle'] = \ - (('nVertices'), dc**2*numpy.sqrt(3.)/4.*numpy.ones((nVertices,), 'f8')) + (('nVertices'), dc**2 * numpy.sqrt(3.) / + 4. * numpy.ones((nVertices,), 'f8')) mesh['kiteAreasOnVertex'] = \ (('nVertices', 'vertexDegree'), - dc**2*numpy.sqrt(3.)/12.*numpy.ones((nVertices, vertexDegree), 'f8')) + dc**2 * numpy.sqrt(3.) / 12. 
* numpy.ones((nVertices, vertexDegree), 'f8')) mesh['meshDensity'] = (('nCells',), numpy.ones((nCells,), 'f8')) @@ -446,24 +442,18 @@ def main(): help='Cells in y direction') parser.add_argument('--dc', dest='dc', type=float, required=True, help='Distance between cell centers in meters') - #ag: for non-periodic boundary - parser.add_argument('-npx', '--nonperiodic_x',action="store_true", - help='non-periodic in x direction') - parser.add_argument('-npy','--nonperiodic_y', action="store_true", - help='non-periodic in y direction') + parser.add_argument('-npx', '--nonperiodic_x', action="store_true", + help='non-periodic in x direction') + parser.add_argument('-npy', '--nonperiodic_y', action="store_true", + help='non-periodic in y direction') parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, required=False, default='grid.nc', help='The name of the output file') - - # parser.add_argument('--periodicX', dest='periodicX', action='store_true', - # help='Make the mesh periodic in x') - # parser.add_argument('--periodicY', dest='periodicY', action='store_true', - # help='Make the mesh periodic in y') args = parser.parse_args() - make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.nonperiodic_x, args.nonperiodic_y, args.outFileName) - + make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.nonperiodic_x, + args.nonperiodic_y, args.outFileName) # used this instead to make sure results are exactly identical to # periodic_hex @@ -474,4 +464,3 @@ def main(): if __name__ == '__main__': main() - From 43895b176453da3ae1098c77258381c132df450f Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 22:38:43 -0700 Subject: [PATCH 107/180] Update print statement to function calls for python 3 Add futures to make python 2 code act like python 3 --- ...ate_landice_grid_from_generic_MPAS_grid.py | 65 +++--- landice/mesh_tools_li/define_cullMask.py | 39 ++-- .../interpolate_to_mpasli_grid.py | 187 +++++++++--------- 
.../mark_domain_boundaries_dirichlet.py | 11 +- .../mark_horns_for_culling.py | 11 +- 5 files changed, 164 insertions(+), 149 deletions(-) diff --git a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py index fdf854f39..70f6a0db7 100755 --- a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py +++ b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py @@ -3,6 +3,9 @@ # I've only tested it with a periodic_hex grid, but it should work with any MPAS grid. # Currently variable attributes are not copied (and periodic_hex does not assign any, so this is ok). If variable attributes are added to periodic_hex, this script should be modified to copy them (looping over dir(var), skipping over variable function names "assignValue", "getValue", "typecode"). +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import sys, numpy from netCDF4 import Dataset from optparse import OptionParser @@ -11,7 +14,7 @@ sphere_radius = 6.37122e6 # earth radius, if needed -print "** Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("** Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.add_option("-i", "--in", dest="fileinName", help="input filename. Defaults to 'grid.nc'", metavar="FILENAME") parser.add_option("-o", "--out", dest="fileoutName", help="output filename. Defaults to 'landice_grid.nc'", metavar="FILENAME") @@ -26,14 +29,14 @@ options, args = parser.parse_args() if not options.fileinName: - print "No input filename specified, so using 'grid.nc'." 
+ print("No input filename specified, so using 'grid.nc'.") options.fileinName = 'grid.nc' else: - print "Input file is:", options.fileinName + print("Input file is: {}".format(options.fileinName)) if not options.fileoutName: - print "No output filename specified, so using 'landice_grid.nc'." + print("No output filename specified, so using 'landice_grid.nc'.") options.fileoutName = 'landice_grid.nc' -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # Get the input file filein = Dataset(options.fileinName,'r') @@ -47,12 +50,12 @@ # ============================================ # Do this first as doing it last is slow for big files since adding # attributes forces the contents to get reorganized. -print "---- Copying global attributes from input file to output file ----" +print("---- Copying global attributes from input file to output file ----") for name in filein.ncattrs(): # sphere radius needs to be set to that of the earth if on a sphere if name == 'sphere_radius' and getattr(filein, 'on_a_sphere') == "YES ": setattr(fileout, 'sphere_radius', sphere_radius) - print 'Set global attribute sphere_radius = ', str(sphere_radius) + print('Set global attribute sphere_radius = {}'.format(sphere_radius)) elif name =='history': # Update history attribute of netCDF file newhist = '\n'.join([getattr(filein, 'history'), ' '.join(sys.argv[:]) ] ) @@ -60,14 +63,14 @@ else: # Otherwise simply copy the attr setattr(fileout, name, getattr(filein, name) ) - print 'Copied global attribute ', name, '=', getattr(filein, name) + print('Copied global attribute {} = {}'.format(name, getattr(filein, name))) # Update history attribute of netCDF file if we didn't above if not hasattr(fileout, 'history'): setattr(fileout, 'history', sys.argv[:] ) fileout.sync() -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # ============================================ @@ 
-78,7 +81,7 @@ # It may be better to list them explicitly as I do for the grid variables, # but this way ensures they all get included and is easier. # Note: The UNLIMITED time dimension will return a dimension value of None with Scientific.IO. This is what is supposed to happen. See below for how to deal with assigning values to a variable with a unlimited dimension. Special handling is needed with the netCDF module. -print "---- Copying dimensions from input file to output file ----" +print("---- Copying dimensions from input file to output file ----") for dim in filein.dimensions.keys(): if dim == 'nTracers': pass # Do nothing - we don't want this dimension @@ -91,12 +94,12 @@ if options.levels is None: # If nVertLevels is in the input file, and a value for it was not # specified on the command line, then use the value from the file (do nothing here) - print "Using nVertLevels from the intput file:", len(filein.dimensions[dim]) + print("Using nVertLevels from the intput file: {}".format(len(filein.dimensions[dim]))) dimvalue = len(filein.dimensions[dim]) else: # if nVertLevels is in the input file, but a value WAS specified # on the command line, then use the command line value - print "Using nVertLevels specified on the command line:", int(options.levels) + print("Using nVertLevels specified on the command line: {}".format(int(options.levels))) dimvalue = int(options.levels) else: dimvalue = len(filein.dimensions[dim]) @@ -105,22 +108,22 @@ # it has not been added to the output file yet. Treat those here. if 'nVertLevels' not in fileout.dimensions: if options.levels is None: - print "nVertLevels not in input file and not specified. Using default value of 10." + print("nVertLevels not in input file and not specified. 
Using default value of 10.") fileout.createDimension('nVertLevels', 10) else: - print "Using nVertLevels specified on the command line:", int(options.levels) + print("Using nVertLevels specified on the command line: {}".format(int(options.levels))) fileout.createDimension('nVertLevels', int(options.levels)) # Also create the nVertInterfaces dimension, even if none of the variables require it. fileout.createDimension('nVertInterfaces', len(fileout.dimensions['nVertLevels']) + 1) # nVertInterfaces = nVertLevels + 1 -print 'Added new dimension nVertInterfaces to output file with value of ' + str(len(fileout.dimensions['nVertInterfaces'])) + '.' +print('Added new dimension nVertInterfaces to output file with value of {}.'.format(len(fileout.dimensions['nVertInterfaces']))) fileout.sync() -print 'Finished creating dimensions in output file.\n' # include an extra blank line here +print('Finished creating dimensions in output file.\n') # include an extra blank line here # ============================================ # Copy over all of the required grid variables to the new file # ============================================ -print "Beginning to copy mesh variables to output file." 
+print("Beginning to copy mesh variables to output file.") vars2copy = ['latCell', 'lonCell', 'xCell', 'yCell', 'zCell', 'indexToCellID', 'latEdge', 'lonEdge', 'xEdge', 'yEdge', 'zEdge', 'indexToEdgeID', 'latVertex', 'lonVertex', 'xVertex', 'yVertex', 'zVertex', 'indexToVertexID', 'cellsOnEdge', 'nEdgesOnCell', 'nEdgesOnEdge', 'edgesOnCell', 'edgesOnEdge', 'weightsOnEdge', 'dvEdge', 'dcEdge', 'angleEdge', 'areaCell', 'areaTriangle', 'cellsOnCell', 'verticesOnCell', 'verticesOnEdge', 'edgesOnVertex', 'cellsOnVertex', 'kiteAreasOnVertex'] # Add these optional fields if they exist in the input file for optionalVar in ['meshDensity', 'gridSpacing', 'cellQuality', 'triangleQuality', 'triangleAngleQuality', 'obtuseTriangle']: @@ -128,8 +131,8 @@ vars2copy.append(optionalVar) for varname in vars2copy: - print "-", -print "|" + print("-"), +print("|") for varname in vars2copy: thevar = filein.variables[varname] datatype = thevar.dtype @@ -146,8 +149,8 @@ del newVar, thevar sys.stdout.write("* "); sys.stdout.flush() fileout.sync() -print "|" -print "Finished copying mesh variables to output file.\n" +print("|") +print("Finished copying mesh variables to output file.\n") # ============================================ # Create the land ice variables (all the shallow water vars in the input file can be ignored) @@ -170,7 +173,7 @@ layerInterfaces[k] = 4.0/3.0 * (1.0 - ((k+1.0-1.0)/(nInterfaces-1.0) + 1.0)**-2) for k in range(nVertLevels): layerThicknessFractionsData[k] = layerInterfaces[k+1] - layerInterfaces[k] - print "Setting layerThicknessFractions to:", layerThicknessFractionsData + print("Setting layerThicknessFractions to: {}".format(layerThicknessFractionsData)) else: sys.exit('Unknown method for vertical spacing method (--vert): '+options.vertMethod) @@ -192,17 +195,17 @@ newvar[:] = numpy.zeros(newvar.shape) newvar = fileout.createVariable('floatingBasalMassBal', datatype, ('Time', 'nCells')) newvar[:] = numpy.zeros(newvar.shape) -print 'Added default variables: 
thickness, temperature, bedTopography, sfcMassBal, floatingBasalMassBal' +print('Added default variables: thickness, temperature, bedTopography, sfcMassBal, floatingBasalMassBal') if options.beta: newvar = fileout.createVariable('beta', datatype, ('Time', 'nCells')) newvar[:] = 1.0e8 # Give a default beta that won't have much sliding. - print 'Added optional variable: beta' + print('Added optional variable: beta') if options.effecpress: newvar = fileout.createVariable('effectivePressure', datatype, ('Time', 'nCells')) newvar[:] = 1.0e8 # Give a default effective pressure that won't have much sliding. - print 'Added optional variable: effectivePressure' + print('Added optional variable: effectivePressure') if options.dirichlet: newvar = fileout.createVariable('dirichletVelocityMask', datatypeInt, ('Time', 'nCells', 'nVertInterfaces')) @@ -211,7 +214,7 @@ newvar[:] = 0.0 newvar = fileout.createVariable('uReconstructY', datatype, ('Time', 'nCells', 'nVertInterfaces',)) newvar[:] = 0.0 - print 'Added optional dirichlet variables: dirichletVelocityMask, uReconstructX, uReconstructY' + print('Added optional dirichlet variables: dirichletVelocityMask, uReconstructX, uReconstructY') if options.thermal: newvar = fileout.createVariable('temperature', datatype, ('Time', 'nCells', 'nVertLevels')) @@ -220,7 +223,7 @@ newvar[:] = 273.15 # Give default value for temperate ice newvar = fileout.createVariable('basalHeatFlux', datatype, ('Time', 'nCells')) newvar[:] = 0.0 # Default to none (W/m2) - print 'Added optional thermal variables: temperature, surfaceAirTemperature, basalHeatFlux' + print('Added optional thermal variables: temperature, surfaceAirTemperature, basalHeatFlux') if options.hydro: newvar = fileout.createVariable('waterThickness', datatype, ('Time', 'nCells')) @@ -237,7 +240,7 @@ newvar[:] = 0.0 newvar = fileout.createVariable('waterFluxMask', 'i', ('Time', 'nEdges')) newvar[:] = 0.0 - print 'Added optional hydro variables: waterThickness, tillWaterThickness, 
meltInput, frictionAngle, waterPressure, waterFluxMask' + print('Added optional hydro variables: waterThickness, tillWaterThickness, meltInput, frictionAngle, waterPressure, waterFluxMask') if options.obs: newvar = fileout.createVariable('observedSurfaceVelocityX', datatype, ('Time', 'nCells')) @@ -252,7 +255,7 @@ newvar[:] = 0.0 newvar = fileout.createVariable('thicknessUncertainty', datatype, ('Time', 'nCells')) newvar[:] = 0.0 - print 'Added optional velocity optimization variables: observedSurfaceVelocityX, observedSurfaceVelocityY, observedSurfaceVelocityUncertainty, observedThicknessTendency, observedThicknessTendencyUncertainty, thicknessUncertainty' + print('Added optional velocity optimization variables: observedSurfaceVelocityX, observedSurfaceVelocityY, observedSurfaceVelocityUncertainty, observedThicknessTendency, observedThicknessTendencyUncertainty, thicknessUncertainty') # Update history attribute of netCDF file thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) @@ -262,10 +265,10 @@ newhist = thiscommand setattr(fileout, 'history', newhist ) -print "Completed creating land ice variables in new file. Now syncing to file." +print("Completed creating land ice variables in new file. Now syncing to file.") fileout.sync() filein.close() fileout.close() -print '\n** Successfully created ' + options.fileoutName + '.**' +print('\n** Successfully created {}.**'.format(options.fileoutName)) diff --git a/landice/mesh_tools_li/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py index 6406df82a..fb707afd3 100755 --- a/landice/mesh_tools_li/define_cullMask.py +++ b/landice/mesh_tools_li/define_cullMask.py @@ -2,6 +2,9 @@ # Script for adding a field named cullMask to an MPAS land ice grid for use with the MpasCellCuller tool that actually culls the unwanted cells. 
# Matt Hoffman, February 28, 2013 +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import sys import numpy as np from optparse import OptionParser @@ -9,7 +12,7 @@ from datetime import datetime -print "** Gathering information." +print("** Gathering information.") parser = OptionParser() parser.add_option("-f", "--file", dest="file", help="grid file to modify; default: landice_grid.nc", metavar="FILE") parser.add_option("-m", "--method", dest="method", help="method to use for marking cells to cull. Supported methods: 'noIce', 'numCells', 'distance', 'radius', 'edgeFraction'", metavar="METHOD") @@ -19,7 +22,7 @@ options, args = parser.parse_args() if not options.file: - print "No grid filename provided. Using landice_grid.nc." + print("No grid filename provided. Using landice_grid.nc.") options.file = "landice_grid.nc" if not options.method: @@ -46,10 +49,10 @@ thicknessMissing = True try: thickness = f.variables['thickness'][0,:] - print 'Using thickness field at time 0' + print('Using thickness field at time 0') thicknessMissing = False except: - print "The field 'thickness' is not available. Some culling methods will not work." + print("The field 'thickness' is not available. 
Some culling methods will not work.") # ===== Various methods for defining the mask ==== @@ -57,7 +60,7 @@ # ========= # only keep cells with ice if maskmethod == 'noIce': - print "Method: remove cells without ice" + print("Method: remove cells without ice") if thicknessMissing: sys.exit("Unable to perform 'numCells' method because thickness field was missing.") @@ -66,13 +69,13 @@ # ========= # add a buffer of X cells around the ice elif maskmethod == 'numCells': - print "Method: remove cells beyond a certain number of cells from existing ice" + print("Method: remove cells beyond a certain number of cells from existing ice") if thicknessMissing: sys.exit("Unable to perform 'numCells' method because thickness field was missing.") buffersize=int(options.numCells) # number of cells to expand - print "Using a buffer of {} cells".format(buffersize) + print("Using a buffer of {} cells".format(buffersize)) keepCellMask = np.copy(cullCell[:]) keepCellMask[:] = 0 @@ -81,17 +84,17 @@ # mark the cells with ice first keepCellMask[thickness > 0.0] = 1 - print 'Num of cells with ice:', sum(keepCellMask) + print('Num of cells with ice: {}'.format(sum(keepCellMask))) for i in range(buffersize): - print 'Starting buffer loop ', i+1 + print('Starting buffer loop {}'.format(i+1)) keepCellMaskNew = np.copy(keepCellMask) # make a copy to edit that can be edited without changing the original ind = np.nonzero(keepCellMask == 0)[0] for i in range(len(ind)): iCell = ind[i] keepCellMaskNew[iCell] = keepCellMask[cellsOnCell[iCell,:nEdgesOnCell[iCell]]-1].max() # if any neighbor has a value of 1, then 1 will get assigned to iCell. 
keepCellMask = np.copy(keepCellMaskNew) # after we've looped over all cells assign the new mask to the variable we need (either for another loop around the domain or to write out) - print ' Num of cells to keep:', sum(keepCellMask) + print(' Num of cells to keep: {}'.format(sum(keepCellMask))) # Now convert the keepCellMask to the cullMask cullCell[:] = np.absolute(keepCellMask[:]-1) # Flip the mask for which ones to cull @@ -100,13 +103,13 @@ # remove cells beyond a certain distance of ice extent elif maskmethod == 'distance': - print "Method: remove cells beyond a certain distance from existing ice" + print("Method: remove cells beyond a certain distance from existing ice") if thicknessMissing: sys.exit("Unable to perform 'numCells' method because thickness field was missing.") dist=float(options.distance) - print "Using a buffer distance of {} km".format(dist) + print("Using a buffer distance of {} km".format(dist)) dist = dist * 1000.0 # convert to m keepCellMask = np.copy(cullCell[:]) @@ -118,7 +121,7 @@ # mark the cells with ice first keepCellMask[thickness > 0.0] = 1 - print 'Num of cells with ice:', sum(keepCellMask) + print('Num of cells with ice: {}'.format(sum(keepCellMask))) # find list of margin cells iceCells = np.nonzero(keepCellMask == 1)[0] @@ -138,7 +141,7 @@ ind = np.nonzero(((xCell-xCell[iCell])**2 + (yCell-yCell[iCell])**2)**0.5 < dist)[0] keepCellMask[ind] = 1 - print ' Num of cells to keep:', sum(keepCellMask) + print(' Num of cells to keep:'.format(sum(keepCellMask))) # Now convert the keepCellMask to the cullMask cullCell[:] = np.absolute(keepCellMask[:]-1) # Flip the mask for which ones to cull @@ -147,14 +150,14 @@ # ========= # cut out beyond some radius (good for the dome) elif maskmethod == 'radius': - print "Method: remove cells beyond a radius" + print("Method: remove cells beyond a radius") ind = np.nonzero( (xCell[:]**2 + yCell[:]**2)**0.5 > 26000.0 ) cullCell[ind] = 1 # ========= # cut off some fraction of the height/width on all 
4 sides - useful for cleaning up a mesh from periodic_general elif maskmethod == 'edgeFraction': - print "Method: remove a fraction from all 4 edges" + print("Method: remove a fraction from all 4 edges") frac=0.025 cullCell[:] = 0 @@ -175,7 +178,7 @@ # ========= -print 'Num of cells to cull:', sum(cullCell[:]) +print('Num of cells to cull: {}'.format(sum(cullCell[:]))) # ========= # Try to add the new variable @@ -205,6 +208,6 @@ plt.show() f.close() -print "cullMask generation complete." +print("cullMask generation complete.") diff --git a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py index c16a9cbf5..6c7128e28 100755 --- a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py +++ b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py @@ -12,6 +12,9 @@ For MPAS input files only barycentric interpolation is supported. ''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import sys import numpy as np import netCDF4 @@ -23,7 +26,7 @@ from datetime import datetime -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)\n") parser = OptionParser() parser.description = __doc__ parser.add_option("-s", "--source", dest="inputFile", help="name of source (input) file. 
Can be either CISM format or MPASLI format.", default="cism.nc", metavar="FILENAME") @@ -36,14 +39,14 @@ option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() -print " Source file: " + options.inputFile -print " Destination MPASLI file to be modified: " + options.mpasFile +print(" Source file: {}".format(options.inputFile)) +print(" Destination MPASLI file to be modified: {}".format(options.mpasFile)) -print " Interpolation method to be used: " + options.interpType -print " (b=bilinear, d=barycentric, e=esmf)" +print(" Interpolation method to be used: {}".format(options.interpType)) +print(" (b=bilinear, d=barycentric, e=esmf)") if options.weightFile and options.interpType == 'e': - print " Interpolation will be performed using ESMF-weights method, where possible, using weights file: " + options.weightFile + print(" Interpolation will be performed using ESMF-weights method, where possible, using weights file: {}".format(options.weightFile)) #---------------------------- # Get weights from file wfile = netCDF4.Dataset(options.weightFile, 'r') @@ -53,7 +56,7 @@ wfile.close() #---------------------------- -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output #---------------------------- @@ -109,11 +112,11 @@ def BilinearInterp(Value, gridType): ygrid = len(y) - 2 elif ygrid < 0: ygrid = 0 - #print xgrid, ygrid, i + #print(xgrid, ygrid, i) ValueCell[i] = Value[ygrid,xgrid] * (x[xgrid+1] - xCell[i]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + \ Value[ygrid+1,xgrid] * (x[xgrid+1] - xCell[i]) * (yCell[i] - y[ygrid]) / (dx * dy) + \ Value[ygrid,xgrid+1] * (xCell[i] - x[xgrid]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + \ - Value[ygrid+1,xgrid+1] * (xCell[i] - x[xgrid]) * (yCell[i] - y[ygrid]) / (dx * dy) + Value[ygrid+1,xgrid+1] * (xCell[i] - x[xgrid]) * (yCell[i] - y[ygrid]) / (dx * dy) return ValueCell #---------------------------- @@ -124,31 +127,31 @@ def 
delaunay_interp_weights(xy, uv, d=2): uv = output (MPSALI) x,y coords ''' - #print "scipy version=", scipy.version.full_version + #print("scipy version=", scipy.version.full_version) if xy.shape[0] > 2**24-1: - print "WARNING: The source file contains more than 2^24-1 (16,777,215) points due to a limitation in older versions of Qhull (see: https://mail.scipy.org/pipermail/scipy-user/2015-June/036598.html). Delaunay creation may fail if Qhull being linked by scipy.spatial is older than v2015.0.1 2015/8/31." + print("WARNING: The source file contains more than 2^24-1 (16,777,215) points due to a limitation in older versions of Qhull (see: https://mail.scipy.org/pipermail/scipy-user/2015-June/036598.html). Delaunay creation may fail if Qhull being linked by scipy.spatial is older than v2015.0.1 2015/8/31.") tri = scipy.spatial.Delaunay(xy) - print " Delaunay triangulation complete." + print(" Delaunay triangulation complete.") simplex = tri.find_simplex(uv) - print " find_simplex complete." + print(" find_simplex complete.") vertices = np.take(tri.simplices, simplex, axis=0) - print " identified vertices." + print(" identified vertices.") temp = np.take(tri.transform, simplex, axis=0) - print " np.take complete." + print(" np.take complete.") delta = uv - temp[:, d] bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) - print " calculating bary complete." + print(" calculating bary complete.") wts = np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) # Now figure out if there is any extrapolation. # Find indices to points of output file that are outside of convex hull of input points outsideInd = np.nonzero(tri.find_simplex(uv)<0) outsideCoords = uv[outsideInd] - #print outsideInd + #print(outsideInd) nExtrap = len(outsideInd[0]) if nExtrap > 0: - print " Found {} points requiring extrapolation. Using nearest neighbor extrapolation for those.".format(nExtrap) + print(" Found {} points requiring extrapolation. 
Using nearest neighbor extrapolation for those.".format(nExtrap)) # Now find nearest neighbor for each outside point # Use KDTree of input points @@ -208,29 +211,29 @@ def interpolate_field(MPASfieldName): else: InputField = inputFile.variables[InputFieldName][:] - print ' Input field %s min/max:'%InputFieldName, InputField.min(), InputField.max() + print(' Input field {} min/max: {} {}'.format(InputFieldName, InputField.min(), InputField.max())) # Call the appropriate routine for actually doing the interpolation if options.interpType == 'b': - print " ...Interpolating to %s using built-in bilinear method..." % MPASfieldName + print(" ...Interpolating to {} using built-in bilinear method...".format(MPASfieldName)) MPASfield = BilinearInterp(InputField, fieldInfo[MPASfieldName]['gridType']) elif options.interpType == 'd': - print " ...Interpolating to %s using barycentric method..." % MPASfieldName + print(" ...Interpolating to {} using barycentric method...".format(MPASfieldName)) MPASfield = delaunay_interpolate(InputField, fieldInfo[MPASfieldName]['gridType']) elif options.interpType == 'e': - print " ...Interpolating to %s using ESMF-weights method..." 
% MPASfieldName + print(" ...Interpolating to {} using ESMF-weights method...".format(MPASfieldName)) MPASfield = ESMF_interp(InputField) else: sys.exit('ERROR: Unknown interpolation method specified') - print ' interpolated MPAS %s min/max:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' interpolated MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: MPASfield *= fieldInfo[MPASfieldName]['scalefactor'] - print ' scaled MPAS %s min/max:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' scaled MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) if fieldInfo[MPASfieldName]['offset'] != 0.0: MPASfield += fieldInfo[MPASfieldName]['offset'] - print ' offset MPAS %s min/max:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' offset MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) return MPASfield @@ -272,58 +275,58 @@ def interpolate_field_with_layers(MPASfieldName): for z in range(inputVerticalDimSize): if filetype=='cism': - print ' Input layer %s, layer %s min/max:'%(z,InputFieldName), InputField[z,:,:].min(), InputField[z,:,:].max() + print(' Input layer {}, layer {} min/max: {} {}'.format(z, InputFieldName, InputField[z,:,:].min(), InputField[z,:,:].max())) elif filetype=='mpas': - print ' Input layer %s, layer %s min/max:'%(z,InputFieldName), InputField[:,z].min(), InputField[z,:].max() + print(' Input layer {}, layer {} min/max: {} {}'.format(z, InputFieldName, InputField[:,z].min(), InputField[z,:].max())) # Call the appropriate routine for actually doing the interpolation if options.interpType == 'b': - print " ...Layer %s, Interpolating this layer to MPAS grid using built-in bilinear method..." 
% (z) + print(" ...Layer {}, Interpolating this layer to MPAS grid using built-in bilinear method...".format(z)) mpas_grid_input_layers[z,:] = BilinearInterp(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) elif options.interpType == 'd': - print " ...Layer %s, Interpolating this layer to MPAS grid using built-in barycentric method..." % (z) + print(" ...Layer {}, Interpolating this layer to MPAS grid using built-in barycentric method...".format(z)) if filetype=='cism': mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) elif filetype=='mpas': mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[:,z], fieldInfo[MPASfieldName]['gridType']) elif options.interpType == 'e': - print " ...Layer %s, Interpolating this layer to MPAS grid using ESMF-weights method..." % (z) + print(" ...Layer {}, Interpolating this layer to MPAS grid using ESMF-weights method...".format(z)) mpas_grid_input_layers[z,:] = ESMF_interp(InputField[z,:,:]) else: sys.exit('ERROR: Unknown interpolation method specified') - print ' interpolated MPAS %s, layer %s min/max:'%(MPASfieldName, z), mpas_grid_input_layers[z,:].min(), mpas_grid_input_layers[z,:].max() + print(' interpolated MPAS {}, layer {} min/max: {} {}'.format(MPASfieldName, z, mpas_grid_input_layers[z,:].min(), mpas_grid_input_layers[z,:].max())) if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: mpas_grid_input_layers *= fieldInfo[MPASfieldName]['scalefactor'] - print ' scaled MPAS %s on CISM vertical layers, min/max:'%MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max() + print(' scaled MPAS {} on CISM vertical layers, min/max: {} {}'.format(MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max())) if fieldInfo[MPASfieldName]['offset'] != 0.0: mpas_grid_input_layers += fieldInfo[MPASfieldName]['offset'] - print ' offset MPAS %s on CISM vertical layers, min/max:'%MPASfieldName, mpas_grid_input_layers.min(), 
mpas_grid_input_layers.max() + print(' offset MPAS {} on CISM vertical layers, min/max: {} {}'.format(MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max())) # ------------ # Now interpolate vertically - print " Input layer field {} has layers: {}".format(inputFile.variables[InputFieldName].dimensions[1], input_layers) - print " MPAS layer centers are: {}".format(mpasLayerCenters) + print(" Input layer field {} has layers: {}".format(inputFile.variables[InputFieldName].dimensions[1], input_layers)) + print(" MPAS layer centers are: {}".format(mpasLayerCenters)) if input_layers.min() > mpasLayerCenters.min(): # This fix ensures that interpolation is done when input_layers.min is very slightly greater than mpasLayerCenters.min if input_layers.min() - 1.0e-6 < mpasLayerCenters.min(): - print 'input_layers.min =', '{0:.16f}'.format(input_layers.min()) - print 'mpasLayerCenters.min =', '{0:.16f}'.format(mpasLayerCenters.min()) + print('input_layers.min = {0:.16f}'.format(input_layers.min())) + print('mpasLayerCenters.min = {0:.16f}'.format(mpasLayerCenters.min())) input_layers[0] = input_layers[0] - 1.0e-6 - print 'New input_layers.min =', '{0:.16f}'.format(input_layers.min()) + print('New input_layers.min = {0:.16f}'.format(input_layers.min())) else: - print "WARNING: input_layers.min() > mpasLayerCenters.min() Values at the first level of input_layers will be used for all MPAS layers in this region!" 
+ print("WARNING: input_layers.min() > mpasLayerCenters.min() Values at the first level of input_layers will be used for all MPAS layers in this region!") if input_layers.max() < mpasLayerCenters.max(): # This fix ensures that interpolation is done when input_layers.max is very slightly smaller than mpasLayerCenters.max if input_layers.max() + 1.0e-6 > mpasLayerCenters.min(): - print 'input_layers.max =', '{0:.16f}'.format(input_layers.max()) - print 'mpasLayerCenters.max =', '{0:.16f}'.format(mpasLayerCenters.max()) + print('input_layers.max = {0:.16f}'.format(input_layers.max())) + print('mpasLayerCenters.max = {0:.16f}'.format(mpasLayerCenters.max())) input_layers[inputVerticalDimSize-1] = input_layers[inputVerticalDimSize-1] + 1.0e-6 - print 'New input_layers.max =', '{0:.16f}'.format(input_layers.max()) - print 'input_layers = {}'.format(input_layers) + print('New input_layers.max = {0:.16f}'.format(input_layers.max())) + print('input_layers = {}'.format(input_layers)) else: - print "WARNING: input_layers.max() < mpasLayerCenters.max() Values at the last level of input_layers will be used for all MPAS layers in this region!" + print("WARNING: input_layers.max() < mpasLayerCenters.max() Values at the last level of input_layers will be used for all MPAS layers in this region!") MPASfield = vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers) - print ' MPAS %s on MPAS vertical layers, min/max of all layers:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' MPAS {} on MPAS vertical layers, min/max of all layers: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) del mpas_grid_input_layers @@ -345,8 +348,8 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): -print "==================" -print 'Gathering coordinate information from input and output files.' 
+print("==================") +print('Gathering coordinate information from input and output files.') # Open the output file, get needed dimensions & variables @@ -355,7 +358,7 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): try: nVertLevels = len(MPASfile.dimensions['nVertLevels']) except: - print 'Output file is missing the dimension nVertLevels. Might not be a problem.' + print('Output file is missing the dimension nVertLevels. Might not be a problem.') try: # 1d vertical fields @@ -365,20 +368,20 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): mpasLayerCenters[0] = 0.5 * layerThicknessFractions[0] for k in range(nVertLevels)[1:]: # skip the first level mpasLayerCenters[k] = mpasLayerCenters[k-1] + 0.5 * layerThicknessFractions[k-1] + 0.5 * layerThicknessFractions[k] - print " Using MPAS layer centers at sigma levels: {}".format(mpasLayerCenters) + print(" Using MPAS layer centers at sigma levels: {}".format(mpasLayerCenters)) except: - print 'Output file is missing the variable layerThicknessFractions. Might not be a problem.' + print('Output file is missing the variable layerThicknessFractions. Might not be a problem.') # '2d' spatial fields on cell centers xCell = MPASfile.variables['xCell'][:] - #print 'xCell min/max:', xCell.min(), xCell.max() + #print('xCell min/max:', xCell.min(), xCell.max() yCell = MPASfile.variables['yCell'][:] - #print 'yCell min/max:', yCell.min(), yCell.max() + #print('yCell min/max:', yCell.min(), yCell.max() nCells = len(MPASfile.dimensions['nCells']) except: sys.exit('Error: The output grid file specified is either missing or lacking needed dimensions/variables.') -print "==================\n" +print("==================\n") @@ -398,48 +401,48 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): try: level = len(inputFile.dimensions['level']) except: - print ' Input file is missing the dimension level. Might not be a problem.' 
+ print(' Input file is missing the dimension level. Might not be a problem.') try: stagwbndlevel = len(inputFile.dimensions['stagwbndlevel']) except: - print ' Input file is missing the dimension stagwbndlevel. Might not be a problem.' + print(' Input file is missing the dimension stagwbndlevel. Might not be a problem.') # Get CISM location variables if they exist try: x1 = inputFile.variables['x1'][:] dx1 = x1[1] - x1[0] - #print 'x1 min/max/dx:', x1.min(), x1.max(), dx1 + #print('x1 min/max/dx:', x1.min(), x1.max(), dx1 y1 = inputFile.variables['y1'][:] dy1 = y1[1] - y1[0] - #print 'y1 min/max/dx:', y1.min(), y1.max(), dy1 + #print('y1 min/max/dx:', y1.min(), y1.max(), dy1 ##x1 = x1 - (x1.max()-x1.min())/2.0 # This was for some shifted CISM grid but should not be used in general. ##y1 = y1 - (y1.max()-y1.min())/2.0 except: - print ' Input file is missing x1 and/or y1. Might not be a problem.' + print(' Input file is missing x1 and/or y1. Might not be a problem.') try: x0 = inputFile.variables['x0'][:] - #print 'x0 min/max:', x0.min(), x0.max() + #print('x0 min/max:', x0.min(), x0.max() y0 = inputFile.variables['y0'][:] - #print 'y0 min/max:', y0.min(), y0.max() + #print('y0 min/max:', y0.min(), y0.max() ##x0 = x0 - (x0.max()-x0.min())/2.0 ##y0 = y0 - (y0.max()-y0.min())/2.0 except: - print ' Input file is missing x0 and/or y0. Might not be a problem.' + print(' Input file is missing x0 and/or y0. 
Might not be a problem.') # Check the overlap of the grids - print '==================' - print 'CISM Input File extents:' - print ' x1 min, max: ', x1.min(), x1.max() - print ' y1 min, max: ', y1.min(), y1.max() - print 'MPAS File extents:' - print ' xCell min, max: ', xCell.min(), xCell.max() - print ' yCell min, max: ', yCell.min(), yCell.max() - print '==================' + print('==================') + print('CISM Input File extents:') + print(' x1 min, max: {} {}'.format(x1.min(), x1.max())) + print(' y1 min, max: {} {}'.format(y1.min(), y1.max())) + print('MPAS File extents:') + print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) + print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) + print('==================') elif filetype == 'mpas': @@ -447,12 +450,12 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): try: nVertLevels = len(inputFile.dimensions['nVertLevels']) except: - print ' Input file is missing the dimension nVertLevels. Might not be a problem.' + print(' Input file is missing the dimension nVertLevels. Might not be a problem.') #try: # nVertInterfaces = len(inputFile.dimensions['nVertInterfaces']) #except: - # print ' Input file is missing the dimension nVertInterfaces. Might not be a problem.' + # print(' Input file is missing the dimension nVertInterfaces. Might not be a problem.' 
# Get MPAS location variables if they exist try: @@ -463,14 +466,14 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): # Check the overlap of the grids - print '==================' - print 'Input MPAS File extents:' - print ' xCell min, max: ', inputxCell.min(), inputxCell.max() - print ' yCell min, max: ', inputyCell.min(), inputyCell.max() - print 'Output MPAS File extents:' - print ' xCell min, max: ', xCell.min(), xCell.max() - print ' yCell min, max: ', yCell.min(), yCell.max() - print '==================' + print('==================') + print('Input MPAS File extents:') + print(' xCell min, max: {} {}'.format(inputxCell.min(), inputxCell.max())) + print(' yCell min, max: {} {}'.format(inputyCell.min(), inputyCell.max())) + print('Output MPAS File extents:') + print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) + print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) + print('==================') if filetype=='mpas' and not options.interpType == 'd': @@ -487,12 +490,12 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): cismXY1[:,0] = Yi.flatten() cismXY1[:,1] = Xi.flatten() - print '\nBuilding interpolation weights: CISM x1/y1 -> MPAS' + print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') start = time.clock() vtx1, wts1, outsideIndx1, treex1 = delaunay_interp_weights(cismXY1, mpasXY) if len(outsideIndx1) > 0: outsideIndx1 = outsideIndx1[0] # get the list itself - end = time.clock(); print 'done in ', end-start + end = time.clock(); print('done in {}'.format(end-start)) if 'x0' in inputFile.variables and not options.thicknessOnly: # Need to setup separate weights for this grid @@ -501,19 +504,19 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): cismXY0[:,0] = Yi.flatten() cismXY0[:,1] = Xi.flatten() - print 'Building interpolation weights: CISM x0/y0 -> MPAS' + print('Building interpolation weights: CISM x0/y0 -> MPAS') start = time.clock() vtx0, wts0, outsideIndx0, 
treex0 = delaunay_interp_weights(cismXY0, mpasXY) if len(outsideIndx0) > 0: outsideIndx0 = outsideIndx0[0] # get the list itself - end = time.clock(); print 'done in ', end-start + end = time.clock(); print('done in {}'.format(end-start)) elif filetype=='mpas': inputmpasXY= np.vstack((inputxCell[:], inputyCell[:])).transpose() - print 'Building interpolation weights: MPAS in -> MPAS out' + print('Building interpolation weights: MPAS in -> MPAS out') start = time.clock() vtCell, wtsCell, outsideIndcell, treecell = delaunay_interp_weights(inputmpasXY, mpasXY) - end = time.clock(); print 'done in ', end-start + end = time.clock(); print('done in {}'.format(end-start)) #---------------------------- # Map Input-Output field names - add new fields here as needed @@ -561,18 +564,18 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): #---------------------------- - + #---------------------------- # try each field. If it exists in the input file, it will be copied. If not, it will be skipped. for MPASfieldName in fieldInfo: - print '\n## %s ##'%MPASfieldName + print('\n## {} ##'.format(MPASfieldName)) if not MPASfieldName in MPASfile.variables: - print " Warning: Field '{}' is not in the destination file. Skipping.".format(MPASfieldName) + print(" Warning: Field '{}' is not in the destination file. Skipping.".format(MPASfieldName)) continue # skip the rest of this iteration of the for loop over variables if not fieldInfo[MPASfieldName]['InputName'] in inputFile.variables: - print " Warning: Field '{}' is not in the source file. Skipping.".format(fieldInfo[MPASfieldName]['InputName']) + print(" Warning: Field '{}' is not in the source file. 
Skipping.".format(fieldInfo[MPASfieldName]['InputName'])) continue # skip the rest of this iteration of the for loop over variables start = time.clock() @@ -580,12 +583,12 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): MPASfield = interpolate_field_with_layers(MPASfieldName) else: MPASfield = interpolate_field(MPASfieldName) - end = time.clock(); print ' interpolation done in ', end-start + end = time.clock(); print(' interpolation done in {}'.format(end-start)) # Don't allow negative thickness. if MPASfieldName == 'thickness' and MPASfield.min() < 0.0: MPASfield[MPASfield < 0.0] = 0.0 - print ' removed negative thickness, new min/max:', MPASfield.min(), MPASfield.max() + print(' removed negative thickness, new min/max: {} {}'.format(MPASfield.min(), MPASfield.max())) # Now insert the MPAS field into the file. if 'Time' in MPASfile.variables[MPASfieldName].dimensions: @@ -607,4 +610,4 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): inputFile.close() MPASfile.close() -print '\nInterpolation completed.' +print('\nInterpolation completed.') diff --git a/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py b/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py index 3481b529b..bd826aabd 100755 --- a/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py +++ b/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py @@ -3,13 +3,16 @@ This script marks all of the boundary cells in a domain as Dirichlet velocity boundaries. ''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import netCDF4 import numpy as np from optparse import OptionParser from datetime import datetime import sys -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" +print("== Gathering information. (Invoke with --help for more details. 
All arguments are optional)\n") parser = OptionParser() parser.description = __doc__ parser.add_option("-f", "--file", dest="inputFile", help="name of file to be modified.", default="landice_grid.nc", metavar="FILENAME") @@ -20,8 +23,8 @@ options, args = parser.parse_args() -print " Input file: " + options.inputFile -print " Time level: {}".format(options.time) +print(" Input file: {}".format(options.inputFile)) +print(" Time level: {}".format(options.time)) f=netCDF4.Dataset(options.inputFile, 'r+') nCells = len(f.dimensions['nCells']) @@ -47,4 +50,4 @@ f.close() -print '\nMarking boundary cells completed.' +print('\nMarking boundary cells completed.') diff --git a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py index 69caeccf7..afd87972c 100755 --- a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py +++ b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py @@ -9,6 +9,9 @@ otherwise it creates a new field. ''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import sys import numpy as np import netCDF4 @@ -16,7 +19,7 @@ from datetime import datetime -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)\n") parser = OptionParser() parser.description = __doc__ parser.add_option("-f", "--file", dest="inputFile", help="Name of file to be processed.", default="grid.nc", metavar="FILENAME") @@ -25,7 +28,7 @@ option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() -print " File to be modified: " + options.inputFile +print(" File to be modified: " + options.inputFile) # Open file and get needed fields. 
@@ -63,5 +66,5 @@ inputFile.close() -print '\n{} "horn" locations have been marked in the field cullCell.'.format(nHorns) -print "Remember to use MpasCellCuller.x to actually remove them!" +print('\n{} "horn" locations have been marked in the field cullCell.'.format(nHorns)) +print("Remember to use MpasCellCuller.x to actually remove them!") From f00466abca4eefeb77f93fcc54e850064d5b13fc Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 22:53:15 -0700 Subject: [PATCH 108/180] Remove a tab (indentation should be with spaces) --- landice/mesh_tools_li/define_cullMask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/landice/mesh_tools_li/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py index fb707afd3..971a13fc6 100755 --- a/landice/mesh_tools_li/define_cullMask.py +++ b/landice/mesh_tools_li/define_cullMask.py @@ -22,7 +22,7 @@ options, args = parser.parse_args() if not options.file: - print("No grid filename provided. Using landice_grid.nc.") + print("No grid filename provided. 
Using landice_grid.nc.") options.file = "landice_grid.nc" if not options.method: From 8ed7c1a9b443a0570910ea1b3c4832fcb8a5845d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 6 Mar 2019 21:09:41 -0700 Subject: [PATCH 109/180] Fix print statements for python 3 --- .../add_land_locked_cells_to_mask.py | 32 ++++++++++++------- .../widen_transect_edge_masks.py | 5 ++- 2 files changed, 25 insertions(+), 12 deletions(-) diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py index 3ed99c01e..0deb4d978 100755 --- a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -1,11 +1,14 @@ #!/usr/bin/env python """ Name: add_land_locked_cells_to_mask.py -Author: Mark Petersen, Adrian Turner +Author: Mark Petersen, Adrian Turner, Xylar Asay-Davis Find ocean cells that are land-locked, and alter the cell mask so that they are counted as land cells. """ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import os import shutil from netCDF4 import Dataset @@ -65,7 +68,8 @@ def removeFile(fileName): outputMaskFile = Dataset(args.output_mask_filename, "a") landMaskDiagnostic = outputMaskFile.createVariable("landMaskDiagnostic", "i", dimensions=("nCells")) -print "Running add_land_locked_cells_to_mask.py. Total number of cells: ", nCells +print("Running add_land_locked_cells_to_mask.py. Total number of cells: " + "{}".format(nCells)) # use np.array, as simple = makes a pointer landMaskNew = np.array(landMask) @@ -75,7 +79,8 @@ def removeFile(fileName): removableCellIndex = np.zeros(nCells, dtype="i") nRemovableCells = 0 -print "Step 1: Searching for land-locked cells. Remove cells that only have isolated active edges." +print("Step 1: Searching for land-locked cells. 
Remove cells that only have " + "isolated active edges.") landLockedCounter = 0 for iCell in range(nCells): landMaskDiagnostic[iCell] = landMask[iCell] @@ -101,9 +106,10 @@ def removeFile(fileName): landMaskDiagnostic[iCell] = 2 landMask[:] = landMaskNew[:] -print " Number of landLocked cells: ", landLockedCounter +print(" Number of landLocked cells: {}".format(landLockedCounter)) -print "Step 2: Searching for land-locked cells. Remove cells that have any isolated active edges." +print("Step 2: Searching for land-locked cells. Remove cells that have any " + "isolated active edges.") for iSweep in range(args.nSweeps): landLockedCounter = 0 for iRemovableCell in range(0, nRemovableCells): @@ -130,11 +136,12 @@ def removeFile(fileName): break landMask[:] = landMaskNew[:] - print " Sweep: ", iSweep+1, "Number of landLocked cells removed: ", landLockedCounter + print(" Sweep: {} Number of landLocked cells removed: {}".format( + iSweep+1, landLockedCounter)) if landLockedCounter == 0: break -print "Step 3: Perform flood fill, starting from open ocean." 
+print("Step 3: Perform flood fill, starting from open ocean.") floodFill = np.zeros(nCells, dtype="i") floodableCellIndex = np.zeros(nCells, dtype="i") nFloodableCells = 0 @@ -160,7 +167,7 @@ def removeFile(fileName): else: floodableCellIndex[nFloodableCells] = iCell nFloodableCells += 1 -print " Initial number of flood cells: ", nFloodableCells +print(" Initial number of flood cells: {}".format(nFloodableCells)) # sweep over neighbors of known open ocean points for iSweep in range(0, nCells): @@ -178,7 +185,8 @@ def removeFile(fileName): newFloodCellsThisSweep += 1 break - print " Sweep ", iSweep, " new flood cells this sweep: ", newFloodCellsThisSweep + print(" Sweep {} new flood cells this sweep: {}".format( + iSweep, newFloodCellsThisSweep)) if (newFloodCellsThisSweep == 0): break @@ -188,7 +196,8 @@ def removeFile(fileName): if (floodFill[iCell] == 1): oceanMask[iCell] = 1 -print "Step 4: Searching for land-locked cells, step 3: revert cells with connected active edges" +print("Step 4: Searching for land-locked cells, step 3: revert cells with " + "connected active edges") for iSweep in range(args.nSweeps): landLockedCounter = 0 for iRemovableCell in range(0, nRemovableCells): @@ -216,7 +225,8 @@ def removeFile(fileName): break landMask[:] = landMaskNew[:] - print " Sweep: ", iSweep+1, "Number of land-locked cells returned: ", landLockedCounter + print(" Sweep: {} Number of land-locked cells returned: {}".format( + iSweep+1, landLockedCounter)) if landLockedCounter == 0: break diff --git a/ocean/coastline_alteration/widen_transect_edge_masks.py b/ocean/coastline_alteration/widen_transect_edge_masks.py index e941045c6..4c9025c9c 100755 --- a/ocean/coastline_alteration/widen_transect_edge_masks.py +++ b/ocean/coastline_alteration/widen_transect_edge_masks.py @@ -1,12 +1,15 @@ #!/usr/bin/env python """ Name: widen_transect_edge_masks.py -Author: Mark Petersen +Author: Mark Petersen, Xylar Asay-Davis Alter transects to be at least two cells wide. 
This is used for critical passages, to avoid sea ice blockage. Specifically, mark cells on both sides of each transect edge mask as a water cell. """ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import numpy as np from netCDF4 import Dataset import argparse From 4494c5150f0ada45e802d5af04f2830a9643dc45 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 6 Mar 2019 21:21:41 -0700 Subject: [PATCH 110/180] Fix PEP8 formatting of coastline alteration scripts --- .../add_land_locked_cells_to_mask.py | 95 +++++++++++++------ .../widen_transect_edge_masks.py | 9 +- 2 files changed, 73 insertions(+), 31 deletions(-) diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py index 0deb4d978..29f8bb845 100755 --- a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -15,12 +15,14 @@ import numpy as np import argparse + def removeFile(fileName): try: os.remove(fileName) except OSError: pass + parser = \ argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) @@ -34,14 +36,16 @@ def removeFile(fileName): help="MPAS Mesh filename.", metavar="MESHFILE", required=True) parser.add_argument("-l", "--latitude_threshold", dest="latitude_threshold", - help="Minimum latitude, in degrees, for transect widening.", + help="Minimum latitude, in degrees, for transect " + "widening.", required=False, type=float, default=43.0) parser.add_argument("-n", "--number_sweeps", dest="nSweeps", - help="Maximum number of sweeps to search for land-locked cells.", + help="Maximum number of sweeps to search for land-locked " + "cells.", required=False, type=int, default=10) args = parser.parse_args() -latitude_threshold_radians = args.latitude_threshold*3.1415/180. +latitude_threshold_radians = args.latitude_threshold * 3.1415 / 180. 
# Obtain mesh variables meshFile = Dataset(args.mesh_filename, "r") @@ -54,7 +58,7 @@ def removeFile(fileName): meshFile.close() removeFile(args.output_mask_filename) -shutil.copyfile(args.input_mask_filename,args.output_mask_filename) +shutil.copyfile(args.input_mask_filename, args.output_mask_filename) # Obtain original cell mask from input file inputMaskFile = Dataset(args.input_mask_filename, "r") @@ -66,7 +70,8 @@ def removeFile(fileName): # Open output file outputMaskFile = Dataset(args.output_mask_filename, "a") -landMaskDiagnostic = outputMaskFile.createVariable("landMaskDiagnostic", "i", dimensions=("nCells")) +landMaskDiagnostic = outputMaskFile.createVariable( + "landMaskDiagnostic", "i", dimensions=("nCells")) print("Running add_land_locked_cells_to_mask.py. Total number of cells: " "{}".format(nCells)) @@ -85,7 +90,8 @@ def removeFile(fileName): for iCell in range(nCells): landMaskDiagnostic[iCell] = landMask[iCell] # skip if outside latitude threshold or if this is already a land cell - if abs(latCell[iCell]) < latitude_threshold_radians or landMask[iCell] == 1: + if abs(latCell[iCell]) < latitude_threshold_radians or \ + landMask[iCell] == 1: continue removableCellIndex[nRemovableCells] = iCell nRemovableCells += 1 @@ -93,7 +99,7 @@ def removeFile(fileName): for iEdgeOnCell in range(nEdgesOnCell[iCell]): # check if neighbor is an ocean cell (landMask=0) # subtract 1 to convert 1-base to 0-base: - if landMask[cellsOnCell[iCell, iEdgeOnCell]-1] == 0: + if landMask[cellsOnCell[iCell, iEdgeOnCell] - 1] == 0: activeEdgeSum[iEdgeOnCell] += 1 # % is modulo operator: iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] @@ -119,15 +125,15 @@ def removeFile(fileName): for iEdgeOnCell in range(nEdgesOnCell[iCell]): # check if neighbor is an ocean cell (landMask=0) # subtract 1 to convert 1-base to 0-base: - if landMask[cellsOnCell[iCell, iEdgeOnCell]-1] == 0: + if landMask[cellsOnCell[iCell, iEdgeOnCell] - 1] == 0: # % is modulo operator: iP1 = (iEdgeOnCell + 1) % 
nEdgesOnCell[iCell] iM1 = (iEdgeOnCell - 1) % nEdgesOnCell[iCell] # Is this neighbor's two neighbors to left and right land? # if so, sum of masks is two. # subtract 1 to convert 1-base to 0-base: - if (landMask[cellsOnCell[iCell, iP1]-1] - + landMask[cellsOnCell[iCell, iM1]-1]) == 2: + if (landMask[cellsOnCell[iCell, iP1] - 1] + + landMask[cellsOnCell[iCell, iM1] - 1]) == 2: landLockedCounter += 1 landMaskNew[iCell] = 1 outputMaskFile['regionCellMasks'][iCell, 1] = 1 @@ -137,7 +143,7 @@ def removeFile(fileName): landMask[:] = landMaskNew[:] print(" Sweep: {} Number of landLocked cells removed: {}".format( - iSweep+1, landLockedCounter)) + iSweep + 1, landLockedCounter)) if landLockedCounter == 0: break @@ -146,22 +152,53 @@ def removeFile(fileName): floodableCellIndex = np.zeros(nCells, dtype="i") nFloodableCells = 0 floodFill[:] = -1 -d2r = 3.1415/180.0 +d2r = 3.1415 / 180.0 # init flood fill to 0 for water, -1 for land, 1 for known open ocean regions for iRemovableCell in range(0, nRemovableCells): iCell = removableCellIndex[iRemovableCell] if (landMaskDiagnostic[iCell] == 0): floodFill[iCell] = 0 - if (latCell[iCell] > 84.0*d2r # North Pole - or lonCell[iCell] > 160.0*d2r and lonCell[iCell] < 230.0*d2r and latCell[iCell] > 73.0*d2r # Arctic - or lonCell[iCell] > 315.0*d2r and lonCell[iCell] < 340.0*d2r and latCell[iCell] > 15.0*d2r and latCell[iCell] < 45.0*d2r # North Atlantic - or lonCell[iCell] > 290.0*d2r and lonCell[iCell] < 300.0*d2r and latCell[iCell] > 72.0*d2r and latCell[iCell] < 75.0*d2r # North Atlantic - or lonCell[iCell] > 0.0*d2r and lonCell[iCell] < 10.0*d2r and latCell[iCell] > 70.0*d2r and latCell[iCell] < 75.0*d2r # North Atlantic 2 - or lonCell[iCell] > 150.0*d2r and lonCell[iCell] < 225.0*d2r and latCell[iCell] > 0.0*d2r and latCell[iCell] < 45.0*d2r # North Pacific - or lonCell[iCell] > 0.0*d2r and lonCell[iCell] < 5.0*d2r and latCell[iCell] > -60.0*d2r and latCell[iCell] < 0.0*d2r # South Atlantic - or lonCell[iCell] > 180.0*d2r and 
lonCell[iCell] < 280.0*d2r and latCell[iCell] > -60.0*d2r and latCell[iCell] < -10.0*d2r # South Pacific - or lonCell[iCell] > 0.0*d2r and lonCell[iCell] < 165.0*d2r and latCell[iCell] > -60.0*d2r and latCell[iCell] < -45.0*d2r): # Southern Ocean + if (latCell[iCell] > 84.0 * d2r # North Pole + # Arctic + or lonCell[iCell] > 160.0 * d2r + and lonCell[iCell] < 230.0 * d2r + and latCell[iCell] > 73.0 * d2r + # North Atlantic + or lonCell[iCell] > 315.0 * d2r + and lonCell[iCell] < 340.0 * d2r + and latCell[iCell] > 15.0 * d2r + and latCell[iCell] < 45.0 * d2r + # North Atlantic + or lonCell[iCell] > 290.0 * d2r + and lonCell[iCell] < 300.0 * d2r + and latCell[iCell] > 72.0 * d2r + and latCell[iCell] < 75.0 * d2r + # North Atlantic 2 + or lonCell[iCell] > 0.0 * d2r + and lonCell[iCell] < 10.0 * d2r + and latCell[iCell] > 70.0 * d2r + and latCell[iCell] < 75.0 * d2r + # North Pacific + or lonCell[iCell] > 150.0 * d2r + and lonCell[iCell] < 225.0 * d2r + and latCell[iCell] > 0.0 * d2r + and latCell[iCell] < 45.0 * d2r + # South Atlantic + or lonCell[iCell] > 0.0 * d2r + and lonCell[iCell] < 5.0 * d2r + and latCell[iCell] > -60.0 * d2r + and latCell[iCell] < 0.0 * d2r + # South Pacific + or lonCell[iCell] > 180.0 * d2r + and lonCell[iCell] < 280.0 * d2r + and latCell[iCell] > -60.0 * d2r + and latCell[iCell] < -10.0 * d2r + # Southern Ocean + or lonCell[iCell] > 0.0 * d2r + and lonCell[iCell] < 165.0 * d2r + and latCell[iCell] > -60.0 * d2r + and latCell[iCell] < -45.0 * d2r): floodFill[iCell] = 1 landMaskDiagnostic[iCell] = 5 # indicates seed region else: @@ -178,7 +215,7 @@ def removeFile(fileName): if (floodFill[iCell] == 0): for iCellOnCellSweep in range(0, nEdgesOnCell[iCell]): - iCellNeighbor = cellsOnCell[iCell, iCellOnCellSweep]-1 + iCellNeighbor = cellsOnCell[iCell, iCellOnCellSweep] - 1 if (floodFill[iCellNeighbor] == 1): floodFill[iCell] = 1 @@ -207,26 +244,28 @@ def removeFile(fileName): for iEdgeOnCell in range(nEdgesOnCell[iCell]): # check if neighbor is an 
ocean cell (landMask=0) # subtract 1 to convert 1-base to 0-base: - if oceanMask[cellsOnCell[iCell, iEdgeOnCell]-1] == 1: + if oceanMask[cellsOnCell[iCell, iEdgeOnCell] - 1] == 1: # % is modulo operator: iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] iM1 = (iEdgeOnCell - 1) % nEdgesOnCell[iCell] - # Is either of this neighbor's two neighbors to left and right ocean? + # Is either of this neighbor's two neighbors to left and + # right ocean? # if so, sum of masks is two. # subtract 1 to convert 1-base to 0-base: - if (landMask[cellsOnCell[iCell, iP1]-1] == 0 - or landMask[cellsOnCell[iCell, iM1]-1] == 0): + if (landMask[cellsOnCell[iCell, iP1] - 1] == 0 + or landMask[cellsOnCell[iCell, iM1] - 1] == 0): landLockedCounter += 1 landMaskNew[iCell] = 0 outputMaskFile['regionCellMasks'][iCell, 1] = 0 landMaskDiagnostic[iCell] = 4 oceanMask[iCell] = 1 - # once we remove this cell, we can quit checking over edges + # once we remove this cell, we can quit checking over + # edges break landMask[:] = landMaskNew[:] print(" Sweep: {} Number of land-locked cells returned: {}".format( - iSweep+1, landLockedCounter)) + iSweep + 1, landLockedCounter)) if landLockedCounter == 0: break diff --git a/ocean/coastline_alteration/widen_transect_edge_masks.py b/ocean/coastline_alteration/widen_transect_edge_masks.py index 4c9025c9c..19a4730b2 100755 --- a/ocean/coastline_alteration/widen_transect_edge_masks.py +++ b/ocean/coastline_alteration/widen_transect_edge_masks.py @@ -10,6 +10,7 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals + import numpy as np from netCDF4 import Dataset import argparse @@ -29,7 +30,7 @@ required=False, type=float, default=43.0) args = parser.parse_args() -latitude_threshold_radians = args.latitude_threshold*3.1415/180. +latitude_threshold_radians = args.latitude_threshold * 3.1415 / 180. 
# Obtain mesh variables meshFile = Dataset(args.mesh_filename, "r") @@ -49,7 +50,9 @@ if abs(latEdge[iEdge]) > latitude_threshold_radians: for iTransect in range(nTransects): if transectEdgeMasks[iEdge, iTransect] == 1: - maskFile['transectCellMasks'][cellsOnEdge[iEdge, 0]-1, iTransect] = 1 - maskFile['transectCellMasks'][cellsOnEdge[iEdge, 1]-1, iTransect] = 1 + maskFile['transectCellMasks'][cellsOnEdge[iEdge, 0] - + 1, iTransect] = 1 + maskFile['transectCellMasks'][cellsOnEdge[iEdge, 1] - + 1, iTransect] = 1 maskFile.close() From b9f11918ff9bacff905746d43a6ce60bfda6f8fa Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 14:44:37 -0700 Subject: [PATCH 111/180] Move conda recipe to mesh_tools --- mesh_tools/{mesh_conversion_tools => }/conda/build.sh | 0 mesh_tools/{mesh_conversion_tools => }/conda/meta.yaml | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename mesh_tools/{mesh_conversion_tools => }/conda/build.sh (100%) rename mesh_tools/{mesh_conversion_tools => }/conda/meta.yaml (100%) diff --git a/mesh_tools/mesh_conversion_tools/conda/build.sh b/mesh_tools/conda/build.sh similarity index 100% rename from mesh_tools/mesh_conversion_tools/conda/build.sh rename to mesh_tools/conda/build.sh diff --git a/mesh_tools/mesh_conversion_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml similarity index 100% rename from mesh_tools/mesh_conversion_tools/conda/meta.yaml rename to mesh_tools/conda/meta.yaml From ce3a27f9a54c178eddd58add9a5b2500a5ad9313 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 14:45:01 -0700 Subject: [PATCH 112/180] Add LICENSE file --- LICENSE | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..f6af5ee0a --- /dev/null +++ b/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2013-2018, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047; +Land Ice: LA-CC-13-117) and the University 
Corporation for Atmospheric Research (UCAR). + +All rights reserved. + +LANS is the operator of the Los Alamos National Laboratory under Contract No. +DE-AC52-06NA25396 with the U.S. Department of Energy. UCAR manages the National +Center for Atmospheric Research under Cooperative Agreement ATM-0753581 with the +National Science Foundation. The U.S. Government has rights to use, reproduce, +and distribute this software. NO WARRANTY, EXPRESS OR IMPLIED IS OFFERED BY +LANS, UCAR OR THE GOVERNMENT AND NONE OF THEM ASSUME ANY LIABILITY FOR THE USE +OF THIS SOFTWARE. If software is modified to produce derivative works, such +modified software should be clearly marked, so as not to confuse it with the +version available from LANS and UCAR. + +Additionally, redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1) Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2) Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3) None of the names of LANS, UCAR or the names of its contributors, if any, may +be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. From 6c65a4bd71cce3537c82ecf41b3c653fd234dcef Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 14:45:39 -0700 Subject: [PATCH 113/180] Add mpas_mesh_tools python package Includes planar_hex --- mesh_tools/conda/meta.yaml | 4 +-- mesh_tools/mpas_mesh_tools/__init__.py | 2 ++ mesh_tools/mpas_mesh_tools/__main__.py | 32 ++++++++++++++++++ .../planar_hex.py | 17 +++++----- mesh_tools/setup.py | 33 +++++++++++++++++++ 5 files changed, 78 insertions(+), 10 deletions(-) create mode 100644 mesh_tools/mpas_mesh_tools/__init__.py create mode 100755 mesh_tools/mpas_mesh_tools/__main__.py rename mesh_tools/{planar_hex => mpas_mesh_tools}/planar_hex.py (96%) create mode 100755 mesh_tools/setup.py diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index 377e1e439..2fd4b74aa 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -6,8 +6,8 @@ package: version: '{{ version }}' source: - git_url: https://github.com/MPAS-Dev/MPAS-Tools.git - git_rev: 65ec4939c7419ff868ded1d1d0785ec6c7ec3e80 + git_url: https://github.com/xylar/MPAS-Tools.git + git_rev: 847dc921f0bcb57f3bd49a4a4c4646df2f3afd65 build: number: 0 diff --git a/mesh_tools/mpas_mesh_tools/__init__.py b/mesh_tools/mpas_mesh_tools/__init__.py new file mode 100644 index 000000000..ac825a01c --- /dev/null +++ b/mesh_tools/mpas_mesh_tools/__init__.py @@ -0,0 +1,2 @@ +__version_info__ = (0, 0, 1) +__version__ = '.'.join(str(vi) for vi in __version_info__) diff 
--git a/mesh_tools/mpas_mesh_tools/__main__.py b/mesh_tools/mpas_mesh_tools/__main__.py new file mode 100755 index 000000000..6542cd2b2 --- /dev/null +++ b/mesh_tools/mpas_mesh_tools/__main__.py @@ -0,0 +1,32 @@ +""" +MPAS mesh tools +""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import mpas_mesh_tools + +import argparse + + +def main(): + """ + Entry point for the main script ``mpas_mesh_tools`` + """ + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('-v', '--version', + action='version', + version='mpas_mesh_tools {}'.format( + mpas_mesh_tools.__version__), + help="Show version number and exit") + + args = parser.parse_args() + + +if __name__ == "__main__": + main() + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mesh_tools/planar_hex/planar_hex.py b/mesh_tools/mpas_mesh_tools/planar_hex.py similarity index 96% rename from mesh_tools/planar_hex/planar_hex.py rename to mesh_tools/mpas_mesh_tools/planar_hex.py index ffd4deaf1..1ac039c6e 100755 --- a/mesh_tools/planar_hex/planar_hex.py +++ b/mesh_tools/mpas_mesh_tools/planar_hex.py @@ -9,9 +9,9 @@ import netCDF4 -def make_periodic_planar_hex_mesh(nx, ny, dc, nonperiodic_x, - nonperiodic_y, outFileName=None, - compareWithFileName=None): +def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, + nonperiodic_y, outFileName=None, + compareWithFileName=None): ''' Builds an MPAS periodic, planar hexagonal mesh with the requested dimensions, optionally saving it to a file, and returs it as an @@ -442,9 +442,9 @@ def main(): help='Cells in y direction') parser.add_argument('--dc', dest='dc', type=float, required=True, help='Distance between cell centers in meters') - parser.add_argument('-npx', '--nonperiodic_x', action="store_true", + parser.add_argument('--npx', '--nonperiodic_x', action="store_true", help='non-periodic in x direction') - parser.add_argument('-npy', 
'--nonperiodic_y', action="store_true", + parser.add_argument('--npy', '--nonperiodic_y', action="store_true", help='non-periodic in y direction') parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, required=False, default='grid.nc', @@ -452,12 +452,13 @@ def main(): args = parser.parse_args() - make_periodic_planar_hex_mesh(args.nx, args.ny, args.dc, args.nonperiodic_x, - args.nonperiodic_y, args.outFileName) + make_planar_hex_mesh(args.nx, args.ny, args.dc, + args.nonperiodic_x, args.nonperiodic_y, + args.outFileName) # used this instead to make sure results are exactly identical to # periodic_hex - # make_periodic_planar_hex_mesh( + # make_planar_hex_mesh( # args.nx, args.ny, args.dc, args.outFileName, # compareWithFileName='../periodic_hex/grid.nc') diff --git a/mesh_tools/setup.py b/mesh_tools/setup.py new file mode 100755 index 000000000..2f73c976e --- /dev/null +++ b/mesh_tools/setup.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python + +from setuptools import setup, find_packages + +version = '0.0.1' + +setup(name='mpas_mesh_tools', + version=version, + description='A set of tools for creating and manipulating meshes for the' + ' climate components based on the Model for Prediction ' + 'Across Scales (MPAS) framework', + url='https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools', + author='MPAS-Analysis Developers', + author_email='mpas-developers@googlegroups.com', + license='BSD', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'License :: OSI Approved :: BSD License', + 'Operating System :: OS Independent', + 'Intended Audience :: Science/Research', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Topic :: Scientific/Engineering', + ], + packages=find_packages(), + package_data={}, + install_requires=['numpy', 'xarray', 
'netCDF4'], + entry_points={'console_scripts': + ['planar_hex = mpas_mesh_tools.planar_hex:main']}) From 2d69688d7307e84cc7bfc3fa0224aeca050e3d82 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 6 May 2019 09:03:54 +0200 Subject: [PATCH 114/180] Fix is_periodic if both --npx and --npy are used --- mesh_tools/mpas_mesh_tools/planar_hex.py | 25 +++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/mesh_tools/mpas_mesh_tools/planar_hex.py b/mesh_tools/mpas_mesh_tools/planar_hex.py index 1ac039c6e..0e78a1b52 100755 --- a/mesh_tools/mpas_mesh_tools/planar_hex.py +++ b/mesh_tools/mpas_mesh_tools/planar_hex.py @@ -29,8 +29,8 @@ def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, dc : float The distance in meters between adjacent cell centers. - nonperiodic_x : true/false: non-periodic in x direction - nonperiodic_y : true/false: non-periodic in y direction + nonperiodic_x, nonperiodic_y : bool + is the mesh non-periodic in x and y directions? outFileName : str, optional The name of a file to save the mesh to. The mesh is not saved to a @@ -82,14 +82,24 @@ def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): mesh = xarray.Dataset() - mesh.attrs['is_periodic'] = 'YES' - mesh.attrs['x_period'] = nx * dc - mesh.attrs['y_period'] = ny * dc * numpy.sqrt(3.) / 2. + if nonperiodic_x and nonperiodic_y: + mesh.attrs['is_periodic'] = 'NO' + else: + mesh.attrs['is_periodic'] = 'YES' + + if nonperiodic_x: + mesh.attrs['x_period'] = 0. + else: + mesh.attrs['x_period'] = nx * dc + if nonperiodic_y: + mesh.attrs['y_period'] = 0. + else: + mesh.attrs['y_period'] = ny * dc * numpy.sqrt(3.) / 2. mesh.attrs['dc'] = dc mesh.attrs['on_a_sphere'] = 'NO' - mesh.attrs['sphere_radius'] = 1. + mesh.attrs['sphere_radius'] = 0. if nonperiodic_x: nx = nx + 2 @@ -377,7 +387,8 @@ def compute_coordinates(mesh): mesh['kiteAreasOnVertex'] = \ (('nVertices', 'vertexDegree'), - dc**2 * numpy.sqrt(3.) / 12. 
* numpy.ones((nVertices, vertexDegree), 'f8')) + dc**2 * numpy.sqrt(3.) / 12. * numpy.ones((nVertices, vertexDegree), + 'f8')) mesh['meshDensity'] = (('nCells',), numpy.ones((nCells,), 'f8')) From 30f99454a18bfa6029218bee14c9777cd0ecbcf4 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 14:46:17 -0700 Subject: [PATCH 115/180] Update conda recipe to include mpas_mesh_tools package --- mesh_tools/conda/build.sh | 7 ++++- mesh_tools/conda/meta.yaml | 58 ++++++++++++++++---------------------- 2 files changed, 30 insertions(+), 35 deletions(-) diff --git a/mesh_tools/conda/build.sh b/mesh_tools/conda/build.sh index 79940f0c1..9351fd399 100644 --- a/mesh_tools/conda/build.sh +++ b/mesh_tools/conda/build.sh @@ -3,7 +3,11 @@ set -x set -e -cd mesh_tools/mesh_conversion_tools +cd mesh_tools + +${PYTHON} -m pip install . --no-deps -vv + +cd mesh_conversion_tools export CXX=${GXX} export CFLAGS="-O3 -std=c++0x -fopenmp -lstdc++" @@ -15,3 +19,4 @@ for exec in MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x mark_horns_fo do install -m 755 ${exec} ${PREFIX}/bin/ done + diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index 2fd4b74aa..b49a549aa 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -1,4 +1,4 @@ -{% set name = "mpas_mesh_conversion_tools" %} +{% set name = "mpas_mesh_tools" %} {% set version = "0.0.1" %} package: @@ -7,59 +7,49 @@ package: source: git_url: https://github.com/xylar/MPAS-Tools.git - git_rev: 847dc921f0bcb57f3bd49a4a4c4646df2f3afd65 + git_rev: 4ea5d781e415c15a220ff43f0d064521ba18506a build: number: 0 + entry_points: + - planar_hex = mpas_mesh_tools.planar_hex:main requirements: build: - {{ compiler('cxx') }} host: - - netcdf4 =1.4.2 - - hdf5 =1.10.3 - - libnetcdf =4.6.1 + - python + - netcdf4 + - hdf5 + - libnetcdf run: - - netcdf4 =1.4.2 - - hdf5 =1.10.3 - - libnetcdf =4.6.1 + - netcdf4 + - hdf5 + - libnetcdf + - numpy + - xarray test: commands: - - wget 
https://github.com/MPAS-Dev/MPAS-Tools/raw/master/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - - wget https://github.com/MPAS-Dev/MPAS-Tools/raw/master/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - - wget https://github.com/MPAS-Dev/MPAS-Tools/raw/master/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc + - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x10_1km.nc' + - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc + - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMeshConverter.x mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f Arctic_Ocean.geojson about: - home: https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools/mesh_conversion_tools + home: https://github.com/MPAS-Dev/MPAS-Tools/ license: BSD-3-Clause license_family: BSD - license_file: '' - summary: Mesh conversion tools for Model for Prediction Across Scales (MPAS) + license_file: LICENSE + summary: Mesh tools for Model for Prediction Across Scales (MPAS) description: | - MpasMeshConverter.x is a piece of software designed create an MPAS mesh. - As input, this software takes the locations of MPAS cell centers, and cell - vertices, along with the connectivity array cellsOnVertex. If provided, it - will also migrate data from the meshDensity field, if it is not present it - will write 1.0 for every cell. - - MpasCellCuller.x is a piece of software designed remove - cells/edge/vertices from an MPAS mesh. As input, this software takes a - valid MPAS mesh with one additional field "cullCell". This new field should - be nCells integers. 
A 1 means the cell should be kept, and a 0 means the - cell should be removed. - - MpasMaskCreator.x is a piece of software designed to create cell masks - from region definitions. Region definitions are defined in geojson files, - and can be created using the tools contained within the repository located - at: - https://github.com/MPAS-Dev/geometric_features - Masks have a value of 0 or 1, and are integers. - doc_url: 'https://github.com/MPAS-Dev/MPAS-Tools/blob/master/mesh_tools/mesh_conversion_tools/README' - dev_url: 'https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools/mesh_conversion_tools' + A set of tools for creating and manipulating meshes for the climate + components based on the Model for Prediction Across Scales (MPAS) framework + doc_url: 'https://github.com/MPAS-Dev/MPAS-Tools/README.md' + dev_url: 'https://github.com/MPAS-Dev/MPAS-Tools/' extra: recipe-maintainers: From 832df9530edd6ccc76f1432882d6466d2c6cab43 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 21:55:26 -0700 Subject: [PATCH 116/180] Update setup.py and conda recipe --- mesh_tools/conda/build.sh | 2 +- mesh_tools/conda/meta.yaml | 12 +++++++++--- mesh_tools/setup.py | 6 ++++++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/mesh_tools/conda/build.sh b/mesh_tools/conda/build.sh index 9351fd399..8d569ce42 100644 --- a/mesh_tools/conda/build.sh +++ b/mesh_tools/conda/build.sh @@ -15,7 +15,7 @@ export CFLAGS="-O3 -std=c++0x -fopenmp -lstdc++" make install -d ${PREFIX}/bin/ -for exec in MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x mark_horns_for_culling.py +for exec in MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x do install -m 755 ${exec} ${PREFIX}/bin/ done diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index b49a549aa..e74ed5782 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -6,8 +6,7 @@ package: version: '{{ version }}' source: - git_url: 
https://github.com/xylar/MPAS-Tools.git - git_rev: 4ea5d781e415c15a220ff43f0d064521ba18506a + path: ../.. build: number: 0 @@ -19,14 +18,16 @@ requirements: - {{ compiler('cxx') }} host: - python - - netcdf4 - hdf5 - libnetcdf + - setuptools run: + - python - netcdf4 - hdf5 - libnetcdf - numpy + - scipy - xarray test: @@ -38,6 +39,11 @@ test: - MpasMeshConverter.x mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f Arctic_Ocean.geojson + - mark_horns_for_culling.py --help + - create_landice_grid_from_generic_MPAS_grid.py --help + - define_cullMask.py --help + - interpolate_to_mpasli_grid.py --help + - mark_domain_boundaries_dirichlet.py --help about: home: https://github.com/MPAS-Dev/MPAS-Tools/ diff --git a/mesh_tools/setup.py b/mesh_tools/setup.py index 2f73c976e..520c3eea1 100755 --- a/mesh_tools/setup.py +++ b/mesh_tools/setup.py @@ -27,7 +27,13 @@ 'Topic :: Scientific/Engineering', ], packages=find_packages(), +# package_dir={'mpas_mesh_tools': 'mesh_tools/mpas_mesh_tools'}, package_data={}, + scripts=['mesh_conversion_tools/mark_horns_for_culling.py', + 'landice/create_landice_grid_from_generic_MPAS_grid.py', + 'landice/define_cullMask.py', + 'landice/interpolate_to_mpasli_grid.py', + 'landice/mark_domain_boundaries_dirichlet.py'], install_requires=['numpy', 'xarray', 'netCDF4'], entry_points={'console_scripts': ['planar_hex = mpas_mesh_tools.planar_hex:main']}) From 187d4f7ab1310a64465e64ea1dbf2fa21a7d4ef2 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 25 Feb 2019 22:57:19 -0700 Subject: [PATCH 117/180] Fix PEP8 formatting on mark_horns_for_culling.py --- .../mark_horns_for_culling.py | 45 ++++++++++++------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py index afd87972c..27b9c8715 100755 --- 
a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py +++ b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py @@ -1,8 +1,9 @@ #!/usr/bin/env python ''' This script identifies "horns" on a mesh (cells with two or fewer neighbors), -and marks them for culling. In some cores/configurations, these weakly-connected -cells can be dynamically inactive, and, therefore, undesirable to keep in a mesh. +and marks them for culling. In some cores/configurations, these weakly +connected cells can be dynamically inactive, and, therefore, undesirable to +keep in a mesh. The method used will work on both planar and spherical meshes. It adds the new masked cell to an existing 'cullCell' field if it exists, @@ -19,10 +20,17 @@ from datetime import datetime -print("== Gathering information. (Invoke with --help for more details. All arguments are optional)\n") +print("== Gathering information. (Invoke with --help for more details. All " + "arguments are optional)\n") parser = OptionParser() parser.description = __doc__ -parser.add_option("-f", "--file", dest="inputFile", help="Name of file to be processed.", default="grid.nc", metavar="FILENAME") +parser.add_option( + "-f", + "--file", + dest="inputFile", + help="Name of file to be processed.", + default="grid.nc", + metavar="FILENAME") for option in parser.option_list: if option.default != ("NO", "DEFAULT"): option.help += (" " if option.help else "") + "[default: %default]" @@ -38,33 +46,36 @@ # Add the horn cells to existing mask if it exists if 'cullCell' in inputFile.variables: - cullCell = inputFile.variables['cullCell'][:] -else: # otherwise make a new mask initialized empty - cullCell = np.zeros( (nCells,) ) # local variable + cullCell = inputFile.variables['cullCell'][:] +else: # otherwise make a new mask initialized empty + cullCell = np.zeros((nCells,)) # local variable nHorns = 0 for i in range(nCells): - if (cellsOnCell[i,:] > 0).sum() <= 2: # NOTE: Can change this threshold, if needed for a particular use 
case. - cullCell[i] = 1 - nHorns += 1 + # NOTE: Can change this threshold, if needed for a particular use case. + if (cellsOnCell[i, :] > 0).sum() <= 2: + cullCell[i] = 1 + nHorns += 1 # Write out the new field if 'cullCell' in inputFile.variables: - cullCellVar = inputFile.variables['cullCell'] + cullCellVar = inputFile.variables['cullCell'] else: - cullCellVar = inputFile.createVariable('cullCell', 'i', ('nCells',)) + cullCellVar = inputFile.createVariable('cullCell', 'i', ('nCells',)) cullCellVar[:] = cullCell # Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) +thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + \ + ": " + " ".join(sys.argv[:]) if hasattr(inputFile, 'history'): - newhist = '\n'.join([thiscommand, getattr(inputFile, 'history')]) + newhist = '\n'.join([thiscommand, getattr(inputFile, 'history')]) else: - newhist = thiscommand -setattr(inputFile, 'history', newhist ) + newhist = thiscommand +setattr(inputFile, 'history', newhist) inputFile.close() -print('\n{} "horn" locations have been marked in the field cullCell.'.format(nHorns)) +print('\n{} "horn" locations have been marked in the field cullCell.'.format( + nHorns)) print("Remember to use MpasCellCuller.x to actually remove them!") From 97bbedd56c44fef38ed155c9a6ae7cc6329a8a8f Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 26 Feb 2019 17:01:16 -0700 Subject: [PATCH 118/180] Update the recipe to move land ice mesh tools This is better than moving them in the repo. --- mesh_tools/conda/build.sh | 2 ++ mesh_tools/conda/meta.yaml | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/mesh_tools/conda/build.sh b/mesh_tools/conda/build.sh index 8d569ce42..4689a1a99 100644 --- a/mesh_tools/conda/build.sh +++ b/mesh_tools/conda/build.sh @@ -3,6 +3,8 @@ set -x set -e +cp -r landice/mesh_tools_li mesh_tools/landice + cd mesh_tools ${PYTHON} -m pip install . 
--no-deps -vv diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index e74ed5782..0ccc45c4f 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -9,7 +9,7 @@ source: path: ../.. build: - number: 0 + number: 1 entry_points: - planar_hex = mpas_mesh_tools.planar_hex:main From 2accb0d7cae72f7089e67297ce2b695411398488 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 1 Mar 2019 19:32:21 -0700 Subject: [PATCH 119/180] Add translate and io to mpas_mesh_tools --- mesh_tools/conda/meta.yaml | 9 +- mesh_tools/mpas_mesh_tools/io.py | 40 +++++ mesh_tools/mpas_mesh_tools/planar_hex.py | 23 +-- mesh_tools/mpas_mesh_tools/translate.py | 182 +++++++++++++++++++++++ mesh_tools/setup.py | 4 +- 5 files changed, 233 insertions(+), 25 deletions(-) create mode 100644 mesh_tools/mpas_mesh_tools/io.py create mode 100755 mesh_tools/mpas_mesh_tools/translate.py diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index 0ccc45c4f..001aed48a 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -9,9 +9,10 @@ source: path: ../.. build: - number: 1 + number: 3 entry_points: - planar_hex = mpas_mesh_tools.planar_hex:main + - translate_planar_grid = mpas_mesh_tools.translate:main requirements: build: @@ -32,7 +33,11 @@ requirements: test: commands: - - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x10_1km.nc' + - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' + - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. + - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -c + - planar_hex --nx=20 --ny=40 --dc=1000. 
--outFileName='periodic_mesh_20x40_1km.nc' + - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -d 'periodic_mesh_20x40_1km.nc' - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc diff --git a/mesh_tools/mpas_mesh_tools/io.py b/mesh_tools/mpas_mesh_tools/io.py new file mode 100644 index 000000000..bdadde224 --- /dev/null +++ b/mesh_tools/mpas_mesh_tools/io.py @@ -0,0 +1,40 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy +import netCDF4 +from datetime import datetime +import sys + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): + '''Write an xarray Dataset with NetCDF4 fill values where needed''' + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + isNumeric = numpy.issubdtype(ds[variableName].dtype, numpy.number) + if isNumeric and numpy.any(numpy.isnan(ds[variableName])): + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + else: + encodingDict[variableName] = {'_FillValue': None} + + update_history(ds) + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def update_history(ds): + '''Add or append history to attributes of a data set''' + + thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \ + " ".join(sys.argv[:]) + if 'history' in ds.attrs: + newhist = '\n'.join([thiscommand, ds.attrs['history']]) + else: + newhist = thiscommand + ds.attrs['history'] = newhist diff --git a/mesh_tools/mpas_mesh_tools/planar_hex.py 
b/mesh_tools/mpas_mesh_tools/planar_hex.py index 0e78a1b52..73eed6a0d 100755 --- a/mesh_tools/mpas_mesh_tools/planar_hex.py +++ b/mesh_tools/mpas_mesh_tools/planar_hex.py @@ -6,7 +6,8 @@ import numpy import xarray import argparse -import netCDF4 + +from mpas_mesh_tools.io import write_netcdf def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, @@ -403,20 +404,6 @@ def add_one_to_indices(mesh): mesh[var] = mesh[var] + 1 -def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): - encodingDict = {} - variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) - for variableName in variableNames: - dtype = ds[variableName].dtype - for fillType in fillValues: - if dtype == numpy.dtype(fillType): - encodingDict[variableName] = \ - {'_FillValue': fillValues[fillType]} - break - - ds.to_netcdf(fileName, encoding=encodingDict) - - def make_diff(mesh, refMeshFileName, diffFileName): refMesh = xarray.open_dataset(refMeshFileName) @@ -467,12 +454,6 @@ def main(): args.nonperiodic_x, args.nonperiodic_y, args.outFileName) - # used this instead to make sure results are exactly identical to - # periodic_hex - # make_planar_hex_mesh( - # args.nx, args.ny, args.dc, args.outFileName, - # compareWithFileName='../periodic_hex/grid.nc') - if __name__ == '__main__': main() diff --git a/mesh_tools/mpas_mesh_tools/translate.py b/mesh_tools/mpas_mesh_tools/translate.py new file mode 100755 index 000000000..07dd5fb2f --- /dev/null +++ b/mesh_tools/mpas_mesh_tools/translate.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +from optparse import OptionParser + +import xarray + +from mpas_mesh_tools.io import write_netcdf + + +def translate(mesh, xOffset=0., yOffset=0.): + ''' + Translates the coordinate system of the planar MPAS mesh by an arbirary + shift in x and/or y + + Parameters + ---------- + mesh : ``xarray.Dataset`` + A planar mesh to translate + + xOffset : float, optional + 
user-specified shift in the x-direction + + yOffset : float, optional + user-specified shift in the y-direction + + ''' + + mesh.xCell[:] += xOffset + mesh.yCell[:] += yOffset + mesh.xVertex[:] += xOffset + mesh.yVertex[:] += yOffset + mesh.xEdge[:] += xOffset + mesh.yEdge[:] += yOffset + + +def center_on_mesh(mesh, otherMesh): + ''' + Translates the coordinate system of the planar MPAS mesh by shifting the + origin to the center of the domain described in a separate mesh + + Parameters + ---------- + mesh : ``xarray.Dataset`` + A planar mesh to translate + + otherMesh : ``xarray.Dataset`` + Another planar mesh whose center will become the center of this mesh. + Uses xCell,yCell or, if those fields do not exist, will secondly try + x1,y1 fields + ''' + + mpasXcenter, mpasYcenter = get_center(mesh) + + if 'xCell' in otherMesh and 'yCell' in otherMesh: + dataXcenter, dataYcenter = get_center(otherMesh, xVar='xCell', + yVar='yCell') + elif 'x1' in otherMesh and 'y1' in otherMesh: + dataXcenter, dataYcenter = get_center(otherMesh, xVar='x1', yVar='y1') + else: + raise ValueError('reference mesh has neither xCell/yCell nor x1/y1 ' + 'fields.') + + translate(mesh, dataXcenter-mpasXcenter, dataYcenter-mpasYcenter) + + +def center(mesh): + ''' + Translates the coordinate system of the planar MPAS mesh by shifting the + origin to the center of the domain + + Parameters + ---------- + mesh : ``xarray.Dataset`` + A planar mesh to translate + ''' + mpasXcenter, mpasYcenter = get_center(mesh) + + translate(mesh, -mpasXcenter, -mpasYcenter) + + +def get_center(mesh, xVar='xCell', yVar='yCell'): + ''' + Find the center of the mesh + ''' + + xCenter = (mesh[xVar].min() + mesh[xVar].max()) * 0.5 + yCenter = (mesh[yVar].min() + mesh[yVar].max()) * 0.5 + + return xCenter, yCenter + + +def main(): + + print("== Gathering information. (Invoke with --help for more details. 
" + "All arguments are optional)") + parser = OptionParser() + parser.description = \ + "This script translates the coordinate system of the planar MPAS " \ + "mesh specified with the -f flag. \n" \ + "There are 3 possible methods to choose from:\n" \ + "1) shift the origin to the center of the domain\n" \ + "2) arbirary shift in x and/or y\n" \ + "3) shift to the center of the domain described in a separate file\n" + parser.add_option("-f", "--file", dest="fileInName", + help="MPAS planar grid file name.", default="grid.nc", + metavar="FILENAME") + parser.add_option("-d", "--datafile", dest="dataFileName", + help="data file name to which to match the domain " + "center of. Uses xCell,yCell or, if those fields " + "do not exist, will secondly try x1,y1 fields.", + metavar="FILENAME") + parser.add_option("-x", dest="xshift", + help="user-specified shift in the x-direction.", + type="float", default=0.0, metavar="SHIFT_VALUE") + parser.add_option("-y", dest="yshift", + help="user-specified shift in the y-direction.", + type="float", default=0.0, metavar="SHIFT_VALUE") + parser.add_option("-c", dest="center", + help="shift so origin is at center of domain", + action="store_true", default=False) + for option in parser.option_list: + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" + options, args = parser.parse_args() + + print("Attempting to translate coordinates in file: {}".format( + options.fileInName)) + + if options.dataFileName is not None and \ + (options.xshift != 0. or options.yshift != 0.): + raise ValueError('Specifying a datafile AND one or both of x/y shift ' + 'is invalid. Please select one of those methods ' + 'only.') + + if options.center and (options.xshift != 0. or options.yshift != 0.): + raise ValueError('Specifying a shift to center AND one or both of x/y ' + 'shift is invalid. 
Please select one of those ' + 'methods only.') + + if options.dataFileName is not None and options.center: + raise ValueError('Specifying a datafile AND a shift to center is ' + 'invalid. Please select one of those methods only.') + + if not options.center and (options.xshift == 0.) and \ + (options.yshift == 0.) and options.dataFileName is None: + raise ValueError('No translation method was specified. Please select ' + 'one. Run with -h for more information.') + + mesh = xarray.open_dataset(options.fileInName) + if options.dataFileName is not None: + print(" Translating coordinates in {} so the domain center matches " + "the domain center in {}.\n\n".format(options.fileInName, + options.dataFileName)) + otherMesh = xarray.open_dataset(options.dataFileName) + center_on_mesh(mesh, otherMesh) + + if options.xshift != 0. or options.yshift != 0.: + print(" Translating coordinates in {} by user-specified values. " + "X-shift={:f}; Y-shift={:f}\n\n".format(options.fileInName, + options.xshift, + options.yshift)) + + translate(mesh, options.xshift, options.yshift) + + if options.center: + print(" Translating coordinates in %s so the origin is the center of " + "the domain.\n\n") + + center(mesh) + + # close the file so we can re-open it for writing + mesh.close() + write_netcdf(mesh, options.fileInName) + + print("Translation completed.") + + +if __name__ == '__main__': + main() diff --git a/mesh_tools/setup.py b/mesh_tools/setup.py index 520c3eea1..1e99a31e4 100755 --- a/mesh_tools/setup.py +++ b/mesh_tools/setup.py @@ -27,7 +27,6 @@ 'Topic :: Scientific/Engineering', ], packages=find_packages(), -# package_dir={'mpas_mesh_tools': 'mesh_tools/mpas_mesh_tools'}, package_data={}, scripts=['mesh_conversion_tools/mark_horns_for_culling.py', 'landice/create_landice_grid_from_generic_MPAS_grid.py', @@ -36,4 +35,5 @@ 'landice/mark_domain_boundaries_dirichlet.py'], install_requires=['numpy', 'xarray', 'netCDF4'], entry_points={'console_scripts': - ['planar_hex = 
mpas_mesh_tools.planar_hex:main']}) + ['planar_hex = mpas_mesh_tools.planar_hex:main', + 'translate_planar_grid = mpas_mesh_tools.translate:main']}) From ee3d53c56b39d2b5c6044d3718704fc21170d9db Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 1 Mar 2019 23:42:11 -0700 Subject: [PATCH 120/180] Fix the conda recipe --- mesh_tools/conda/meta.yaml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index 001aed48a..8ef9fef9a 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -32,18 +32,19 @@ requirements: - xarray test: + source_files: + - mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc + - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc commands: - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -c - planar_hex --nx=20 --ny=40 --dc=1000. 
--outFileName='periodic_mesh_20x40_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -d 'periodic_mesh_20x40_1km.nc' - - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - - wget https://github.com/xylar/MPAS-Tools/raw/mesh_tools_conda_package/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - - MpasMeshConverter.x mesh.QU.1920km.151026.nc mesh.nc - - MpasCellCuller.x mesh.nc culled_mesh.nc -m land_mask_final.nc - - MpasMaskCreator.x mesh.nc arctic_mask.nc -f Arctic_Ocean.geojson + - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc + - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc + - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - mark_horns_for_culling.py --help - create_landice_grid_from_generic_MPAS_grid.py --help - define_cullMask.py --help From dab0c32ee7753ad77f4888bbb3af69975ca80380 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 6 Mar 2019 20:19:20 -0700 Subject: [PATCH 121/180] Add more scripts needed for COMPASS --- mesh_tools/conda/build.sh | 3 +++ mesh_tools/conda/meta.yaml | 3 +++ mesh_tools/setup.py | 5 ++++- 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/mesh_tools/conda/build.sh b/mesh_tools/conda/build.sh index 4689a1a99..62dd39405 100644 --- a/mesh_tools/conda/build.sh +++ b/mesh_tools/conda/build.sh @@ -5,6 +5,9 @@ set -e cp -r landice/mesh_tools_li mesh_tools/landice +mkdir -p mesh_tools/ocean/ +cp -r ocean/coastline_alteration mesh_tools/ocean/coastline_alteration + cd mesh_tools ${PYTHON} -m pip install . 
--no-deps -vv diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index 8ef9fef9a..7b635f13c 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -50,6 +50,9 @@ test: - define_cullMask.py --help - interpolate_to_mpasli_grid.py --help - mark_domain_boundaries_dirichlet.py --help + - add_land_locked_cells_to_mask.py --help + - widen_transect_edge_masks.py --help + - add_critical_land_blockages_to_mask.py --help about: home: https://github.com/MPAS-Dev/MPAS-Tools/ diff --git a/mesh_tools/setup.py b/mesh_tools/setup.py index 1e99a31e4..92e927a2a 100755 --- a/mesh_tools/setup.py +++ b/mesh_tools/setup.py @@ -32,7 +32,10 @@ 'landice/create_landice_grid_from_generic_MPAS_grid.py', 'landice/define_cullMask.py', 'landice/interpolate_to_mpasli_grid.py', - 'landice/mark_domain_boundaries_dirichlet.py'], + 'landice/mark_domain_boundaries_dirichlet.py', + 'ocean/coastline_alteration/add_land_locked_cells_to_mask.py', + 'ocean/coastline_alteration/widen_transect_edge_masks.py', + 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py'], install_requires=['numpy', 'xarray', 'netCDF4'], entry_points={'console_scripts': ['planar_hex = mpas_mesh_tools.planar_hex:main', From cd617435c4662497e4e8c0bee2b9d0c5403630ff Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 16 Mar 2019 00:49:07 -0600 Subject: [PATCH 122/180] Add python function to interface with mesh conversion tools These functions are expected to make it simpler to make use of these tools within a pythonn workflow such as COMPASS where the user is working with xarray.Dataset and geometric_features.FeatureCollection objects instead of file names. 
--- mesh_tools/conda/meta.yaml | 5 + mesh_tools/mpas_mesh_tools/conversion.py | 200 ++++++++++++++++++ mesh_tools/mpas_mesh_tools/tests/__init__.py | 0 .../mpas_mesh_tools/tests/test_conversion.py | 30 +++ 4 files changed, 235 insertions(+) create mode 100644 mesh_tools/mpas_mesh_tools/conversion.py create mode 100644 mesh_tools/mpas_mesh_tools/tests/__init__.py create mode 100755 mesh_tools/mpas_mesh_tools/tests/test_conversion.py diff --git a/mesh_tools/conda/meta.yaml b/mesh_tools/conda/meta.yaml index 7b635f13c..d8d124262 100644 --- a/mesh_tools/conda/meta.yaml +++ b/mesh_tools/conda/meta.yaml @@ -30,12 +30,16 @@ requirements: - numpy - scipy - xarray + - geometric_features test: + requires: + - pytest source_files: - mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc + - mesh_tools/mpas_mesh_tools/tests/* commands: - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. 
@@ -45,6 +49,7 @@ test: - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - python -m pytest mesh_tools/mpas_mesh_tools/tests - mark_horns_for_culling.py --help - create_landice_grid_from_generic_MPAS_grid.py --help - define_cullMask.py --help diff --git a/mesh_tools/mpas_mesh_tools/conversion.py b/mesh_tools/mpas_mesh_tools/conversion.py new file mode 100644 index 000000000..d0cac0c1a --- /dev/null +++ b/mesh_tools/mpas_mesh_tools/conversion.py @@ -0,0 +1,200 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import os +import xarray +import subprocess +import tempfile +import shutil + +from mpas_mesh_tools.io import write_netcdf + + +def convert(dsIn): + ''' + Use ``MpasMeshConverter.x`` to convert an input mesh to a valid MPAS + mesh that is fully compliant with the MPAS mesh specification. 
+ https://mpas-dev.github.io/files/documents/MPAS-MeshSpec.pdf + + Parameters + ---------- + dsIn : ``xarray.Dataset`` + A data set to convert + + Returns + ------- + dsOut : ``xarray.Dataset`` + The MPAS mesh + ''' + + tempFiles = [] + inFileName = _get_temp_path(tempFiles) + write_netcdf(dsIn, inFileName) + + outFileName = _get_temp_path(tempFiles) + + # go into the directory of the output file so the graph.info file ends + # up in the same place + owd = os.getcwd() + os.chdir(os.path.dirname(outFileName)) + subprocess.check_call(['MpasMeshConverter.x', inFileName, outFileName]) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + _remove_temp_files(tempFiles) + + return dsOut + + +def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, + graphInfoPath=None): + ''' + Use ``MpasCellCuller.x`` to cull cells from a mesh based on the + ``cullCell`` field in the input file or DataSet and/or the provided masks. + ``cullCell``, dsMask and dsInverse are merged together so that the final + mask is the union of these 3. The preserve mask is then used to determine + where cells should *not* be culled. + + Parameters + ---------- + dsIn : ``xarray.Dataset``, optional + A data set to cull, possibly with a ``cullCell`` field set to one where + cells should be removed + + dsMask : ``xarray.Dataset``, optional + A data set with region masks that are 1 where cells should be culled + + dsInverse : ``xarray.Dataset``, optional + A data set with region masks that are 0 where cells should be culled + + dsPreserve : ``xarray.Dataset``, optional + A data set with region masks that are 1 where cells should *not* be + culled + + graphInfoPath : str, optional + A path where the file ``graph.info`` should be written out. By + default, ``graph.info`` is written to a temp directory that is deleted. 
+ + Returns + ------- + dsOut : ``xarray.Dataset`` + The culled mesh + + ''' + + tempFiles = [] + inFileName = _get_temp_path(tempFiles) + write_netcdf(dsIn, inFileName) + outFileName = _get_temp_path(tempFiles) + + args = ['MpasCellCuller.x', inFileName, outFileName] + + if dsMask is not None: + fileName = _get_temp_path(tempFiles) + write_netcdf(dsMask, fileName) + args.extend(['-m', fileName]) + + if dsInverse is not None: + fileName = _get_temp_path(tempFiles) + write_netcdf(dsInverse, fileName) + args.extend(['-i', fileName]) + + if dsPreserve is not None: + fileName = _get_temp_path(tempFiles) + write_netcdf(dsPreserve, fileName) + args.extend(['-p', fileName]) + + # go into the directory of the output file so the graph.info file ends + # up in the same place + + if graphInfoPath is not None: + graphInfoPath = os.path.abspath(graphInfoPath) + + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + subprocess.check_call(args) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + if graphInfoPath is not None: + shutil.copyfile('{}/graph.info'.format(outDir), + '{}/graph.info'.format(graphInfoPath)) + _remove_temp_files(tempFiles) + + return dsOut + + +def mask(dsMesh, fcMask=None, fcSeed=None, positiveLon=False): + ''' + Use ``MpasMaskCreator.x`` to create a set of region masks either from + mask feature collecitons or from seed points to be used to flood fill + + Parameters + ---------- + dsMesh : ``xarray.Dataset``, optional + An MPAS mesh on which the masks should be created + + fcMask : ``geometric_features.FeatureCollection``, optional + A feature collection containing features to use to create the mask + + fcSeed : ``geometric_features.FeatureCollection``, optional + A feature collection with points to use a seeds for a flood fill that + will create a mask of all cells connected to the seed points + + Returns + ------- + dsMask : ``xarray.Dataset`` + The masks + + ''' + + tempFiles = [] + inFileName = 
_get_temp_path(tempFiles) + write_netcdf(dsMesh, inFileName) + outFileName = _get_temp_path(tempFiles) + + args = ['MpasMaskCreator.x', inFileName, outFileName] + + if fcMask is not None: + fileName = _get_temp_path(tempFiles, ext='geojson') + fcMask.to_geojson(fileName) + args.extend(['-f', fileName]) + + if fcSeed is not None: + fileName = _get_temp_path(tempFiles, ext='geojson') + fcSeed.to_geojson(fileName) + args.extend(['-s', fileName]) + + if positiveLon: + args.append('--positive_lon') + + # go into the directory of the output file so the graph.info file ends + # up in the same place + owd = os.getcwd() + os.chdir(os.path.dirname(outFileName)) + subprocess.check_call(args) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + _remove_temp_files(tempFiles) + + return dsOut + + +def _get_temp_path(tempFiles, ext='nc'): + '''Returns the name of a temporary NetCDF file''' + fileName = '{}/{}.{}'.format(tempfile._get_default_tempdir(), + next(tempfile._get_candidate_names()), + ext) + tempFiles.append(fileName) + return fileName + + +def _remove_temp_files(tempFiles): + for tempFileName in tempFiles: + os.remove(tempFileName) diff --git a/mesh_tools/mpas_mesh_tools/tests/__init__.py b/mesh_tools/mpas_mesh_tools/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mesh_tools/mpas_mesh_tools/tests/test_conversion.py b/mesh_tools/mpas_mesh_tools/tests/test_conversion.py new file mode 100755 index 000000000..172e37b8e --- /dev/null +++ b/mesh_tools/mpas_mesh_tools/tests/test_conversion.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python + +from mpas_mesh_tools.conversion import convert, cull, mask +from mpas_mesh_tools.io import write_netcdf +from geometric_features import read_feature_collection +import xarray + + +def test_conversion(): + dsMesh = xarray.open_dataset( + 'mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc') + dsMesh = convert(dsIn=dsMesh) + write_netcdf(dsMesh, 'mesh.nc') + + dsMask = 
xarray.open_dataset( + 'mesh_tools/mesh_conversion_tools/test/land_mask_final.nc') + dsCulled = cull(dsIn=dsMesh, dsMask=dsMask) + write_netcdf(dsCulled, 'culled_mesh.nc') + + dsMask = xarray.open_dataset( + 'mesh_tools/mesh_conversion_tools/test/land_mask_final.nc') + + fcMask = read_feature_collection( + 'mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson') + dsMask = mask(dsMesh=dsMesh, fcMask=fcMask) + write_netcdf(dsMask, 'antarctic_mask.nc') + + +if __name__ == '__main__': + test_conversion() From bb8f187d84c9c39aae395fb2dee1425a1b5659a5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 16 Mar 2019 00:51:30 -0600 Subject: [PATCH 123/180] Update gitignore to ignore many more unwanted files/dirs These include python cache directories as well as many, many other temporary, cache or config files that we might encounter. --- .gitignore | 92 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) diff --git a/.gitignore b/.gitignore index 11427a695..26937dc4a 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,95 @@ *.png # Each tool should also have it's own .gitignore file that ignores the build files for that tool. + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# IPython Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# dotenv +.env + +# virtualenv +venv/ +ENV/ + +# Spyder project settings +.spyderproject + +# Rope project settings +.ropeproject + +.DS_Store \ No newline at end of file From 4000fabe7b9bd8b8d9d6951be023490eea839389 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 17 Mar 2019 21:40:54 -0600 Subject: [PATCH 124/180] Add a starting point for MPAS mesh-tool documentation --- mesh_tools/docs/Makefile | 24 +++++ mesh_tools/docs/api.rst | 32 ++++++ mesh_tools/docs/conf.py | 180 ++++++++++++++++++++++++++++++++ mesh_tools/docs/environment.yml | 15 +++ mesh_tools/docs/index.rst | 15 +++ 5 files changed, 266 insertions(+) create mode 100644 mesh_tools/docs/Makefile create mode 100644 mesh_tools/docs/api.rst create mode 100644 mesh_tools/docs/conf.py create mode 100644 mesh_tools/docs/environment.yml create mode 100644 mesh_tools/docs/index.rst diff --git a/mesh_tools/docs/Makefile b/mesh_tools/docs/Makefile new file mode 100644 index 000000000..56119177a --- /dev/null +++ b/mesh_tools/docs/Makefile @@ -0,0 +1,24 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = mpas_mesh_tools +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". 
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +clean: + rm -rf *obs_table.rst generated obs + @$(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/mesh_tools/docs/api.rst b/mesh_tools/docs/api.rst new file mode 100644 index 000000000..fefa8f0af --- /dev/null +++ b/mesh_tools/docs/api.rst @@ -0,0 +1,32 @@ +############# +API reference +############# + +This page provides an auto-generated summary of the MPAS mesh-tools API. For +more details and examples, refer to the relevant chapters in the main part of +the documentation. + +Python package +============== + +.. currentmodule:: mpas_mesh_tools.planar_hex + +.. autosummary:: + :toctree: generated/ + + make_periodic_planar_hex_mesh + +.. currentmodule:: mpas_mesh_tools.translate + +.. autosummary:: + :toctree: generated/ + + translate + +.. currentmodule:: mpas_mesh_tools.io + +.. autosummary:: + :toctree: generated/ + + write_netcdf + diff --git a/mesh_tools/docs/conf.py b/mesh_tools/docs/conf.py new file mode 100644 index 000000000..39226d450 --- /dev/null +++ b/mesh_tools/docs/conf.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# +# mpas_mesh_tools documentation build configuration file, created by +# sphinx-quickstart on Sat Mar 25 14:39:11 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import os +import mpas_mesh_tools + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'numpydoc'] + +autosummary_generate = True + +numpydoc_class_members_toctree = True +numpydoc_show_class_members = False + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +source_suffix = ['.rst'] +# source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'mpas_mesh_tools' +copyright = u'This software is open source software available under the BSD-3' \ + u'license. Copyright (c) 2019 Triad National Security, LLC. ' \ + u'All rights reserved. Copyright (c) 2018 Lawrence Livermore ' \ + u'National Security, LLC. All rights reserved. Copyright (c) ' \ + u'2018 UT-Battelle, LLC. All rights reserved.' +author = u'Xylar Asay-Davis, Doug Jacobsen, Michael Duda, Mark Petersen, ' \ + u'Adridan Turner' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'0.0.1' +# The full version, including alpha/beta/rc tags. +release = u'0.0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', + 'design_docs/template.md'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# + +# on_rtd is whether we are on readthedocs.org, this line of code grabbed from +# docs.readthedocs.org +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'mpas_mesh_tools_doc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). 
+ # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'mpas_mesh_tools.tex', u'mpas_mesh_tools Documentation', + author, 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'mpas_mesh_tools', u'mpas_mesh_tools Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'mpas_mesh_tools', u'mpas_mesh_tools Documentation', + author, 'mpas_mesh_tools', 'One line description of project.', + 'Miscellaneous'), +] + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + 'python': ('https://docs.python.org/', None), + 'numpy': ('http://docs.scipy.org/doc/numpy/', None), + 'xarray': ('http://xarray.pydata.org/en/stable/', None)} + + +github_doc_root = 'https://github.com/rtfd/recommonmark/tree/master/doc/' diff --git a/mesh_tools/docs/environment.yml b/mesh_tools/docs/environment.yml new file mode 100644 index 000000000..2ad5244dc --- /dev/null +++ b/mesh_tools/docs/environment.yml @@ -0,0 +1,15 @@ +name: mpas_mesh_tools_docs +channels: + - conda-forge +dependencies: + - python=3.7 + - pytest + - netcdf4 + - hdf5 + - libnetcdf + - numpy + - scipy + - xarray + - sphinx + - sphinx_rtd_theme + - numpydoc diff --git a/mesh_tools/docs/index.rst b/mesh_tools/docs/index.rst new file mode 100644 index 000000000..cbefe0bdf --- /dev/null +++ b/mesh_tools/docs/index.rst @@ -0,0 +1,15 @@ +mpas_mesh_tools +================== + +This repository houses geometric features relevant for climate science. + +.. toctree:: + :maxdepth: 2 + + api + +Indices and tables +================== + +* :ref:`genindex` + From 8a4916730fa1aa09f282245b1364d975564dc9c2 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 25 Apr 2019 13:54:00 +0200 Subject: [PATCH 125/180] Move mpas_mesh_tools up a directory Too many scripts are needed from elsewhere in the repo --- {mesh_tools/conda => conda}/build.sh | 8 +------- {mesh_tools/conda => conda}/meta.yaml | 8 ++++---- {mesh_tools/docs => docs}/Makefile | 0 {mesh_tools/docs => docs}/api.rst | 0 {mesh_tools/docs => docs}/conf.py | 0 {mesh_tools/docs => docs}/environment.yml | 0 {mesh_tools/docs => docs}/index.rst | 0 .../mpas_mesh_tools => mpas_mesh_tools}/__init__.py | 0 .../mpas_mesh_tools => mpas_mesh_tools}/__main__.py | 0 .../conversion.py | 0 .../mpas_mesh_tools => mpas_mesh_tools}/io.py | 0 .../planar_hex.py | 0 .../tests/__init__.py | 0 .../tests/test_conversion.py | 0 .../mpas_mesh_tools => mpas_mesh_tools}/translate.py | 0 mesh_tools/setup.py => setup.py | 12 ++++++------ 16 files 
changed, 11 insertions(+), 17 deletions(-) rename {mesh_tools/conda => conda}/build.sh (61%) rename {mesh_tools/conda => conda}/meta.yaml (95%) rename {mesh_tools/docs => docs}/Makefile (100%) rename {mesh_tools/docs => docs}/api.rst (100%) rename {mesh_tools/docs => docs}/conf.py (100%) rename {mesh_tools/docs => docs}/environment.yml (100%) rename {mesh_tools/docs => docs}/index.rst (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/__init__.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/__main__.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/conversion.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/io.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/planar_hex.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/tests/__init__.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/tests/test_conversion.py (100%) rename {mesh_tools/mpas_mesh_tools => mpas_mesh_tools}/translate.py (100%) rename mesh_tools/setup.py => setup.py (78%) diff --git a/mesh_tools/conda/build.sh b/conda/build.sh similarity index 61% rename from mesh_tools/conda/build.sh rename to conda/build.sh index 62dd39405..af4a9acea 100644 --- a/mesh_tools/conda/build.sh +++ b/conda/build.sh @@ -3,16 +3,10 @@ set -x set -e -cp -r landice/mesh_tools_li mesh_tools/landice - -mkdir -p mesh_tools/ocean/ -cp -r ocean/coastline_alteration mesh_tools/ocean/coastline_alteration - -cd mesh_tools ${PYTHON} -m pip install . --no-deps -vv -cd mesh_conversion_tools +cd mesh_tools/mesh_conversion_tools export CXX=${GXX} export CFLAGS="-O3 -std=c++0x -fopenmp -lstdc++" diff --git a/mesh_tools/conda/meta.yaml b/conda/meta.yaml similarity index 95% rename from mesh_tools/conda/meta.yaml rename to conda/meta.yaml index d8d124262..a3e314d0a 100644 --- a/mesh_tools/conda/meta.yaml +++ b/conda/meta.yaml @@ -6,10 +6,10 @@ package: version: '{{ version }}' source: - path: ../.. + path: .. 
build: - number: 3 + number: 0 entry_points: - planar_hex = mpas_mesh_tools.planar_hex:main - translate_planar_grid = mpas_mesh_tools.translate:main @@ -39,7 +39,7 @@ test: - mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - - mesh_tools/mpas_mesh_tools/tests/* + - mpas_mesh_tools/tests/* commands: - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. @@ -49,7 +49,7 @@ test: - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - - python -m pytest mesh_tools/mpas_mesh_tools/tests + - python -m pytest mpas_mesh_tools/tests - mark_horns_for_culling.py --help - create_landice_grid_from_generic_MPAS_grid.py --help - define_cullMask.py --help diff --git a/mesh_tools/docs/Makefile b/docs/Makefile similarity index 100% rename from mesh_tools/docs/Makefile rename to docs/Makefile diff --git a/mesh_tools/docs/api.rst b/docs/api.rst similarity index 100% rename from mesh_tools/docs/api.rst rename to docs/api.rst diff --git a/mesh_tools/docs/conf.py b/docs/conf.py similarity index 100% rename from mesh_tools/docs/conf.py rename to docs/conf.py diff --git a/mesh_tools/docs/environment.yml b/docs/environment.yml similarity index 100% rename from mesh_tools/docs/environment.yml rename to docs/environment.yml diff --git a/mesh_tools/docs/index.rst b/docs/index.rst similarity index 100% rename from mesh_tools/docs/index.rst rename to docs/index.rst diff --git a/mesh_tools/mpas_mesh_tools/__init__.py b/mpas_mesh_tools/__init__.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/__init__.py 
rename to mpas_mesh_tools/__init__.py diff --git a/mesh_tools/mpas_mesh_tools/__main__.py b/mpas_mesh_tools/__main__.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/__main__.py rename to mpas_mesh_tools/__main__.py diff --git a/mesh_tools/mpas_mesh_tools/conversion.py b/mpas_mesh_tools/conversion.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/conversion.py rename to mpas_mesh_tools/conversion.py diff --git a/mesh_tools/mpas_mesh_tools/io.py b/mpas_mesh_tools/io.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/io.py rename to mpas_mesh_tools/io.py diff --git a/mesh_tools/mpas_mesh_tools/planar_hex.py b/mpas_mesh_tools/planar_hex.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/planar_hex.py rename to mpas_mesh_tools/planar_hex.py diff --git a/mesh_tools/mpas_mesh_tools/tests/__init__.py b/mpas_mesh_tools/tests/__init__.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/tests/__init__.py rename to mpas_mesh_tools/tests/__init__.py diff --git a/mesh_tools/mpas_mesh_tools/tests/test_conversion.py b/mpas_mesh_tools/tests/test_conversion.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/tests/test_conversion.py rename to mpas_mesh_tools/tests/test_conversion.py diff --git a/mesh_tools/mpas_mesh_tools/translate.py b/mpas_mesh_tools/translate.py similarity index 100% rename from mesh_tools/mpas_mesh_tools/translate.py rename to mpas_mesh_tools/translate.py diff --git a/mesh_tools/setup.py b/setup.py similarity index 78% rename from mesh_tools/setup.py rename to setup.py index 92e927a2a..370273683 100755 --- a/mesh_tools/setup.py +++ b/setup.py @@ -28,15 +28,15 @@ ], packages=find_packages(), package_data={}, - scripts=['mesh_conversion_tools/mark_horns_for_culling.py', - 'landice/create_landice_grid_from_generic_MPAS_grid.py', - 'landice/define_cullMask.py', - 'landice/interpolate_to_mpasli_grid.py', - 'landice/mark_domain_boundaries_dirichlet.py', + 
scripts=['mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py', + 'landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py', + 'landice/mesh_tools_li/define_cullMask.py', + 'landice/mesh_tools_li/interpolate_to_mpasli_grid.py', + 'landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py', 'ocean/coastline_alteration/add_land_locked_cells_to_mask.py', 'ocean/coastline_alteration/widen_transect_edge_masks.py', 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py'], - install_requires=['numpy', 'xarray', 'netCDF4'], + install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], entry_points={'console_scripts': ['planar_hex = mpas_mesh_tools.planar_hex:main', 'translate_planar_grid = mpas_mesh_tools.translate:main']}) From ac8c01cae20e2f37fe00f234d15009307367e512 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 25 Apr 2019 13:57:03 +0200 Subject: [PATCH 126/180] Add the paraview extractor to mpas_mesh_tools --- conda/meta.yaml | 3 + .../utils.py => mpas_mesh_tools/viz.py | 2 +- setup.py | 4 +- .../mpas_mesh_tools | 1 + .../paraview_vtk_field_extractor.py | 108 +++++++++--------- 5 files changed, 62 insertions(+), 56 deletions(-) rename visualization/paraview_vtk_field_extractor/utils.py => mpas_mesh_tools/viz.py (99%) create mode 120000 visualization/paraview_vtk_field_extractor/mpas_mesh_tools diff --git a/conda/meta.yaml b/conda/meta.yaml index a3e314d0a..0392aae77 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -31,6 +31,8 @@ requirements: - scipy - xarray - geometric_features + - pyevtk + - future test: requires: @@ -58,6 +60,7 @@ test: - add_land_locked_cells_to_mask.py --help - widen_transect_edge_masks.py --help - add_critical_land_blockages_to_mask.py --help + - paraview_vtk_field_extractor.py -f mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc -v latCell,lonCell --ignore_time -o vtk_test about: home: https://github.com/MPAS-Dev/MPAS-Tools/ diff --git 
a/visualization/paraview_vtk_field_extractor/utils.py b/mpas_mesh_tools/viz.py similarity index 99% rename from visualization/paraview_vtk_field_extractor/utils.py rename to mpas_mesh_tools/viz.py index 4c4f9f715..bda7adbc5 100644 --- a/visualization/paraview_vtk_field_extractor/utils.py +++ b/mpas_mesh_tools/viz.py @@ -1,6 +1,6 @@ #!/usr/bin/env python """ -Name: utils.py +Name: viz.py Authors: Xylar Asay-Davis Utility library for various scripts used to extract vtk geometry from NetCDF diff --git a/setup.py b/setup.py index 370273683..3ee31ccfe 100755 --- a/setup.py +++ b/setup.py @@ -35,7 +35,9 @@ 'landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py', 'ocean/coastline_alteration/add_land_locked_cells_to_mask.py', 'ocean/coastline_alteration/widen_transect_edge_masks.py', - 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py'], + 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py', + 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py', + 'visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py'], install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], entry_points={'console_scripts': ['planar_hex = mpas_mesh_tools.planar_hex:main', diff --git a/visualization/paraview_vtk_field_extractor/mpas_mesh_tools b/visualization/paraview_vtk_field_extractor/mpas_mesh_tools new file mode 120000 index 000000000..36dde9bda --- /dev/null +++ b/visualization/paraview_vtk_field_extractor/mpas_mesh_tools @@ -0,0 +1 @@ +../../mpas_mesh_tools/ \ No newline at end of file diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 8189bceba..2fddfb2fd 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -99,7 +99,7 @@ except ImportError: use_progress_bar = False -import utils +from 
mpas_mesh_tools import viz def build_field_time_series(local_time_indices, file_names, mesh_file, @@ -118,7 +118,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, outType = 'float64' # Get dimension info to allocate the size of Colors - time_series_file = utils.open_netcdf(file_names[0]) + time_series_file = viz.open_netcdf(file_names[0]) if mesh_file is not None: # blockDim may not exist in time series file @@ -207,27 +207,27 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: out_prefix = "timeDependentFieldsOn{}".format(suffix) # start the pvd file - pvd_file = utils.write_pvd_header(out_dir, out_prefix) + pvd_file = viz.write_pvd_header(out_dir, out_prefix) pvd_file.write('\n') if not combine_output and not np.all(var_has_time_dim): static_prefix = "staticFieldsOn{}".format(suffix) varIndices = np.arange(nVars)[np.logical_not(var_has_time_dim)] - timeIndependentFile = utils.write_vtp_header(out_dir, - static_prefix, - varIndices[0], - varIndices, - variable_list, - all_dim_vals, - vertices, - connectivity, - offsets, - nPoints, - nPolygons, - outType, - cellData=cellData, - pointData=pointData, - xtime=None) + timeIndependentFile = viz.write_vtp_header(out_dir, + static_prefix, + varIndices[0], + varIndices, + variable_list, + all_dim_vals, + vertices, + connectivity, + offsets, + nPoints, + nPolygons, + outType, + cellData=cellData, + pointData=pointData, + xtime=None) prev_file = "" for time_index in range(nTimes): @@ -235,7 +235,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if prev_file != file_names[time_index]: if prev_file != "": time_series_file.close() - time_series_file = utils.open_netcdf(file_names[time_index]) + time_series_file = viz.open_netcdf(file_names[time_index]) prev_file = file_names[time_index] if any_var_has_time_dim: @@ -278,21 +278,21 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, varIndices = np.arange(nVars) else: varIndices 
= np.arange(nVars)[var_has_time_dim] - timeDependentFile = utils.write_vtp_header(out_dir, - vtp_file_prefix, - varIndices[0], - varIndices, - variable_list, - all_dim_vals, - vertices, - connectivity, - offsets, - nPoints, - nPolygons, - outType, - cellData=cellData, - pointData=pointData, - xtime=xtime) + timeDependentFile = viz.write_vtp_header(out_dir, + vtp_file_prefix, + varIndices[0], + varIndices, + variable_list, + all_dim_vals, + vertices, + connectivity, + offsets, + nPoints, + nPolygons, + outType, + cellData=cellData, + pointData=pointData, + xtime=xtime) # add time step to pdv file pvd_file.write(' 0: print(" -- Extracting cell fields --") - mesh_file = utils.open_netcdf(args.mesh_filename) + mesh_file = viz.open_netcdf(args.mesh_filename) # Build cell geometry if args.topo_dim is None: (vertices, connectivity, offsets, valid_mask) = \ - utils.build_cell_geom_lists(mesh_file, use_32bit, args.lonlat) + viz.build_cell_geom_lists(mesh_file, use_32bit, args.lonlat) cell_to_point_map = None boundary_mask = None else: (vertices, connectivity, offsets, valid_mask, cell_to_point_map, - boundary_mask) = utils.build_topo_point_and_polygon_lists( + boundary_mask) = viz.build_topo_point_and_polygon_lists( mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: @@ -529,11 +529,11 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if len(vertexVars) > 0: print(" -- Extracting vertex fields --") - mesh_file = utils.open_netcdf(args.mesh_filename) + mesh_file = viz.open_netcdf(args.mesh_filename) # Build vertex geometry (vertices, connectivity, offsets, valid_mask) = \ - utils.build_vertex_geom_lists(mesh_file, use_32bit, args.lonlat) + viz.build_vertex_geom_lists(mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: mesh_file.close() @@ -553,11 +553,11 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if len(edgeVars) > 0: print(" -- Extracting edge fields --") - mesh_file = 
utils.open_netcdf(args.mesh_filename) + mesh_file = viz.open_netcdf(args.mesh_filename) # Build cell list (vertices, connectivity, offsets, valid_mask) = \ - utils.build_edge_geom_lists(mesh_file, use_32bit, args.lonlat) + viz.build_edge_geom_lists(mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: mesh_file.close() From 05defe299ee1ca86c27bdeb8d1ff70f0bf3a37c0 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 28 Apr 2019 12:11:29 +0200 Subject: [PATCH 127/180] Rename mpas_mesh_tools to just mpas_tools It's too confusing to have mpas_mesh_tools next to mesh_tools. Plus, the package is pulling in scripts from throughout the repo, not just mesh tools. --- conda/meta.yaml | 10 +++++----- docs/Makefile | 2 +- docs/api.rst | 6 +++--- docs/conf.py | 16 ++++++++-------- docs/environment.yml | 2 +- docs/index.rst | 4 ++-- {mpas_mesh_tools => mpas_tools}/__init__.py | 0 {mpas_mesh_tools => mpas_tools}/__main__.py | 8 ++++---- {mpas_mesh_tools => mpas_tools}/conversion.py | 2 +- {mpas_mesh_tools => mpas_tools}/io.py | 0 {mpas_mesh_tools => mpas_tools}/planar_hex.py | 2 +- .../tests/__init__.py | 0 .../tests/test_conversion.py | 4 ++-- {mpas_mesh_tools => mpas_tools}/translate.py | 2 +- {mpas_mesh_tools => mpas_tools}/viz.py | 0 setup.py | 8 ++++---- .../paraview_vtk_field_extractor/mpas_mesh_tools | 1 - .../paraview_vtk_field_extractor/mpas_tools | 1 + .../paraview_vtk_field_extractor.py | 2 +- 19 files changed, 35 insertions(+), 35 deletions(-) rename {mpas_mesh_tools => mpas_tools}/__init__.py (100%) rename {mpas_mesh_tools => mpas_tools}/__main__.py (73%) rename {mpas_mesh_tools => mpas_tools}/conversion.py (99%) rename {mpas_mesh_tools => mpas_tools}/io.py (100%) rename {mpas_mesh_tools => mpas_tools}/planar_hex.py (99%) rename {mpas_mesh_tools => mpas_tools}/tests/__init__.py (100%) rename {mpas_mesh_tools => mpas_tools}/tests/test_conversion.py (89%) rename {mpas_mesh_tools => mpas_tools}/translate.py (99%) rename {mpas_mesh_tools => 
mpas_tools}/viz.py (100%) delete mode 120000 visualization/paraview_vtk_field_extractor/mpas_mesh_tools create mode 120000 visualization/paraview_vtk_field_extractor/mpas_tools diff --git a/conda/meta.yaml b/conda/meta.yaml index 0392aae77..5227e4978 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -1,4 +1,4 @@ -{% set name = "mpas_mesh_tools" %} +{% set name = "mpas_tools" %} {% set version = "0.0.1" %} package: @@ -11,8 +11,8 @@ source: build: number: 0 entry_points: - - planar_hex = mpas_mesh_tools.planar_hex:main - - translate_planar_grid = mpas_mesh_tools.translate:main + - planar_hex = mpas_tools.planar_hex:main + - translate_planar_grid = mpas_tools.translate:main requirements: build: @@ -41,7 +41,7 @@ test: - mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - - mpas_mesh_tools/tests/* + - mpas_tools/tests/* commands: - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. @@ -51,7 +51,7 @@ test: - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - - python -m pytest mpas_mesh_tools/tests + - python -m pytest mpas_tools/tests - mark_horns_for_culling.py --help - create_landice_grid_from_generic_MPAS_grid.py --help - define_cullMask.py --help diff --git a/docs/Makefile b/docs/Makefile index 56119177a..1b24251fe 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -4,7 +4,7 @@ # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build -SPHINXPROJ = mpas_mesh_tools +SPHINXPROJ = mpas_tools SOURCEDIR = . 
BUILDDIR = _build diff --git a/docs/api.rst b/docs/api.rst index fefa8f0af..c6d138068 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -9,21 +9,21 @@ the documentation. Python package ============== -.. currentmodule:: mpas_mesh_tools.planar_hex +.. currentmodule:: mpas_tools.planar_hex .. autosummary:: :toctree: generated/ make_periodic_planar_hex_mesh -.. currentmodule:: mpas_mesh_tools.translate +.. currentmodule:: mpas_tools.translate .. autosummary:: :toctree: generated/ translate -.. currentmodule:: mpas_mesh_tools.io +.. currentmodule:: mpas_tools.io .. autosummary:: :toctree: generated/ diff --git a/docs/conf.py b/docs/conf.py index 39226d450..fac303ed5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# mpas_mesh_tools documentation build configuration file, created by +# mpas_tools documentation build configuration file, created by # sphinx-quickstart on Sat Mar 25 14:39:11 2017. # # This file is execfile()d with the current directory set to its @@ -13,7 +13,7 @@ # serve to show the default. import os -import mpas_mesh_tools +import mpas_tools # -- General configuration ------------------------------------------------ @@ -49,7 +49,7 @@ master_doc = 'index' # General information about the project. -project = u'mpas_mesh_tools' +project = u'mpas_tools' copyright = u'This software is open source software available under the BSD-3' \ u'license. Copyright (c) 2019 Triad National Security, LLC. ' \ u'All rights reserved. Copyright (c) 2018 Lawrence Livermore ' \ @@ -117,7 +117,7 @@ # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. -htmlhelp_basename = 'mpas_mesh_tools_doc' +htmlhelp_basename = 'mpas_tools_doc' # -- Options for LaTeX output --------------------------------------------- @@ -144,7 +144,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (master_doc, 'mpas_mesh_tools.tex', u'mpas_mesh_tools Documentation', + (master_doc, 'mpas_tools.tex', u'mpas_tools Documentation', author, 'manual'), ] @@ -154,7 +154,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'mpas_mesh_tools', u'mpas_mesh_tools Documentation', + (master_doc, 'mpas_tools', u'mpas_tools Documentation', [author], 1) ] @@ -165,8 +165,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'mpas_mesh_tools', u'mpas_mesh_tools Documentation', - author, 'mpas_mesh_tools', 'One line description of project.', + (master_doc, 'mpas_tools', u'mpas_tools Documentation', + author, 'mpas_tools', 'One line description of project.', 'Miscellaneous'), ] diff --git a/docs/environment.yml b/docs/environment.yml index 2ad5244dc..3b7a8e391 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -1,4 +1,4 @@ -name: mpas_mesh_tools_docs +name: mpas_tools_docs channels: - conda-forge dependencies: diff --git a/docs/index.rst b/docs/index.rst index cbefe0bdf..167ef1068 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,5 +1,5 @@ -mpas_mesh_tools -================== +mpas_tools +========== This repository houses geometric features relevant for climate science. 
diff --git a/mpas_mesh_tools/__init__.py b/mpas_tools/__init__.py similarity index 100% rename from mpas_mesh_tools/__init__.py rename to mpas_tools/__init__.py diff --git a/mpas_mesh_tools/__main__.py b/mpas_tools/__main__.py similarity index 73% rename from mpas_mesh_tools/__main__.py rename to mpas_tools/__main__.py index 6542cd2b2..ffa7fa56a 100755 --- a/mpas_mesh_tools/__main__.py +++ b/mpas_tools/__main__.py @@ -5,22 +5,22 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -import mpas_mesh_tools +import mpas_tools import argparse def main(): """ - Entry point for the main script ``mpas_mesh_tools`` + Entry point for the main script ``mpas_tools`` """ parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('-v', '--version', action='version', - version='mpas_mesh_tools {}'.format( - mpas_mesh_tools.__version__), + version='mpas_tools {}'.format( + mpas_tools.__version__), help="Show version number and exit") args = parser.parse_args() diff --git a/mpas_mesh_tools/conversion.py b/mpas_tools/conversion.py similarity index 99% rename from mpas_mesh_tools/conversion.py rename to mpas_tools/conversion.py index d0cac0c1a..8c7a7943c 100644 --- a/mpas_mesh_tools/conversion.py +++ b/mpas_tools/conversion.py @@ -7,7 +7,7 @@ import tempfile import shutil -from mpas_mesh_tools.io import write_netcdf +from mpas_tools.io import write_netcdf def convert(dsIn): diff --git a/mpas_mesh_tools/io.py b/mpas_tools/io.py similarity index 100% rename from mpas_mesh_tools/io.py rename to mpas_tools/io.py diff --git a/mpas_mesh_tools/planar_hex.py b/mpas_tools/planar_hex.py similarity index 99% rename from mpas_mesh_tools/planar_hex.py rename to mpas_tools/planar_hex.py index 73eed6a0d..1e7d71493 100755 --- a/mpas_mesh_tools/planar_hex.py +++ b/mpas_tools/planar_hex.py @@ -7,7 +7,7 @@ import xarray import argparse -from mpas_mesh_tools.io import write_netcdf +from mpas_tools.io 
import write_netcdf def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, diff --git a/mpas_mesh_tools/tests/__init__.py b/mpas_tools/tests/__init__.py similarity index 100% rename from mpas_mesh_tools/tests/__init__.py rename to mpas_tools/tests/__init__.py diff --git a/mpas_mesh_tools/tests/test_conversion.py b/mpas_tools/tests/test_conversion.py similarity index 89% rename from mpas_mesh_tools/tests/test_conversion.py rename to mpas_tools/tests/test_conversion.py index 172e37b8e..6591e7669 100755 --- a/mpas_mesh_tools/tests/test_conversion.py +++ b/mpas_tools/tests/test_conversion.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -from mpas_mesh_tools.conversion import convert, cull, mask -from mpas_mesh_tools.io import write_netcdf +from mpas_tools.conversion import convert, cull, mask +from mpas_tools.io import write_netcdf from geometric_features import read_feature_collection import xarray diff --git a/mpas_mesh_tools/translate.py b/mpas_tools/translate.py similarity index 99% rename from mpas_mesh_tools/translate.py rename to mpas_tools/translate.py index 07dd5fb2f..c8bd93b0e 100755 --- a/mpas_mesh_tools/translate.py +++ b/mpas_tools/translate.py @@ -7,7 +7,7 @@ import xarray -from mpas_mesh_tools.io import write_netcdf +from mpas_tools.io import write_netcdf def translate(mesh, xOffset=0., yOffset=0.): diff --git a/mpas_mesh_tools/viz.py b/mpas_tools/viz.py similarity index 100% rename from mpas_mesh_tools/viz.py rename to mpas_tools/viz.py diff --git a/setup.py b/setup.py index 3ee31ccfe..713aa156b 100755 --- a/setup.py +++ b/setup.py @@ -4,12 +4,12 @@ version = '0.0.1' -setup(name='mpas_mesh_tools', +setup(name='mpas_tools', version=version, description='A set of tools for creating and manipulating meshes for the' ' climate components based on the Model for Prediction ' 'Across Scales (MPAS) framework', - url='https://github.com/MPAS-Dev/MPAS-Tools/tree/master/mesh_tools', + url='https://github.com/MPAS-Dev/MPAS-Tools', author='MPAS-Analysis Developers', 
author_email='mpas-developers@googlegroups.com', license='BSD', @@ -40,5 +40,5 @@ 'visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py'], install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], entry_points={'console_scripts': - ['planar_hex = mpas_mesh_tools.planar_hex:main', - 'translate_planar_grid = mpas_mesh_tools.translate:main']}) + ['planar_hex = mpas_tools.planar_hex:main', + 'translate_planar_grid = mpas_tools.translate:main']}) diff --git a/visualization/paraview_vtk_field_extractor/mpas_mesh_tools b/visualization/paraview_vtk_field_extractor/mpas_mesh_tools deleted file mode 120000 index 36dde9bda..000000000 --- a/visualization/paraview_vtk_field_extractor/mpas_mesh_tools +++ /dev/null @@ -1 +0,0 @@ -../../mpas_mesh_tools/ \ No newline at end of file diff --git a/visualization/paraview_vtk_field_extractor/mpas_tools b/visualization/paraview_vtk_field_extractor/mpas_tools new file mode 120000 index 000000000..4b3a73aae --- /dev/null +++ b/visualization/paraview_vtk_field_extractor/mpas_tools @@ -0,0 +1 @@ +../../mpas_tools/ \ No newline at end of file diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 2fddfb2fd..58151e3b1 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -99,7 +99,7 @@ except ImportError: use_progress_bar = False -from mpas_mesh_tools import viz +from mpas_tools import viz def build_field_time_series(local_time_indices, file_names, mesh_file, From a4051f955437413d3da24d40f1f06ca89d10e11b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 28 Apr 2019 12:22:39 +0200 Subject: [PATCH 128/180] Update 2 coastal-alteration tools to be functions The functions are in mpas_tools.ocean.coastline_alteration. 
The scripts in ocean/coastline_alteration still work the same as before by calling the functions. --- mpas_tools/ocean/__init__.py | 0 mpas_tools/ocean/coastline_alteration.py | 69 ++++++++++++++++ .../add_critical_land_blockages_to_mask.py | 70 ++++++---------- ocean/coastline_alteration/mpas_tools | 1 + .../widen_transect_edge_masks.py | 79 ++++++++----------- 5 files changed, 130 insertions(+), 89 deletions(-) create mode 100644 mpas_tools/ocean/__init__.py create mode 100644 mpas_tools/ocean/coastline_alteration.py create mode 120000 ocean/coastline_alteration/mpas_tools diff --git a/mpas_tools/ocean/__init__.py b/mpas_tools/ocean/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mpas_tools/ocean/coastline_alteration.py b/mpas_tools/ocean/coastline_alteration.py new file mode 100644 index 000000000..a2bdd5890 --- /dev/null +++ b/mpas_tools/ocean/coastline_alteration.py @@ -0,0 +1,69 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy + + +def add_critical_land_blockages(dsMask, dsBlockages): + ''' + Parameters + ---------- + dsMask : `xarray.Dataset` + The mask to which critical blockages should be added + dsBlockage : `xarray.Dataset` + The transect masks defining critical land regions that should block + ocean flow (e.g. 
the Antarctic Peninsula) + + Returns + ------- + dsMask : `xarray.Dataset` + The mask with critical blockages included + ''' + + dsMask = dsMask.copy() + + nTransects = dsBlockages.sizes['nTransects'] + for transectIndex in range(nTransects): + dsMask.regionCellMasks[:, 0] = numpy.maximum( + dsBlockages.transectCellMasks[:, transectIndex], + dsMask.regionCellMasks[:, 0]) + + return dsMask + + +def widen_transect_edge_masks(dsMask, dsMesh, latitude_threshold=43.0): + ''' + Parameters + ---------- + dsMask : `xarray.Dataset` + The mask to which critical blockages should be added + dsMesh : `xarray.Dataset` + The transect masks defining critical land regions that should block + ocean flow (e.g. the Antarctic Peninsula) + latitude_threshold : float + Minimum latitude, degrees, for transect widening + + Returns + ------- + dsMask : `xarray.Dataset` + The mask with critical blockages included + ''' + latitude_threshold_radians = numpy.deg2rad(latitude_threshold) + + dsMask = dsMask.copy() + + maxEdges = dsMesh.sizes['maxEdges'] + + latMask = numpy.abs(dsMesh.latEdge) > latitude_threshold_radians + + edgeMask = numpy.logical_and( + latMask, dsMask.transectEdgeMasks == 1) + for iEdge in range(maxEdges): + eoc = dsMesh.edgesOnCell[:, iEdge]-1 + mask = numpy.logical_and(eoc >= 0, + edgeMask[eoc]) + # cells with a neighboring transect edge should be masked to 1 + dsMask['transectCellMasks'] = dsMask.transectCellMasks.where( + numpy.logical_not(mask), 1.) 
+ + return dsMask diff --git a/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py b/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py index 9f98deb15..76286b7a5 100755 --- a/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py +++ b/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py @@ -11,50 +11,32 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -import os -import shutil -from netCDF4 import Dataset -import numpy as np +import xarray import argparse +from mpas_tools.ocean.coastline_alteration import add_critical_land_blockages -def removeFile(fileName): - try: - os.remove(fileName) - except OSError: - pass - - -parser = \ - argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) -parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", - help="Mask file that includes cell and edge masks.", - metavar="INPUTMASKFILE", required=True) -parser.add_argument("-o", "--output_mask_file", dest="output_mask_filename", - help="Mask file that includes cell and edge masks.", - metavar="OUTPUTMASKFILE", required=True) -parser.add_argument("-b", "--blockage_file", dest="blockage_file", - help="Masks for each transect identifying critical land" - "blockage.", metavar="BLOCKFILE", - required=True) -args = parser.parse_args() - -removeFile(args.output_mask_filename) -shutil.copyfile(args.input_mask_filename, args.output_mask_filename) - -outMaskFile = Dataset(args.output_mask_filename, "r+") -nRegions = len(outMaskFile.dimensions["nRegions"]) -regionCellMasks = outMaskFile.variables["regionCellMasks"] - -blockageFile = Dataset(args.blockage_file, "r+") -nTransects = len(blockageFile.dimensions["nTransects"]) -transectCellMasks = blockageFile.variables["transectCellMasks"] -for transectIndex in range(nTransects): - # make sure the regionCellMasks for the first region is 1 anywhere a - # transectCellMask is 1 - regionCellMasks[:, 0] 
= np.maximum(transectCellMasks[:, transectIndex], - regionCellMasks[:, 0]) - -blockageFile.close() -outMaskFile.close() + +if __name__ == '__main__': + parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="INPUTMASKFILE", required=True) + parser.add_argument("-o", "--output_mask_file", + dest="output_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="OUTPUTMASKFILE", required=True) + parser.add_argument("-b", "--blockage_file", dest="blockage_file", + help="Masks for each transect identifying critical " + "land blockage.", metavar="BLOCKFILE", + required=True) + args = parser.parse_args() + + dsMask = xarray.open_dataset(args.input_mask_filename) + + dsBlockages = xarray.open_dataset(args.blockage_file) + + dsMask = add_critical_land_blockages(dsMask, dsBlockages) + dsMask.to_netcdf(args.output_mask_filename) diff --git a/ocean/coastline_alteration/mpas_tools b/ocean/coastline_alteration/mpas_tools new file mode 120000 index 000000000..4b3a73aae --- /dev/null +++ b/ocean/coastline_alteration/mpas_tools @@ -0,0 +1 @@ +../../mpas_tools/ \ No newline at end of file diff --git a/ocean/coastline_alteration/widen_transect_edge_masks.py b/ocean/coastline_alteration/widen_transect_edge_masks.py index 19a4730b2..1d4bc067c 100755 --- a/ocean/coastline_alteration/widen_transect_edge_masks.py +++ b/ocean/coastline_alteration/widen_transect_edge_masks.py @@ -10,49 +10,38 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals - -import numpy as np -from netCDF4 import Dataset import argparse - -parser = \ - argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) -parser.add_argument("-f", "--mask_file", dest="mask_filename", - help="Mask file with cell and edge transect masks.", - 
metavar="MASKFILE", - required=True) -parser.add_argument("-m", "--mesh_file", dest="mesh_filename", - help="MPAS Mesh filename.", metavar="MESHFILE", - required=True) -parser.add_argument("-l", "--latitude_threshold", dest="latitude_threshold", - help="Minimum latitude, degrees, for transect widening.", - required=False, type=float, default=43.0) -args = parser.parse_args() - -latitude_threshold_radians = args.latitude_threshold * 3.1415 / 180. - -# Obtain mesh variables -meshFile = Dataset(args.mesh_filename, "r") -nEdges = len(meshFile.dimensions["nEdges"]) -cellsOnEdge = meshFile.variables["cellsOnEdge"][:, :] -latEdge = meshFile.variables["latEdge"][:] -meshFile.close() - -# Obtain transect mask variables -maskFile = Dataset(args.mask_filename, "a") -nTransects = len(maskFile.dimensions["nTransects"]) -transectCellMasks = maskFile.variables["transectCellMasks"][:, :] -transectEdgeMasks = maskFile.variables["transectEdgeMasks"][:, :] - -print("widen_transect_edge_masks.py: Widening transects to two cells wide") -for iEdge in range(nEdges): - if abs(latEdge[iEdge]) > latitude_threshold_radians: - for iTransect in range(nTransects): - if transectEdgeMasks[iEdge, iTransect] == 1: - maskFile['transectCellMasks'][cellsOnEdge[iEdge, 0] - - 1, iTransect] = 1 - maskFile['transectCellMasks'][cellsOnEdge[iEdge, 1] - - 1, iTransect] = 1 - -maskFile.close() +import xarray + +from mpas_tools.ocean.coastline_alteration import widen_transect_edge_masks + + +if __name__ == '__main__': + + parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--mask_file", dest="mask_filename", + help="Mask file with cell and edge transect masks.", + metavar="MASKFILE", + required=True) + parser.add_argument("-m", "--mesh_file", dest="mesh_filename", + help="MPAS Mesh filename.", metavar="MESHFILE", + required=True) + parser.add_argument("-o", "--out_file", dest="out_filename", + help="Output mask file,different 
from input filename.", + metavar="MASKFILE", + required=True) + parser.add_argument("-l", "--latitude_threshold", + dest="latitude_threshold", + help="Minimum latitude, degrees, for transect " + "widening.", + required=False, type=float, default=43.0) + args = parser.parse_args() + + dsMask = xarray.open_dataset(args.mask_filename) + + dsMesh = xarray.open_dataset(args.mesh_filename) + + dsMask = widen_transect_edge_masks(dsMask, dsMesh, args.latitude_threshold) + dsMask.to_netcdf(args.out_filename) From fa331829bd080baa5e1a039870665332862f9504 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 29 Apr 2019 10:33:10 +0200 Subject: [PATCH 129/180] Switch conversion to using a temp directory This should prevent mistakes where files from other processes are accidentally retained (e.g. graph.info vs culled_graph.info) --- conda/meta.yaml | 1 + mpas_tools/conversion.py | 172 +++++++++++++++++++-------------------- 2 files changed, 83 insertions(+), 90 deletions(-) diff --git a/conda/meta.yaml b/conda/meta.yaml index 5227e4978..ec41f629f 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -33,6 +33,7 @@ requirements: - geometric_features - pyevtk - future + - backports.tempfile test: requires: diff --git a/mpas_tools/conversion.py b/mpas_tools/conversion.py index 8c7a7943c..6f43c05dd 100644 --- a/mpas_tools/conversion.py +++ b/mpas_tools/conversion.py @@ -4,13 +4,13 @@ import os import xarray import subprocess -import tempfile +from backports.tempfile import TemporaryDirectory import shutil from mpas_tools.io import write_netcdf -def convert(dsIn): +def convert(dsIn, graphInfoFileName=None): ''' Use ``MpasMeshConverter.x`` to convert an input mesh to a valid MPAS mesh that is fully compliant with the MPAS mesh specification. @@ -21,34 +21,46 @@ def convert(dsIn): dsIn : ``xarray.Dataset`` A data set to convert + graphInfoFileName : str, optional + A file path (relative or absolute) where the graph file (typically + ``graph.info`` should be written out. 
By default, ``graph.info`` is + not saved. + Returns ------- dsOut : ``xarray.Dataset`` The MPAS mesh ''' - tempFiles = [] - inFileName = _get_temp_path(tempFiles) - write_netcdf(dsIn, inFileName) + with TemporaryDirectory() as tempdir: + inFileName = '{}/mesh_in.nc'.format(tempdir) + write_netcdf(dsIn, inFileName) + + outFileName = '{}/mesh_out.nc'.format(tempdir) - outFileName = _get_temp_path(tempFiles) + if graphInfoFileName is not None: + graphInfoFileName = os.path.abspath(graphInfoFileName) - # go into the directory of the output file so the graph.info file ends - # up in the same place - owd = os.getcwd() - os.chdir(os.path.dirname(outFileName)) - subprocess.check_call(['MpasMeshConverter.x', inFileName, outFileName]) - os.chdir(owd) + # go into the directory of the output file so the graph.info file ends + # up in the same place + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + subprocess.check_call(['MpasMeshConverter.x', inFileName, outFileName]) + os.chdir(owd) - dsOut = xarray.open_dataset(outFileName) - dsOut.load() - _remove_temp_files(tempFiles) + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + if graphInfoFileName is not None: + shutil.copyfile('{}/graph.info'.format(outDir), + graphInfoFileName) return dsOut def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, - graphInfoPath=None): + graphInfoFileName=None): ''' Use ``MpasCellCuller.x`` to cull cells from a mesh based on the ``cullCell`` field in the input file or DataSet and/or the provided masks. @@ -72,9 +84,10 @@ def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, A data set with region masks that are 1 where cells should *not* be culled - graphInfoPath : str, optional - A path where the file ``graph.info`` should be written out. By - default, ``graph.info`` is written to a temp directory that is deleted. 
+ graphInfoFileName : str, optional + A file path (relative or absolute) where the graph file (typically + ``culled_graph.info`` should be written out. By default, + ``culled_graph.info`` is not saved. Returns ------- @@ -83,47 +96,46 @@ def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, ''' - tempFiles = [] - inFileName = _get_temp_path(tempFiles) - write_netcdf(dsIn, inFileName) - outFileName = _get_temp_path(tempFiles) + with TemporaryDirectory() as tempdir: + inFileName = '{}/ds_in.nc'.format(tempdir) + write_netcdf(dsIn, inFileName) + outFileName = '{}/ds_out.nc'.format(tempdir) - args = ['MpasCellCuller.x', inFileName, outFileName] + args = ['MpasCellCuller.x', inFileName, outFileName] - if dsMask is not None: - fileName = _get_temp_path(tempFiles) - write_netcdf(dsMask, fileName) - args.extend(['-m', fileName]) + if dsMask is not None: + fileName = '{}/mask.nc'.format(tempdir) + write_netcdf(dsMask, fileName) + args.extend(['-m', fileName]) - if dsInverse is not None: - fileName = _get_temp_path(tempFiles) - write_netcdf(dsInverse, fileName) - args.extend(['-i', fileName]) + if dsInverse is not None: + fileName = '{}/inverse.nc'.format(tempdir) + write_netcdf(dsInverse, fileName) + args.extend(['-i', fileName]) - if dsPreserve is not None: - fileName = _get_temp_path(tempFiles) - write_netcdf(dsPreserve, fileName) - args.extend(['-p', fileName]) + if dsPreserve is not None: + fileName = '{}/preserve.nc'.format(tempdir) + write_netcdf(dsPreserve, fileName) + args.extend(['-p', fileName]) - # go into the directory of the output file so the graph.info file ends - # up in the same place + # go into the directory of the output file so the graph.info file ends + # up in the same place - if graphInfoPath is not None: - graphInfoPath = os.path.abspath(graphInfoPath) + if graphInfoFileName is not None: + graphInfoFileName = os.path.abspath(graphInfoFileName) - owd = os.getcwd() - outDir = os.path.dirname(outFileName) - os.chdir(outDir) - 
subprocess.check_call(args) - os.chdir(owd) + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + subprocess.check_call(args) + os.chdir(owd) - dsOut = xarray.open_dataset(outFileName) - dsOut.load() + dsOut = xarray.open_dataset(outFileName) + dsOut.load() - if graphInfoPath is not None: - shutil.copyfile('{}/graph.info'.format(outDir), - '{}/graph.info'.format(graphInfoPath)) - _remove_temp_files(tempFiles) + if graphInfoFileName is not None: + shutil.copyfile('{}/culled_graph.info'.format(outDir), + graphInfoFileName) return dsOut @@ -152,49 +164,29 @@ def mask(dsMesh, fcMask=None, fcSeed=None, positiveLon=False): ''' - tempFiles = [] - inFileName = _get_temp_path(tempFiles) - write_netcdf(dsMesh, inFileName) - outFileName = _get_temp_path(tempFiles) - - args = ['MpasMaskCreator.x', inFileName, outFileName] + with TemporaryDirectory() as tempdir: + inFileName = '{}/mesh_in.nc'.format(tempdir) + write_netcdf(dsMesh, inFileName) + outFileName = '{}/mesh_out.nc'.format(tempdir) - if fcMask is not None: - fileName = _get_temp_path(tempFiles, ext='geojson') - fcMask.to_geojson(fileName) - args.extend(['-f', fileName]) + args = ['MpasMaskCreator.x', inFileName, outFileName] - if fcSeed is not None: - fileName = _get_temp_path(tempFiles, ext='geojson') - fcSeed.to_geojson(fileName) - args.extend(['-s', fileName]) + if fcMask is not None: + fileName = '{}/mask.geojson'.format(tempdir) + fcMask.to_geojson(fileName) + args.extend(['-f', fileName]) - if positiveLon: - args.append('--positive_lon') - - # go into the directory of the output file so the graph.info file ends - # up in the same place - owd = os.getcwd() - os.chdir(os.path.dirname(outFileName)) - subprocess.check_call(args) - os.chdir(owd) - - dsOut = xarray.open_dataset(outFileName) - dsOut.load() - _remove_temp_files(tempFiles) - - return dsOut + if fcSeed is not None: + fileName = '{}/seed.geojson'.format(tempdir) + fcSeed.to_geojson(fileName) + args.extend(['-s', fileName]) + if 
positiveLon: + args.append('--positive_lon') -def _get_temp_path(tempFiles, ext='nc'): - '''Returns the name of a temporary NetCDF file''' - fileName = '{}/{}.{}'.format(tempfile._get_default_tempdir(), - next(tempfile._get_candidate_names()), - ext) - tempFiles.append(fileName) - return fileName + subprocess.check_call(args) + dsOut = xarray.open_dataset(outFileName) + dsOut.load() -def _remove_temp_files(tempFiles): - for tempFileName in tempFiles: - os.remove(tempFileName) + return dsOut \ No newline at end of file From 93dd1594d16786c51bf1cb0cc4881f15f7c0ea77 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 29 Apr 2019 13:11:24 +0200 Subject: [PATCH 130/180] Add option to set NetCDF format The default is 'NETCDF3_64BIT' for MPAS compatibility. --- mpas_tools/io.py | 5 +++-- mpas_tools/planar_hex.py | 8 ++++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/mpas_tools/io.py b/mpas_tools/io.py index bdadde224..e1298ef5d 100644 --- a/mpas_tools/io.py +++ b/mpas_tools/io.py @@ -7,7 +7,8 @@ import sys -def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals, + format='NETCDF3_64BIT'): '''Write an xarray Dataset with NetCDF4 fill values where needed''' encodingDict = {} variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) @@ -25,7 +26,7 @@ def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): update_history(ds) - ds.to_netcdf(fileName, encoding=encodingDict) + ds.to_netcdf(fileName, encoding=encodingDict, format=format) def update_history(ds): diff --git a/mpas_tools/planar_hex.py b/mpas_tools/planar_hex.py index 1e7d71493..cd4b055c0 100755 --- a/mpas_tools/planar_hex.py +++ b/mpas_tools/planar_hex.py @@ -12,7 +12,8 @@ def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, nonperiodic_y, outFileName=None, - compareWithFileName=None): + compareWithFileName=None, + format='NETCDF3_64BIT'): ''' Builds an MPAS periodic, planar hexagonal 
mesh with the requested dimensions, optionally saving it to a file, and returs it as an @@ -41,6 +42,9 @@ def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, The name of a grid file to compare with to see if they are identical, used for testing purposes + format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', 'NETCDF3_CLASSIC'}, optional + The NetCDF format to use for output + Returns ------- mesh : ``xarray.Dataset`` @@ -66,7 +70,7 @@ def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, mesh.attrs.pop('dc') if outFileName is not None: - write_netcdf(mesh, outFileName) + write_netcdf(mesh, outFileName, format=format) if compareWithFileName is not None: # used to make sure results are exactly identical to periodic_hex From 6516c7e907a308e9e269988e0aee522cfc31194a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 3 May 2019 21:02:48 +0200 Subject: [PATCH 131/180] Move mpas_tools package into conda_package This will hopefully avoid confusion with tools outside the package --- {docs => conda_package/docs}/Makefile | 0 {docs => conda_package/docs}/api.rst | 27 +++++++++++++++++-- {docs => conda_package/docs}/conf.py | 2 +- {docs => conda_package/docs}/environment.yml | 5 ++++ {docs => conda_package/docs}/index.rst | 0 .../mpas_tools}/__init__.py | 0 .../mpas_tools}/__main__.py | 0 .../mpas_tools}/conversion.py | 0 .../mpas_tools}/io.py | 0 .../mpas_tools}/ocean/__init__.py | 0 .../mpas_tools}/ocean/coastline_alteration.py | 0 .../mpas_tools}/planar_hex.py | 0 .../mpas_tools}/tests/__init__.py | 0 .../mpas_tools}/tests/test_conversion.py | 0 .../mpas_tools}/translate.py | 0 .../mpas_tools}/viz.py | 0 {conda => conda_package/recipe}/build.sh | 2 ++ {conda => conda_package/recipe}/meta.yaml | 6 ++--- setup.py => conda_package/setup.py | 0 ocean/coastline_alteration/mpas_tools | 2 +- .../paraview_vtk_field_extractor/mpas_tools | 2 +- 21 files changed, 38 insertions(+), 8 deletions(-) rename {docs => conda_package/docs}/Makefile (100%) rename {docs => 
conda_package/docs}/api.rst (61%) rename {docs => conda_package/docs}/conf.py (99%) rename {docs => conda_package/docs}/environment.yml (71%) rename {docs => conda_package/docs}/index.rst (100%) rename {mpas_tools => conda_package/mpas_tools}/__init__.py (100%) rename {mpas_tools => conda_package/mpas_tools}/__main__.py (100%) rename {mpas_tools => conda_package/mpas_tools}/conversion.py (100%) rename {mpas_tools => conda_package/mpas_tools}/io.py (100%) rename {mpas_tools => conda_package/mpas_tools}/ocean/__init__.py (100%) rename {mpas_tools => conda_package/mpas_tools}/ocean/coastline_alteration.py (100%) rename {mpas_tools => conda_package/mpas_tools}/planar_hex.py (100%) rename {mpas_tools => conda_package/mpas_tools}/tests/__init__.py (100%) rename {mpas_tools => conda_package/mpas_tools}/tests/test_conversion.py (100%) rename {mpas_tools => conda_package/mpas_tools}/translate.py (100%) rename {mpas_tools => conda_package/mpas_tools}/viz.py (100%) rename {conda => conda_package/recipe}/build.sh (81%) rename {conda => conda_package/recipe}/meta.yaml (96%) rename setup.py => conda_package/setup.py (100%) diff --git a/docs/Makefile b/conda_package/docs/Makefile similarity index 100% rename from docs/Makefile rename to conda_package/docs/Makefile diff --git a/docs/api.rst b/conda_package/docs/api.rst similarity index 61% rename from docs/api.rst rename to conda_package/docs/api.rst index c6d138068..fb57a2b4e 100644 --- a/docs/api.rst +++ b/conda_package/docs/api.rst @@ -6,8 +6,8 @@ This page provides an auto-generated summary of the MPAS mesh-tools API. For more details and examples, refer to the relevant chapters in the main part of the documentation. -Python package -============== +MPAS mesh tools +=============== .. currentmodule:: mpas_tools.planar_hex @@ -23,6 +23,17 @@ Python package translate + +.. currentmodule:: mpas_tools.conversion + +.. autosummary:: + :toctree: generated/ + + convert + cull + mask + + .. currentmodule:: mpas_tools.io .. 
autosummary:: @@ -30,3 +41,15 @@ Python package write_netcdf + +Ocean Tools +=========== + +.. currentmodule:: mpas_tools.ocean.coastline_alteration + +.. autosummary:: + :toctree: generated/ + + add_critical_land_blockages + widen_transect_edge_masks + diff --git a/docs/conf.py b/conda_package/docs/conf.py similarity index 99% rename from docs/conf.py rename to conda_package/docs/conf.py index fac303ed5..0e8e5b05f 100644 --- a/docs/conf.py +++ b/conda_package/docs/conf.py @@ -56,7 +56,7 @@ u'National Security, LLC. All rights reserved. Copyright (c) ' \ u'2018 UT-Battelle, LLC. All rights reserved.' author = u'Xylar Asay-Davis, Doug Jacobsen, Michael Duda, Mark Petersen, ' \ - u'Adridan Turner' + u'Matt Hoffman, Adridan Turner, Philip Wolfram' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/docs/environment.yml b/conda_package/docs/environment.yml similarity index 71% rename from docs/environment.yml rename to conda_package/docs/environment.yml index 3b7a8e391..eefa4830d 100644 --- a/docs/environment.yml +++ b/conda_package/docs/environment.yml @@ -1,6 +1,7 @@ name: mpas_tools_docs channels: - conda-forge + - xylar dependencies: - python=3.7 - pytest @@ -10,6 +11,10 @@ dependencies: - numpy - scipy - xarray + - geometric_features + - pyevtk + - future + - backports.tempfile - sphinx - sphinx_rtd_theme - numpydoc diff --git a/docs/index.rst b/conda_package/docs/index.rst similarity index 100% rename from docs/index.rst rename to conda_package/docs/index.rst diff --git a/mpas_tools/__init__.py b/conda_package/mpas_tools/__init__.py similarity index 100% rename from mpas_tools/__init__.py rename to conda_package/mpas_tools/__init__.py diff --git a/mpas_tools/__main__.py b/conda_package/mpas_tools/__main__.py similarity index 100% rename from mpas_tools/__main__.py rename to conda_package/mpas_tools/__main__.py diff --git a/mpas_tools/conversion.py 
b/conda_package/mpas_tools/conversion.py similarity index 100% rename from mpas_tools/conversion.py rename to conda_package/mpas_tools/conversion.py diff --git a/mpas_tools/io.py b/conda_package/mpas_tools/io.py similarity index 100% rename from mpas_tools/io.py rename to conda_package/mpas_tools/io.py diff --git a/mpas_tools/ocean/__init__.py b/conda_package/mpas_tools/ocean/__init__.py similarity index 100% rename from mpas_tools/ocean/__init__.py rename to conda_package/mpas_tools/ocean/__init__.py diff --git a/mpas_tools/ocean/coastline_alteration.py b/conda_package/mpas_tools/ocean/coastline_alteration.py similarity index 100% rename from mpas_tools/ocean/coastline_alteration.py rename to conda_package/mpas_tools/ocean/coastline_alteration.py diff --git a/mpas_tools/planar_hex.py b/conda_package/mpas_tools/planar_hex.py similarity index 100% rename from mpas_tools/planar_hex.py rename to conda_package/mpas_tools/planar_hex.py diff --git a/mpas_tools/tests/__init__.py b/conda_package/mpas_tools/tests/__init__.py similarity index 100% rename from mpas_tools/tests/__init__.py rename to conda_package/mpas_tools/tests/__init__.py diff --git a/mpas_tools/tests/test_conversion.py b/conda_package/mpas_tools/tests/test_conversion.py similarity index 100% rename from mpas_tools/tests/test_conversion.py rename to conda_package/mpas_tools/tests/test_conversion.py diff --git a/mpas_tools/translate.py b/conda_package/mpas_tools/translate.py similarity index 100% rename from mpas_tools/translate.py rename to conda_package/mpas_tools/translate.py diff --git a/mpas_tools/viz.py b/conda_package/mpas_tools/viz.py similarity index 100% rename from mpas_tools/viz.py rename to conda_package/mpas_tools/viz.py diff --git a/conda/build.sh b/conda_package/recipe/build.sh similarity index 81% rename from conda/build.sh rename to conda_package/recipe/build.sh index af4a9acea..3606720cb 100644 --- a/conda/build.sh +++ b/conda_package/recipe/build.sh @@ -3,7 +3,9 @@ set -x set -e +cp -r 
ocean landice visualization mesh_tools conda_package +cd conda_package ${PYTHON} -m pip install . --no-deps -vv cd mesh_tools/mesh_conversion_tools diff --git a/conda/meta.yaml b/conda_package/recipe/meta.yaml similarity index 96% rename from conda/meta.yaml rename to conda_package/recipe/meta.yaml index ec41f629f..d68480ecd 100644 --- a/conda/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -6,7 +6,7 @@ package: version: '{{ version }}' source: - path: .. + path: ../.. build: number: 0 @@ -42,7 +42,7 @@ test: - mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - - mpas_tools/tests/* + - conda_package/mpas_tools/tests/* commands: - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. @@ -52,7 +52,7 @@ test: - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson - - python -m pytest mpas_tools/tests + - python -m pytest conda_package/mpas_tools/tests - mark_horns_for_culling.py --help - create_landice_grid_from_generic_MPAS_grid.py --help - define_cullMask.py --help diff --git a/setup.py b/conda_package/setup.py similarity index 100% rename from setup.py rename to conda_package/setup.py diff --git a/ocean/coastline_alteration/mpas_tools b/ocean/coastline_alteration/mpas_tools index 4b3a73aae..627733f3b 120000 --- a/ocean/coastline_alteration/mpas_tools +++ b/ocean/coastline_alteration/mpas_tools @@ -1 +1 @@ -../../mpas_tools/ \ No newline at end of file +../../conda_package/mpas_tools/ \ No newline at end of file diff --git a/visualization/paraview_vtk_field_extractor/mpas_tools 
b/visualization/paraview_vtk_field_extractor/mpas_tools index 4b3a73aae..627733f3b 120000 --- a/visualization/paraview_vtk_field_extractor/mpas_tools +++ b/visualization/paraview_vtk_field_extractor/mpas_tools @@ -1 +1 @@ -../../mpas_tools/ \ No newline at end of file +../../conda_package/mpas_tools/ \ No newline at end of file From 061bc4bc808e2cc6ead08192bcf385f36bc1436c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 3 May 2019 21:06:51 +0200 Subject: [PATCH 132/180] Make add_land_locked_cells_to_mask a function Made the looping more efficient. Currently, still uses NetCDF4 instead of xarray. --- .../mpas_tools/ocean/coastline_alteration.py | 336 ++++++++++++++++++ .../add_land_locked_cells_to_mask.py | 238 +------------ 2 files changed, 340 insertions(+), 234 deletions(-) diff --git a/conda_package/mpas_tools/ocean/coastline_alteration.py b/conda_package/mpas_tools/ocean/coastline_alteration.py index a2bdd5890..b709995c9 100644 --- a/conda_package/mpas_tools/ocean/coastline_alteration.py +++ b/conda_package/mpas_tools/ocean/coastline_alteration.py @@ -2,6 +2,9 @@ unicode_literals import numpy +from netCDF4 import Dataset +import os +import shutil def add_critical_land_blockages(dsMask, dsBlockages): @@ -67,3 +70,336 @@ def widen_transect_edge_masks(dsMask, dsMesh, latitude_threshold=43.0): numpy.logical_not(mask), 1.) return dsMask + + +def add_land_locked_cells_to_mask(input_mask_filename, output_mask_filename, + mesh_filename, latitude_threshold=43.0, + nSweeps=10): + ''' + Find ocean cells that are land-locked, and alter the cell mask so that they + are counted as land cells. + + Parameters + ---------- + input_mask_filename : str + Mask file that includes cell and edge masks. + + output_mask_filename : str + Mask file that includes cell and edge masks. + + mesh_filename : str + MPAS Mesh filename. + + latitude_threshold : float, optional + Minimum latitude, in degrees, for transect widening. 
+ + nSweeps : int, optional + Maximum number of sweeps to search for land-locked cells. + ''' + + # Obtain mesh variables + meshFile = Dataset(mesh_filename, "r") + nCells = len(meshFile.dimensions["nCells"]) + cellsOnCell = meshFile.variables["cellsOnCell"][:, :] - 1 + nEdgesOnCell = meshFile.variables["nEdgesOnCell"][:] + latCell = numpy.rad2deg(meshFile.variables["latCell"][:]) + lonCell = numpy.rad2deg(meshFile.variables["lonCell"][:]) + meshFile.close() + + _remove_file(output_mask_filename) + shutil.copyfile(input_mask_filename, output_mask_filename) + + # Obtain original cell mask from input file + inputMaskFile = Dataset(input_mask_filename, "r") + regionCellMasks = inputMaskFile.variables["regionCellMasks"][:, :] + # set landMask = flattened regionCellMasks + landMask = numpy.amax(regionCellMasks, axis=1) + inputMaskFile.close() + + # Open output file + outputMaskFile = Dataset(output_mask_filename, "a") + landMaskDiagnostic = outputMaskFile.createVariable( + "landMaskDiagnostic", "i", dimensions=("nCells")) + + regionCellMasks = outputMaskFile['regionCellMasks'] + + print("Running add_land_locked_cells_to_mask.py. 
Total number of cells: " + "{}".format(nCells)) + + landMask, removable = _remove_cells_with_isolated_edges1( + landMask, landMaskDiagnostic, regionCellMasks, latCell, nEdgesOnCell, + cellsOnCell, nCells, latitude_threshold) + landMask = _remove_cells_with_isolated_edges2( + landMask, landMaskDiagnostic, regionCellMasks, removable, + nEdgesOnCell, cellsOnCell, nCells, nSweeps) + oceanMask = _flood_fill(landMask, landMaskDiagnostic, removable, lonCell, + latCell, nEdgesOnCell, cellsOnCell, nCells) + landMask = _revert_cells_with_connected_edges( + oceanMask, landMask, landMaskDiagnostic, regionCellMasks, removable, + nEdgesOnCell, cellsOnCell, nCells, nSweeps) + outputMaskFile.close() + + +def _remove_cells_with_isolated_edges1(landMask, landMaskDiagnostic, + regionCellMasks, latCell, nEdgesOnCell, + cellsOnCell, nCells, + latitude_threshold): + print("Step 1: Searching for land-locked cells. Remove cells that only " + "have isolated active edges.") + + landMaskNew = numpy.array(landMask) + + landMaskDiagnostic[:] = landMask + + removable = numpy.logical_and(numpy.abs(latCell) >= latitude_threshold, + landMask == 0) + + nextCellsOnCell = numpy.zeros(cellsOnCell.shape, int) + + for iEdgeOnCell in range(nextCellsOnCell.shape[1]): + iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) + nextCellsOnCell[:, iEdgeOnCell] = \ + cellsOnCell[numpy.arange(nCells), iP1] + + valid = numpy.logical_and(removable.reshape(nCells, 1), + cellsOnCell >= 0) + + active = numpy.logical_not(landMask) + activeEdge = numpy.logical_and(valid, active[cellsOnCell]) + activeNextEdge = numpy.logical_and(valid, active[nextCellsOnCell]) + + # which vertices have adjacent active edges on this cell? + activeAdjacentEdges = numpy.logical_and(activeEdge, activeNextEdge) + + # which removable cells have no pairs of adjacent active cells? 
+ noActiveAdjacentEdges = numpy.logical_and( + removable, numpy.logical_not(numpy.any(activeAdjacentEdges, axis=1))) + + landMaskNew[noActiveAdjacentEdges] = 1 + landLockedCounter = numpy.count_nonzero(noActiveAdjacentEdges) + + regionCellMasks[:, 0] = numpy.maximum(regionCellMasks[:, 0], + noActiveAdjacentEdges) + + landMaskDiagnostic[noActiveAdjacentEdges] = 2 + + print(" Number of landLocked cells: {}".format(landLockedCounter)) + + return landMaskNew, removable + + +def _remove_cells_with_isolated_edges2(landMask, landMaskDiagnostic, + regionCellMasks, removable, + nEdgesOnCell, cellsOnCell, nCells, + nSweeps): + print("Step 2: Searching for land-locked cells. Remove cells that have " + "any isolated active edges.") + + nextCellsOnCell = numpy.zeros(cellsOnCell.shape, int) + prevCellsOnCell = numpy.zeros(cellsOnCell.shape, int) + + for iEdgeOnCell in range(nextCellsOnCell.shape[1]): + iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) + nextCellsOnCell[:, iEdgeOnCell] = \ + cellsOnCell[numpy.arange(nCells), iP1] + iM1 = numpy.mod(iEdgeOnCell - 1, nEdgesOnCell) + prevCellsOnCell[:, iEdgeOnCell] = \ + cellsOnCell[numpy.arange(nCells), iM1] + + for iSweep in range(nSweeps): + landLockedCounter = 0 + landMaskNew = numpy.array(landMask) + + mask = numpy.logical_and(removable, + landMask == 0) + + active = numpy.logical_not(landMask) + valid = numpy.logical_and(mask.reshape(nCells, 1), + cellsOnCell >= 0) + activeEdge = numpy.logical_and(valid, active[cellsOnCell]) + valid = numpy.logical_and(mask.reshape(nCells, 1), + nextCellsOnCell >= 0) + activeNextEdge = numpy.logical_and(valid, active[nextCellsOnCell]) + valid = numpy.logical_and(mask.reshape(nCells, 1), + prevCellsOnCell >= 0) + activePrevEdge = numpy.logical_and(valid, active[prevCellsOnCell]) + + # an edge is land-locked if it is active but neither neighbor is active + landLockedEdges = numpy.logical_and( + activeEdge, + numpy.logical_not( + numpy.logical_or(activePrevEdge, activeNextEdge))) + + landLockedCells 
= numpy.any(landLockedEdges, axis=1) + + landLockedCounter = numpy.count_nonzero(landLockedCells) + if landLockedCounter > 0: + landMaskNew[landLockedCells] = 1 + regionCellMasks[landLockedCells, 0] = 1 + landMaskDiagnostic[landLockedCells] = 3 + + landMask = landMaskNew + print(" Sweep: {} Number of landLocked cells removed: {}".format( + iSweep + 1, landLockedCounter)) + if landLockedCounter == 0: + break + + return landMask + + +def _flood_fill(landMask, landMaskDiagnostic, removable, lonCell, latCell, + nEdgesOnCell, cellsOnCell, nCells): + print("Step 3: Perform flood fill, starting from open ocean.") + + # init flood fill to 0 for water, -1 for land, 1 for known open ocean + floodFill = -1*numpy.ones(nCells, dtype="i") + mask = numpy.logical_and(removable, landMask == 0) + floodFill[mask] = 0 + + openOceanMask = numpy.zeros(nCells, bool) + + # North Pole + mask = latCell > 84.0 + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # Arctic + mask = numpy.logical_and( + numpy.logical_and(lonCell > 160.0, lonCell < 230.0), + latCell > 73.0) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # North Atlantic + mask = numpy.logical_and( + numpy.logical_and(lonCell > 315.0, lonCell < 340.0), + numpy.logical_and(latCell > 15.0, latCell < 45.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + mask = numpy.logical_and( + numpy.logical_and(lonCell > 290.0, lonCell < 300.0), + numpy.logical_and(latCell > 72.0, latCell < 75.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + mask = numpy.logical_and( + numpy.logical_and(lonCell > 0.0, lonCell < 10.0), + numpy.logical_and(latCell > 70.0, latCell < 75.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # North Pacific + mask = numpy.logical_and( + numpy.logical_and(lonCell > 150.0, lonCell < 225.0), + numpy.logical_and(latCell > 0.0, latCell < 45.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # South Atlantic + mask = numpy.logical_and( + 
numpy.logical_and(lonCell > 0.0, lonCell < 5.0), + numpy.logical_and(latCell > -60.0, latCell < 0.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # South Pacific + mask = numpy.logical_and( + numpy.logical_and(lonCell > 180.0, lonCell < 280.0), + numpy.logical_and(latCell > -60.0, latCell < -10.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # Southern Ocean + mask = numpy.logical_and( + numpy.logical_and(lonCell > 0.0, lonCell < 165.0), + numpy.logical_and(latCell > -60.0, latCell < -45.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + mask = numpy.logical_and(floodFill == 0, openOceanMask) + floodFill[mask] = 1 + + nFloodableCells = numpy.count_nonzero(floodFill == 0) + print(" Initial number of flood cells: {}".format(nFloodableCells)) + + landMaskDiagnostic[floodFill == 1] = 5 + + # sweep over neighbors of known open ocean points + for iSweep in range(0, nCells): + + newFloodCellsThisSweep = 0 + mask = floodFill == 0 + for iCellOnCell in range(cellsOnCell.shape[1]): + neighbors = cellsOnCell[:, iCellOnCell] + fill = numpy.logical_and( + mask, + numpy.logical_and(neighbors >= 0, floodFill[neighbors] == 1)) + floodFill[fill] = 1 + newFloodCellsThisSweep += numpy.count_nonzero(fill) + + print(" Sweep {} new flood cells this sweep: {}".format( + iSweep, newFloodCellsThisSweep)) + + if (newFloodCellsThisSweep == 0): + break + + oceanMask = (floodFill == 1) + + print('oceanMask:', numpy.count_nonzero(oceanMask)) + + return oceanMask + + +def _revert_cells_with_connected_edges(oceanMask, landMask, landMaskDiagnostic, + regionCellMasks, removable, + nEdgesOnCell, cellsOnCell, nCells, + nSweeps): + print("Step 4: Searching for land-locked cells, step 3: revert cells with " + "connected active edges") + + nextCellsOnCell = numpy.zeros(cellsOnCell.shape, int) + prevCellsOnCell = numpy.zeros(cellsOnCell.shape, int) + + for iEdgeOnCell in range(nextCellsOnCell.shape[1]): + iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) + 
nextCellsOnCell[:, iEdgeOnCell] = \ + cellsOnCell[numpy.arange(nCells), iP1] + iM1 = numpy.mod(iEdgeOnCell - 1, nEdgesOnCell) + prevCellsOnCell[:, iEdgeOnCell] = \ + cellsOnCell[numpy.arange(nCells), iM1] + + for iSweep in range(nSweeps): + landMaskNew = numpy.array(landMask) + + # only remove a cell that was added in Step 2, + # _remove_cells_with_isolated_edges2 + mask = numpy.logical_and(removable, landMaskDiagnostic[:] == 3) + + valid = numpy.logical_and(mask.reshape(nCells, 1), + cellsOnCell >= 0) + oceanEdge = numpy.logical_and(valid, oceanMask[cellsOnCell]) + valid = numpy.logical_and(mask.reshape(nCells, 1), + nextCellsOnCell >= 0) + activeNextEdge = numpy.logical_and(valid, + landMask[nextCellsOnCell] == 0) + valid = numpy.logical_and(mask.reshape(nCells, 1), + prevCellsOnCell >= 0) + activePrevEdge = numpy.logical_and(valid, + landMask[prevCellsOnCell] == 0) + + reactivate = numpy.any( + numpy.logical_and( + oceanEdge, + numpy.logical_or(activePrevEdge, activeNextEdge)), axis=1) + + landLockedCounter = numpy.count_nonzero(reactivate) + if landLockedCounter > 0: + landMaskNew[reactivate] = 0 + regionCellMasks[reactivate, 0] = 0 + oceanMask[reactivate] = 1 + landMaskDiagnostic[reactivate] = 4 + + landMask = landMaskNew + print(" Sweep: {} Number of land-locked cells returned: {}".format( + iSweep + 1, landLockedCounter)) + if landLockedCounter == 0: + break + + return landMask + + +def _remove_file(fileName): + try: + os.remove(fileName) + except OSError: + pass diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py index 29f8bb845..9885f0b30 100755 --- a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -9,18 +9,9 @@ from __future__ import absolute_import, division, print_function, \ unicode_literals -import os -import shutil -from netCDF4 import Dataset -import numpy as np import argparse - -def 
removeFile(fileName): - try: - os.remove(fileName) - except OSError: - pass +from mpas_tools.ocean.coastline_alteration import add_land_locked_cells_to_mask parser = \ @@ -45,228 +36,7 @@ def removeFile(fileName): required=False, type=int, default=10) args = parser.parse_args() -latitude_threshold_radians = args.latitude_threshold * 3.1415 / 180. - -# Obtain mesh variables -meshFile = Dataset(args.mesh_filename, "r") -nCells = len(meshFile.dimensions["nCells"]) -maxEdges = len(meshFile.dimensions["maxEdges"]) -cellsOnCell = meshFile.variables["cellsOnCell"][:, :] -nEdgesOnCell = meshFile.variables["nEdgesOnCell"][:] -latCell = meshFile.variables["latCell"][:] -lonCell = meshFile.variables["lonCell"][:] -meshFile.close() - -removeFile(args.output_mask_filename) -shutil.copyfile(args.input_mask_filename, args.output_mask_filename) - -# Obtain original cell mask from input file -inputMaskFile = Dataset(args.input_mask_filename, "r") -nRegions = len(inputMaskFile.dimensions["nRegions"]) -regionCellMasks = inputMaskFile.variables["regionCellMasks"][:, :] -# set landMask = flattened regionCellMasks -landMask = np.amax(regionCellMasks, axis=1) -inputMaskFile.close() - -# Open output file -outputMaskFile = Dataset(args.output_mask_filename, "a") -landMaskDiagnostic = outputMaskFile.createVariable( - "landMaskDiagnostic", "i", dimensions=("nCells")) - -print("Running add_land_locked_cells_to_mask.py. Total number of cells: " - "{}".format(nCells)) - -# use np.array, as simple = makes a pointer -landMaskNew = np.array(landMask) -activeEdgeSum = np.zeros(maxEdges, dtype="i") - -# Removable cells are ocean cells outside of latitude threshold -removableCellIndex = np.zeros(nCells, dtype="i") -nRemovableCells = 0 - -print("Step 1: Searching for land-locked cells. 
Remove cells that only have " - "isolated active edges.") -landLockedCounter = 0 -for iCell in range(nCells): - landMaskDiagnostic[iCell] = landMask[iCell] - # skip if outside latitude threshold or if this is already a land cell - if abs(latCell[iCell]) < latitude_threshold_radians or \ - landMask[iCell] == 1: - continue - removableCellIndex[nRemovableCells] = iCell - nRemovableCells += 1 - activeEdgeSum[:] = 0 - for iEdgeOnCell in range(nEdgesOnCell[iCell]): - # check if neighbor is an ocean cell (landMask=0) - # subtract 1 to convert 1-base to 0-base: - if landMask[cellsOnCell[iCell, iEdgeOnCell] - 1] == 0: - activeEdgeSum[iEdgeOnCell] += 1 - # % is modulo operator: - iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] - activeEdgeSum[iP1] += 1 - - if np.amax(activeEdgeSum[0:nEdgesOnCell[iCell]]) == 1: - outputMaskFile['regionCellMasks'][iCell, 1] = 1 - landLockedCounter += 1 - landMaskNew[iCell] = 1 - landMaskDiagnostic[iCell] = 2 - -landMask[:] = landMaskNew[:] -print(" Number of landLocked cells: {}".format(landLockedCounter)) - -print("Step 2: Searching for land-locked cells. Remove cells that have any " - "isolated active edges.") -for iSweep in range(args.nSweeps): - landLockedCounter = 0 - for iRemovableCell in range(0, nRemovableCells): - iCell = removableCellIndex[iRemovableCell] - if landMask[iCell] == 1: - continue - for iEdgeOnCell in range(nEdgesOnCell[iCell]): - # check if neighbor is an ocean cell (landMask=0) - # subtract 1 to convert 1-base to 0-base: - if landMask[cellsOnCell[iCell, iEdgeOnCell] - 1] == 0: - # % is modulo operator: - iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] - iM1 = (iEdgeOnCell - 1) % nEdgesOnCell[iCell] - # Is this neighbor's two neighbors to left and right land? - # if so, sum of masks is two. 
- # subtract 1 to convert 1-base to 0-base: - if (landMask[cellsOnCell[iCell, iP1] - 1] - + landMask[cellsOnCell[iCell, iM1] - 1]) == 2: - landLockedCounter += 1 - landMaskNew[iCell] = 1 - outputMaskFile['regionCellMasks'][iCell, 1] = 1 - landMaskDiagnostic[iCell] = 3 - # once we remove this cell, we can quit checking over edges - break - - landMask[:] = landMaskNew[:] - print(" Sweep: {} Number of landLocked cells removed: {}".format( - iSweep + 1, landLockedCounter)) - if landLockedCounter == 0: - break - -print("Step 3: Perform flood fill, starting from open ocean.") -floodFill = np.zeros(nCells, dtype="i") -floodableCellIndex = np.zeros(nCells, dtype="i") -nFloodableCells = 0 -floodFill[:] = -1 -d2r = 3.1415 / 180.0 - -# init flood fill to 0 for water, -1 for land, 1 for known open ocean regions -for iRemovableCell in range(0, nRemovableCells): - iCell = removableCellIndex[iRemovableCell] - if (landMaskDiagnostic[iCell] == 0): - floodFill[iCell] = 0 - if (latCell[iCell] > 84.0 * d2r # North Pole - # Arctic - or lonCell[iCell] > 160.0 * d2r - and lonCell[iCell] < 230.0 * d2r - and latCell[iCell] > 73.0 * d2r - # North Atlantic - or lonCell[iCell] > 315.0 * d2r - and lonCell[iCell] < 340.0 * d2r - and latCell[iCell] > 15.0 * d2r - and latCell[iCell] < 45.0 * d2r - # North Atlantic - or lonCell[iCell] > 290.0 * d2r - and lonCell[iCell] < 300.0 * d2r - and latCell[iCell] > 72.0 * d2r - and latCell[iCell] < 75.0 * d2r - # North Atlantic 2 - or lonCell[iCell] > 0.0 * d2r - and lonCell[iCell] < 10.0 * d2r - and latCell[iCell] > 70.0 * d2r - and latCell[iCell] < 75.0 * d2r - # North Pacific - or lonCell[iCell] > 150.0 * d2r - and lonCell[iCell] < 225.0 * d2r - and latCell[iCell] > 0.0 * d2r - and latCell[iCell] < 45.0 * d2r - # South Atlantic - or lonCell[iCell] > 0.0 * d2r - and lonCell[iCell] < 5.0 * d2r - and latCell[iCell] > -60.0 * d2r - and latCell[iCell] < 0.0 * d2r - # South Pacific - or lonCell[iCell] > 180.0 * d2r - and lonCell[iCell] < 280.0 * d2r - and 
latCell[iCell] > -60.0 * d2r - and latCell[iCell] < -10.0 * d2r - # Southern Ocean - or lonCell[iCell] > 0.0 * d2r - and lonCell[iCell] < 165.0 * d2r - and latCell[iCell] > -60.0 * d2r - and latCell[iCell] < -45.0 * d2r): - floodFill[iCell] = 1 - landMaskDiagnostic[iCell] = 5 # indicates seed region - else: - floodableCellIndex[nFloodableCells] = iCell - nFloodableCells += 1 -print(" Initial number of flood cells: {}".format(nFloodableCells)) - -# sweep over neighbors of known open ocean points -for iSweep in range(0, nCells): - newFloodCellsThisSweep = 0 - - for iFloodableCell in range(0, nFloodableCells): - iCell = floodableCellIndex[iFloodableCell] - if (floodFill[iCell] == 0): - - for iCellOnCellSweep in range(0, nEdgesOnCell[iCell]): - iCellNeighbor = cellsOnCell[iCell, iCellOnCellSweep] - 1 - - if (floodFill[iCellNeighbor] == 1): - floodFill[iCell] = 1 - newFloodCellsThisSweep += 1 - break - - print(" Sweep {} new flood cells this sweep: {}".format( - iSweep, newFloodCellsThisSweep)) - - if (newFloodCellsThisSweep == 0): - break - -oceanMask = np.zeros(nCells, dtype="i") -for iCell in range(0, nCells): - if (floodFill[iCell] == 1): - oceanMask[iCell] = 1 - -print("Step 4: Searching for land-locked cells, step 3: revert cells with " - "connected active edges") -for iSweep in range(args.nSweeps): - landLockedCounter = 0 - for iRemovableCell in range(0, nRemovableCells): - iCell = removableCellIndex[iRemovableCell] - # only remove a cell that was added in lats round (red cells) - if landMaskDiagnostic[iCell] == 3: - for iEdgeOnCell in range(nEdgesOnCell[iCell]): - # check if neighbor is an ocean cell (landMask=0) - # subtract 1 to convert 1-base to 0-base: - if oceanMask[cellsOnCell[iCell, iEdgeOnCell] - 1] == 1: - # % is modulo operator: - iP1 = (iEdgeOnCell + 1) % nEdgesOnCell[iCell] - iM1 = (iEdgeOnCell - 1) % nEdgesOnCell[iCell] - # Is either of this neighbor's two neighbors to left and - # right ocean? - # if so, sum of masks is two. 
- # subtract 1 to convert 1-base to 0-base: - if (landMask[cellsOnCell[iCell, iP1] - 1] == 0 - or landMask[cellsOnCell[iCell, iM1] - 1] == 0): - landLockedCounter += 1 - landMaskNew[iCell] = 0 - outputMaskFile['regionCellMasks'][iCell, 1] = 0 - landMaskDiagnostic[iCell] = 4 - oceanMask[iCell] = 1 - # once we remove this cell, we can quit checking over - # edges - break - - landMask[:] = landMaskNew[:] - print(" Sweep: {} Number of land-locked cells returned: {}".format( - iSweep + 1, landLockedCounter)) - if landLockedCounter == 0: - break -outputMaskFile.close() +add_land_locked_cells_to_mask(args.input_mask_filename, + args.output_mask_filename, args.mesh_filename, + args.latitude_threshold, args.nSweeps) From bde3e5db59074889a42ed7cb7bdb6da8dbb3f761 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 3 May 2019 23:03:50 +0200 Subject: [PATCH 133/180] Change add_land_locked_cells_to_mask to use xarray --- .../mpas_tools/ocean/coastline_alteration.py | 240 +++++++----------- .../add_land_locked_cells_to_mask.py | 55 ++-- 2 files changed, 129 insertions(+), 166 deletions(-) diff --git a/conda_package/mpas_tools/ocean/coastline_alteration.py b/conda_package/mpas_tools/ocean/coastline_alteration.py index b709995c9..5a654dda0 100644 --- a/conda_package/mpas_tools/ocean/coastline_alteration.py +++ b/conda_package/mpas_tools/ocean/coastline_alteration.py @@ -2,9 +2,7 @@ unicode_literals import numpy -from netCDF4 import Dataset -import os -import shutil +import xarray def add_critical_land_blockages(dsMask, dsBlockages): @@ -72,8 +70,7 @@ def widen_transect_edge_masks(dsMask, dsMesh, latitude_threshold=43.0): return dsMask -def add_land_locked_cells_to_mask(input_mask_filename, output_mask_filename, - mesh_filename, latitude_threshold=43.0, +def add_land_locked_cells_to_mask(dsMask, dsMesh, latitude_threshold=43.0, nSweeps=10): ''' Find ocean cells that are land-locked, and alter the cell mask so that they @@ -81,91 +78,80 @@ def 
add_land_locked_cells_to_mask(input_mask_filename, output_mask_filename, Parameters ---------- - input_mask_filename : str - Mask file that includes cell and edge masks. + dsMask : ``xarray.Dataset`` + A land-mask data set - output_mask_filename : str - Mask file that includes cell and edge masks. - - mesh_filename : str - MPAS Mesh filename. + dsMesh : ``xarray.Dataset`` + MPAS Mesh data set latitude_threshold : float, optional - Minimum latitude, in degrees, for transect widening. + Minimum latitude, in degrees, for transect widening nSweeps : int, optional - Maximum number of sweeps to search for land-locked cells. + Maximum number of sweeps to search for land-locked cells + + Returns + ------- + dsMask : ``xarray.Dataset`` + A copy of the land-mask data set with land-locked cells added to the + mask for the first region ''' - # Obtain mesh variables - meshFile = Dataset(mesh_filename, "r") - nCells = len(meshFile.dimensions["nCells"]) - cellsOnCell = meshFile.variables["cellsOnCell"][:, :] - 1 - nEdgesOnCell = meshFile.variables["nEdgesOnCell"][:] - latCell = numpy.rad2deg(meshFile.variables["latCell"][:]) - lonCell = numpy.rad2deg(meshFile.variables["lonCell"][:]) - meshFile.close() + dsMask = xarray.Dataset(dsMask) + dsMesh = dsMesh.copy(deep=True) + + landMask = dsMask.regionCellMasks.max(dim='nRegions') > 0 - _remove_file(output_mask_filename) - shutil.copyfile(input_mask_filename, output_mask_filename) + dsMask['landMaskDiagnostic'] = xarray.where(landMask, 1, 0) - # Obtain original cell mask from input file - inputMaskFile = Dataset(input_mask_filename, "r") - regionCellMasks = inputMaskFile.variables["regionCellMasks"][:, :] - # set landMask = flattened regionCellMasks - landMask = numpy.amax(regionCellMasks, axis=1) - inputMaskFile.close() + print("Running add_land_locked_cells_to_mask.py. 
Total number of cells: " + "{}".format(dsMesh.sizes['nCells'])) - # Open output file - outputMaskFile = Dataset(output_mask_filename, "a") - landMaskDiagnostic = outputMaskFile.createVariable( - "landMaskDiagnostic", "i", dimensions=("nCells")) + cellsOnCell = dsMesh.cellsOnCell - 1 + nEdgesOnCell = dsMesh.nEdgesOnCell - regionCellMasks = outputMaskFile['regionCellMasks'] + nextCellsOnCell = cellsOnCell.copy(deep=True) + prevCellsOnCell = cellsOnCell.copy(deep=True) + for iEdgeOnCell in range(nextCellsOnCell.shape[1]): + iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) + nextCellsOnCell[:, iEdgeOnCell] = cellsOnCell[:, iP1] + iM1 = numpy.mod(iEdgeOnCell - 1, nEdgesOnCell) + prevCellsOnCell[:, iEdgeOnCell] = cellsOnCell[:, iM1] - print("Running add_land_locked_cells_to_mask.py. Total number of cells: " - "{}".format(nCells)) + dsMesh['cellsOnCell'] = cellsOnCell + dsMesh['nextCellsOnCell'] = nextCellsOnCell + dsMesh['prevCellsOnCell'] = prevCellsOnCell + dsMesh['latCell'] = numpy.rad2deg(dsMesh.latCell) + dsMesh['lonCell'] = numpy.rad2deg(dsMesh.lonCell) landMask, removable = _remove_cells_with_isolated_edges1( - landMask, landMaskDiagnostic, regionCellMasks, latCell, nEdgesOnCell, - cellsOnCell, nCells, latitude_threshold) + dsMask, dsMesh, landMask, latitude_threshold) landMask = _remove_cells_with_isolated_edges2( - landMask, landMaskDiagnostic, regionCellMasks, removable, - nEdgesOnCell, cellsOnCell, nCells, nSweeps) - oceanMask = _flood_fill(landMask, landMaskDiagnostic, removable, lonCell, - latCell, nEdgesOnCell, cellsOnCell, nCells) + dsMask, dsMesh, landMask, removable, nSweeps) + oceanMask = _flood_fill(dsMask, dsMesh, landMask, removable) landMask = _revert_cells_with_connected_edges( - oceanMask, landMask, landMaskDiagnostic, regionCellMasks, removable, - nEdgesOnCell, cellsOnCell, nCells, nSweeps) - outputMaskFile.close() + dsMask, dsMesh, oceanMask, landMask, removable, nSweeps) + + return dsMask -def _remove_cells_with_isolated_edges1(landMask, 
landMaskDiagnostic, - regionCellMasks, latCell, nEdgesOnCell, - cellsOnCell, nCells, +def _remove_cells_with_isolated_edges1(dsMask, dsMesh, landMask, latitude_threshold): print("Step 1: Searching for land-locked cells. Remove cells that only " "have isolated active edges.") - landMaskNew = numpy.array(landMask) - - landMaskDiagnostic[:] = landMask - - removable = numpy.logical_and(numpy.abs(latCell) >= latitude_threshold, - landMask == 0) - - nextCellsOnCell = numpy.zeros(cellsOnCell.shape, int) - - for iEdgeOnCell in range(nextCellsOnCell.shape[1]): - iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) - nextCellsOnCell[:, iEdgeOnCell] = \ - cellsOnCell[numpy.arange(nCells), iP1] - - valid = numpy.logical_and(removable.reshape(nCells, 1), - cellsOnCell >= 0) + landMaskNew = landMask.copy(deep=True) active = numpy.logical_not(landMask) + removable = numpy.logical_and( + numpy.abs(dsMesh.latCell) >= latitude_threshold, active) + + cellsOnCell = dsMesh.cellsOnCell + valid = numpy.logical_and(removable, cellsOnCell >= 0) activeEdge = numpy.logical_and(valid, active[cellsOnCell]) + + nextCellsOnCell = dsMesh.nextCellsOnCell + valid = numpy.logical_and(removable, nextCellsOnCell >= 0) activeNextEdge = numpy.logical_and(valid, active[nextCellsOnCell]) # which vertices have adjacent active edges on this cell? 
@@ -178,50 +164,37 @@ def _remove_cells_with_isolated_edges1(landMask, landMaskDiagnostic, landMaskNew[noActiveAdjacentEdges] = 1 landLockedCounter = numpy.count_nonzero(noActiveAdjacentEdges) - regionCellMasks[:, 0] = numpy.maximum(regionCellMasks[:, 0], - noActiveAdjacentEdges) + dsMask.regionCellMasks[:, 0] = numpy.maximum(dsMask.regionCellMasks[:, 0], + 1*noActiveAdjacentEdges) - landMaskDiagnostic[noActiveAdjacentEdges] = 2 + dsMask.landMaskDiagnostic[noActiveAdjacentEdges] = 2 print(" Number of landLocked cells: {}".format(landLockedCounter)) return landMaskNew, removable -def _remove_cells_with_isolated_edges2(landMask, landMaskDiagnostic, - regionCellMasks, removable, - nEdgesOnCell, cellsOnCell, nCells, +def _remove_cells_with_isolated_edges2(dsMask, dsMesh, landMask, removable, nSweeps): print("Step 2: Searching for land-locked cells. Remove cells that have " "any isolated active edges.") - nextCellsOnCell = numpy.zeros(cellsOnCell.shape, int) - prevCellsOnCell = numpy.zeros(cellsOnCell.shape, int) - - for iEdgeOnCell in range(nextCellsOnCell.shape[1]): - iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) - nextCellsOnCell[:, iEdgeOnCell] = \ - cellsOnCell[numpy.arange(nCells), iP1] - iM1 = numpy.mod(iEdgeOnCell - 1, nEdgesOnCell) - prevCellsOnCell[:, iEdgeOnCell] = \ - cellsOnCell[numpy.arange(nCells), iM1] + cellsOnCell = dsMesh.cellsOnCell + nextCellsOnCell = dsMesh.nextCellsOnCell + prevCellsOnCell = dsMesh.prevCellsOnCell for iSweep in range(nSweeps): landLockedCounter = 0 - landMaskNew = numpy.array(landMask) - - mask = numpy.logical_and(removable, - landMask == 0) + landMaskNew = landMask.copy(deep=True) active = numpy.logical_not(landMask) - valid = numpy.logical_and(mask.reshape(nCells, 1), - cellsOnCell >= 0) + mask = numpy.logical_and(removable, active) + + valid = numpy.logical_and(mask, cellsOnCell >= 0) activeEdge = numpy.logical_and(valid, active[cellsOnCell]) - valid = numpy.logical_and(mask.reshape(nCells, 1), - nextCellsOnCell >= 0) + valid 
= numpy.logical_and(mask, nextCellsOnCell >= 0) activeNextEdge = numpy.logical_and(valid, active[nextCellsOnCell]) - valid = numpy.logical_and(mask.reshape(nCells, 1), - prevCellsOnCell >= 0) + valid = numpy.logical_and(mask, prevCellsOnCell >= 0) activePrevEdge = numpy.logical_and(valid, active[prevCellsOnCell]) # an edge is land-locked if it is active but neither neighbor is active @@ -235,8 +208,8 @@ def _remove_cells_with_isolated_edges2(landMask, landMaskDiagnostic, landLockedCounter = numpy.count_nonzero(landLockedCells) if landLockedCounter > 0: landMaskNew[landLockedCells] = 1 - regionCellMasks[landLockedCells, 0] = 1 - landMaskDiagnostic[landLockedCells] = 3 + dsMask.regionCellMasks[landLockedCells, 0] = 1 + dsMask.landMaskDiagnostic[landLockedCells] = 3 landMask = landMaskNew print(" Sweep: {} Number of landLocked cells removed: {}".format( @@ -247,20 +220,21 @@ def _remove_cells_with_isolated_edges2(landMask, landMaskDiagnostic, return landMask -def _flood_fill(landMask, landMaskDiagnostic, removable, lonCell, latCell, - nEdgesOnCell, cellsOnCell, nCells): +def _flood_fill(dsMask, dsMesh, landMask, removable): print("Step 3: Perform flood fill, starting from open ocean.") # init flood fill to 0 for water, -1 for land, 1 for known open ocean - floodFill = -1*numpy.ones(nCells, dtype="i") - mask = numpy.logical_and(removable, landMask == 0) - floodFill[mask] = 0 + floodFill = xarray.where( + numpy.logical_and(removable, numpy.logical_not(landMask)), 0, -1) - openOceanMask = numpy.zeros(nCells, bool) + latCell = dsMesh.latCell + lonCell = dsMesh.lonCell + + cellsOnCell = dsMesh.cellsOnCell # North Pole mask = latCell > 84.0 - openOceanMask = numpy.logical_or(openOceanMask, mask) + openOceanMask = mask # Arctic mask = numpy.logical_and( @@ -312,20 +286,22 @@ def _flood_fill(landMask, landMaskDiagnostic, removable, lonCell, latCell, nFloodableCells = numpy.count_nonzero(floodFill == 0) print(" Initial number of flood cells: {}".format(nFloodableCells)) - 
landMaskDiagnostic[floodFill == 1] = 5 + dsMask.landMaskDiagnostic[floodFill == 1] = 5 # sweep over neighbors of known open ocean points - for iSweep in range(0, nCells): + for iSweep in range(dsMesh.sizes['nCells']): newFloodCellsThisSweep = 0 mask = floodFill == 0 + cellIndices = numpy.nonzero(mask.values)[0] for iCellOnCell in range(cellsOnCell.shape[1]): - neighbors = cellsOnCell[:, iCellOnCell] - fill = numpy.logical_and( - mask, - numpy.logical_and(neighbors >= 0, floodFill[neighbors] == 1)) - floodFill[fill] = 1 - newFloodCellsThisSweep += numpy.count_nonzero(fill) + neighbors = cellsOnCell[cellIndices, iCellOnCell] + filledNeighbors = numpy.logical_and(neighbors >= 0, + floodFill[neighbors] == 1) + fillIndices = cellIndices[filledNeighbors.values] + if(len(fillIndices) > 0): + floodFill[fillIndices] = 1 + newFloodCellsThisSweep += len(fillIndices) print(" Sweep {} new flood cells this sweep: {}".format( iSweep, newFloodCellsThisSweep)) @@ -340,42 +316,29 @@ def _flood_fill(landMask, landMaskDiagnostic, removable, lonCell, latCell, return oceanMask -def _revert_cells_with_connected_edges(oceanMask, landMask, landMaskDiagnostic, - regionCellMasks, removable, - nEdgesOnCell, cellsOnCell, nCells, - nSweeps): +def _revert_cells_with_connected_edges(dsMask, dsMesh, oceanMask, landMask, + removable, nSweeps): print("Step 4: Searching for land-locked cells, step 3: revert cells with " "connected active edges") - nextCellsOnCell = numpy.zeros(cellsOnCell.shape, int) - prevCellsOnCell = numpy.zeros(cellsOnCell.shape, int) - - for iEdgeOnCell in range(nextCellsOnCell.shape[1]): - iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) - nextCellsOnCell[:, iEdgeOnCell] = \ - cellsOnCell[numpy.arange(nCells), iP1] - iM1 = numpy.mod(iEdgeOnCell - 1, nEdgesOnCell) - prevCellsOnCell[:, iEdgeOnCell] = \ - cellsOnCell[numpy.arange(nCells), iM1] + cellsOnCell = dsMesh.cellsOnCell + nextCellsOnCell = dsMesh.nextCellsOnCell + prevCellsOnCell = dsMesh.prevCellsOnCell for iSweep in 
range(nSweeps): landMaskNew = numpy.array(landMask) # only remove a cell that was added in Step 2, # _remove_cells_with_isolated_edges2 - mask = numpy.logical_and(removable, landMaskDiagnostic[:] == 3) + mask = numpy.logical_and(removable, dsMask.landMaskDiagnostic == 3) - valid = numpy.logical_and(mask.reshape(nCells, 1), - cellsOnCell >= 0) + notLand = numpy.logical_not(landMask) + valid = numpy.logical_and(mask, cellsOnCell >= 0) oceanEdge = numpy.logical_and(valid, oceanMask[cellsOnCell]) - valid = numpy.logical_and(mask.reshape(nCells, 1), - nextCellsOnCell >= 0) - activeNextEdge = numpy.logical_and(valid, - landMask[nextCellsOnCell] == 0) - valid = numpy.logical_and(mask.reshape(nCells, 1), - prevCellsOnCell >= 0) - activePrevEdge = numpy.logical_and(valid, - landMask[prevCellsOnCell] == 0) + valid = numpy.logical_and(mask, nextCellsOnCell >= 0) + activeNextEdge = numpy.logical_and(valid, notLand[nextCellsOnCell]) + valid = numpy.logical_and(mask, prevCellsOnCell >= 0) + activePrevEdge = numpy.logical_and(valid, notLand[prevCellsOnCell]) reactivate = numpy.any( numpy.logical_and( @@ -385,9 +348,9 @@ def _revert_cells_with_connected_edges(oceanMask, landMask, landMaskDiagnostic, landLockedCounter = numpy.count_nonzero(reactivate) if landLockedCounter > 0: landMaskNew[reactivate] = 0 - regionCellMasks[reactivate, 0] = 0 + dsMask.regionCellMasks[reactivate, 0] = 0 oceanMask[reactivate] = 1 - landMaskDiagnostic[reactivate] = 4 + dsMask.landMaskDiagnostic[reactivate] = 4 landMask = landMaskNew print(" Sweep: {} Number of land-locked cells returned: {}".format( @@ -396,10 +359,3 @@ def _revert_cells_with_connected_edges(oceanMask, landMask, landMaskDiagnostic, break return landMask - - -def _remove_file(fileName): - try: - os.remove(fileName) - except OSError: - pass diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py index 9885f0b30..799d211fe 100755 --- 
a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -10,33 +10,40 @@ unicode_literals import argparse +import xarray from mpas_tools.ocean.coastline_alteration import add_land_locked_cells_to_mask +if __name__ == '__main__': + parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="INPUTMASKFILE", required=True) + parser.add_argument("-o", "--output_mask_file", + dest="output_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="OUTPUTMASKFILE", required=True) + parser.add_argument("-m", "--mesh_file", dest="mesh_filename", + help="MPAS Mesh filename.", metavar="MESHFILE", + required=True) + parser.add_argument("-l", "--latitude_threshold", + dest="latitude_threshold", + help="Minimum latitude, in degrees, for transect " + "widening.", + required=False, type=float, default=43.0) + parser.add_argument("-n", "--number_sweeps", dest="nSweeps", + help="Maximum number of sweeps to search for " + "land-locked cells.", + required=False, type=int, default=10) + args = parser.parse_args() -parser = \ - argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) -parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", - help="Mask file that includes cell and edge masks.", - metavar="INPUTMASKFILE", required=True) -parser.add_argument("-o", "--output_mask_file", dest="output_mask_filename", - help="Mask file that includes cell and edge masks.", - metavar="OUTPUTMASKFILE", required=True) -parser.add_argument("-m", "--mesh_file", dest="mesh_filename", - help="MPAS Mesh filename.", metavar="MESHFILE", - required=True) -parser.add_argument("-l", "--latitude_threshold", dest="latitude_threshold", - help="Minimum latitude, in degrees, 
for transect " - "widening.", - required=False, type=float, default=43.0) -parser.add_argument("-n", "--number_sweeps", dest="nSweeps", - help="Maximum number of sweeps to search for land-locked " - "cells.", - required=False, type=int, default=10) -args = parser.parse_args() + dsMask = xarray.open_dataset(args.input_mask_filename) + dsMesh = xarray.open_dataset(args.mesh_filename) -add_land_locked_cells_to_mask(args.input_mask_filename, - args.output_mask_filename, args.mesh_filename, - args.latitude_threshold, args.nSweeps) + dsMask = add_land_locked_cells_to_mask(dsMask, dsMesh, + args.latitude_threshold, + args.nSweeps) + dsMask.to_netcdf(args.output_mask_filename) From 4efecfc01f2816fc50800b5e3af6d8872172c9ac Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 6 May 2019 09:20:40 +0200 Subject: [PATCH 134/180] Add test for non-periodic planar-hex meshes --- conda_package/mpas_tools/planar_hex.py | 6 ++++-- conda_package/recipe/meta.yaml | 2 ++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/conda_package/mpas_tools/planar_hex.py b/conda_package/mpas_tools/planar_hex.py index cd4b055c0..81f9fba77 100755 --- a/conda_package/mpas_tools/planar_hex.py +++ b/conda_package/mpas_tools/planar_hex.py @@ -444,9 +444,11 @@ def main(): help='Cells in y direction') parser.add_argument('--dc', dest='dc', type=float, required=True, help='Distance between cell centers in meters') - parser.add_argument('--npx', '--nonperiodic_x', action="store_true", + parser.add_argument('--npx', '--nonperiodic_x', dest='nonperiodic_x', + action="store_true", help='non-periodic in x direction') - parser.add_argument('--npy', '--nonperiodic_y', action="store_true", + parser.add_argument('--npy', '--nonperiodic_y', dest='nonperiodic_y', + action="store_true", help='non-periodic in y direction') parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, required=False, default='grid.nc', diff --git a/conda_package/recipe/meta.yaml 
b/conda_package/recipe/meta.yaml index d68480ecd..d2cce64f1 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -52,6 +52,8 @@ test: - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - planar_hex --nx=30 --ny=20 --dc=1000. --npx --npy --outFileName='nonperiodic_mesh_30x20_1km.nc' + - MpasCellCuller.x nonperiodic_mesh_30x20_1km.nc culled_nonperiodic_mesh_30x20_1km.nc - python -m pytest conda_package/mpas_tools/tests - mark_horns_for_culling.py --help - create_landice_grid_from_generic_MPAS_grid.py --help From eb25e775aaf5c8c4ad5b8df034daf057826e3176 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 8 May 2019 16:32:01 +0200 Subject: [PATCH 135/180] Add support for lists of masks for the conda-package cell culler --- conda_package/mpas_tools/conversion.py | 47 ++++++++++++++++---------- 1 file changed, 29 insertions(+), 18 deletions(-) diff --git a/conda_package/mpas_tools/conversion.py b/conda_package/mpas_tools/conversion.py index 6f43c05dd..c8bd9b44c 100644 --- a/conda_package/mpas_tools/conversion.py +++ b/conda_package/mpas_tools/conversion.py @@ -70,19 +70,21 @@ def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, Parameters ---------- - dsIn : ``xarray.Dataset``, optional + dsIn : ``xarray.Dataset`` A data set to cull, possibly with a ``cullCell`` field set to one where cells should be removed - dsMask : ``xarray.Dataset``, optional - A data set with region masks that are 1 where cells should be culled + dsMask : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 1 where cells + should be culled - dsInverse : ``xarray.Dataset``, optional - A data set with region masks that are 0 where cells should be culled + dsInverse : 
``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 0 where cells + should be culled - dsPreserve : ``xarray.Dataset``, optional - A data set with region masks that are 1 where cells should *not* be - culled + dsPreserve : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 1 where cells + should *not* be culled graphInfoFileName : str, optional A file path (relative or absolute) where the graph file (typically @@ -104,19 +106,28 @@ def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, args = ['MpasCellCuller.x', inFileName, outFileName] if dsMask is not None: - fileName = '{}/mask.nc'.format(tempdir) - write_netcdf(dsMask, fileName) - args.extend(['-m', fileName]) + if not isinstance(dsMask, list): + dsMask = [dsMask] + for index, ds in enumerate(dsMask): + fileName = '{}/mask{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-m', fileName]) if dsInverse is not None: - fileName = '{}/inverse.nc'.format(tempdir) - write_netcdf(dsInverse, fileName) - args.extend(['-i', fileName]) + if not isinstance(dsInverse, list): + dsInverse = [dsInverse] + for index, ds in enumerate(dsInverse): + fileName = '{}/inverse{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-i', fileName]) if dsPreserve is not None: - fileName = '{}/preserve.nc'.format(tempdir) - write_netcdf(dsPreserve, fileName) - args.extend(['-p', fileName]) + if not isinstance(dsPreserve, list): + dsPreserve = [dsPreserve] + for index, ds in enumerate(dsPreserve): + fileName = '{}/preserve{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-p', fileName]) # go into the directory of the output file so the graph.info file ends # up in the same place @@ -189,4 +200,4 @@ def mask(dsMesh, fcMask=None, fcSeed=None, positiveLon=False): dsOut = xarray.open_dataset(outFileName) dsOut.load() - return dsOut \ No newline at end of file + return dsOut From 
a6a4ecfc5867a48f3a82e39e3567401104495ee8 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 10 May 2019 07:39:14 +0200 Subject: [PATCH 136/180] Add support for cmake to mesh_conversion_tools This is needed to be able to build on linux and OSX on conda-forge --- conda_package/recipe/build.sh | 14 +++++--------- conda_package/recipe/meta.yaml | 1 + mesh_tools/mesh_conversion_tools/CMakeLists.txt | 17 +++++++++++++++++ 3 files changed, 23 insertions(+), 9 deletions(-) create mode 100644 mesh_tools/mesh_conversion_tools/CMakeLists.txt diff --git a/conda_package/recipe/build.sh b/conda_package/recipe/build.sh index 3606720cb..f88a71de3 100644 --- a/conda_package/recipe/build.sh +++ b/conda_package/recipe/build.sh @@ -10,14 +10,10 @@ ${PYTHON} -m pip install . --no-deps -vv cd mesh_tools/mesh_conversion_tools -export CXX=${GXX} -export CFLAGS="-O3 -std=c++0x -fopenmp -lstdc++" - +# build and install JIGSAW +mkdir build +cd build +cmake -DCMAKE_INSTALL_PREFIX=${PREFIX} -DCMAKE_BUILD_TYPE=Release .. make - -install -d ${PREFIX}/bin/ -for exec in MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x -do - install -m 755 ${exec} ${PREFIX}/bin/ -done +make install diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index d2cce64f1..caae64eee 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -17,6 +17,7 @@ build: requirements: build: - {{ compiler('cxx') }} + - cmake host: - python - hdf5 diff --git a/mesh_tools/mesh_conversion_tools/CMakeLists.txt b/mesh_tools/mesh_conversion_tools/CMakeLists.txt new file mode 100644 index 000000000..aeebcb0de --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/CMakeLists.txt @@ -0,0 +1,17 @@ +cmake_minimum_required (VERSION 3.0.2) +project (mesh_conversion_tools) + +include_directories (netcdf-cxx-4.2 .) 
+ +set(SOURCES netcdf_utils.cpp netcdf-cxx-4.2/ncvalues.cpp netcdf-cxx-4.2/netcdf.cpp) + +add_executable (MpasMeshConverter.x mpas_mesh_converter.cpp ${SOURCES}) +target_link_libraries (MpasMeshConverter.x netcdf) + +add_executable (MpasCellCuller.x mpas_cell_culler.cpp ${SOURCES}) +target_link_libraries (MpasCellCuller.x netcdf) + +add_executable (MpasMaskCreator.x mpas_mask_creator.cpp jsoncpp.cpp ${SOURCES}) +target_link_libraries (MpasMaskCreator.x netcdf) + +install (TARGETS MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x DESTINATION bin) From c88c86314b85ac0b0e30bd00e7f15104c078a309 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 10 May 2019 08:26:04 +0200 Subject: [PATCH 137/180] Rename conflicting name merge --> mergeOp This was causing issues with clang compilers --- .../mpas_cell_culler.cpp | 78 +++++++++---------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp b/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp index fc523cd55..87d62e9aa 100644 --- a/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp +++ b/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp @@ -16,7 +16,7 @@ using namespace std; -enum { merge, invert, preserve }; +enum { mergeOp, invertOp, preserveOp }; int nCells, nVertices, nEdges, vertexDegree, maxEdges; bool spherical, periodic; @@ -155,13 +155,13 @@ int main ( int argc, char *argv[] ) { for ( int i = 3; i < argc; i+=2 ) { foundOperation = false; if (strcmp(argv[i], "-m") == 0 ) { - mask_ops.push_back(static_cast(merge)); + mask_ops.push_back(static_cast(mergeOp)); foundOperation = true; } else if ( strcmp(argv[i], "-i") == 0 ){ - mask_ops.push_back(static_cast(invert)); + mask_ops.push_back(static_cast(invertOp)); foundOperation = true; } else if ( strcmp(argv[i], "-p") == 0 ){ - mask_ops.push_back(static_cast(preserve)); + mask_ops.push_back(static_cast(preserveOp)); foundOperation = true; } else if ( strcmp(argv[i], "-c") == 0 ){ 
outputMap = true; @@ -471,8 +471,8 @@ int mergeCellMasks(const string masksFilename, const int maskOp){/*{{{*/ } } - if ( maskOp == invert || maskOp == merge ) { - if ( maskOp == invert ) { + if ( maskOp == invertOp || maskOp == mergeOp ) { + if ( maskOp == invertOp ) { for (i = 0; i < nCells; i++){ flattenedMask[i] = (flattenedMask[i] + 1) % 2; } @@ -481,7 +481,7 @@ int mergeCellMasks(const string masksFilename, const int maskOp){/*{{{*/ for ( i = 0; i < nCells; i++ ){ cullCell[i] = max(cullCell[i], flattenedMask[i]); } - } else if ( maskOp == preserve ) { + } else if ( maskOp == preserveOp ) { for ( i = 0; i < nCells; i++ ) { if ( flattenedMask[i] && cullCell[i] ) { cullCell[i] = 0; @@ -614,21 +614,21 @@ int outputGridDimensions( const string outputFilename ){/*{{{*/ int nCellsNew, nEdgesNew, nVerticesNew; // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Replace, NULL, 0, NcFile::Offset64Bits); /* for(vec_int_itr = edgesOnCell.begin(); vec_int_itr != edgesOnCell.end(); ++vec_int_itr){ - maxEdges = std::max(maxEdges, (int)(*vec_int_itr).size()); + maxEdges = std::max(maxEdges, (int)(*vec_int_itr).size()); }*/ - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // define dimensions NcDim *nCellsDim; NcDim *nEdgesDim; @@ -652,7 +652,7 @@ int outputGridDimensions( const string outputFilename ){/*{{{*/ for(int iEdge = 0; iEdge < nEdges; iEdge++){ nEdgesNew += (edgeMap.at(iEdge) != -1); } - + // write dimensions if (!(nCellsDim = grid.add_dim( "nCells", nCellsNew) )) return NC_ERR; if (!(nEdgesDim = grid.add_dim( "nEdges", nEdgesNew) )) return NC_ERR; @@ -662,7 +662,7 @@ int outputGridDimensions( const string outputFilename ){/*{{{*/ if (!(timeDim = grid.add_dim( "Time") )) return NC_ERR; grid.close(); - + // file closed when file obj goes out 
of scope return 0; }/*}}}*/ @@ -675,10 +675,10 @@ int outputGridAttributes( const string inputFilename, const string outputFilenam * **********************************************************************/ // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); @@ -689,7 +689,7 @@ int outputGridAttributes( const string inputFilename, const string outputFilenam string history_str = ""; string id_str = ""; string parent_str = ""; - + // write attributes if(!spherical){ if (!(sphereAtt = grid.add_att( "on_a_sphere", "NO\0"))) return NC_ERR; @@ -734,7 +734,7 @@ int outputGridAttributes( const string inputFilename, const string outputFilenam if (!(id = grid.add_att( "file_id", id_str.c_str() ))) return NC_ERR; grid.close(); - + // file closed when file obj goes out of scope return 0; }/*}}}*/ @@ -749,16 +749,16 @@ int mapAndOutputGridCoordinates( const string inputFilename, const string output * **********************************************************************/ // Return this code to the OS in case of failure. 
static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nCellsDim = grid.get_dim( "nCells" ); NcDim *nEdgesDim = grid.get_dim( "nEdges" ); @@ -774,7 +774,7 @@ int mapAndOutputGridCoordinates( const string inputFilename, const string output NcVar *idx2cellVar, *idx2edgeVar, *idx2vertexVar; int i, idx_map; - + double *xOld, *yOld, *zOld, *latOld, *lonOld; double *xNew, *yNew, *zNew, *latNew, *lonNew; int *idxToNew; @@ -833,7 +833,7 @@ int mapAndOutputGridCoordinates( const string inputFilename, const string output delete[] latNew; delete[] lonNew; delete[] idxToNew; - + //Build and write edge coordinate arrays xOld = new double[nEdges]; yOld = new double[nEdges]; @@ -964,16 +964,16 @@ int mapAndOutputCellFields( const string inputFilename, const string outputFilen * ***************************************************************/ // Return this code to the OS in case of failure. 
static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nCellsDim = grid.get_dim( "nCells" ); NcDim *nEdgesDim = grid.get_dim( "nEdges" ); @@ -993,7 +993,7 @@ int mapAndOutputCellFields( const string inputFilename, const string outputFilen double *areaCellNew; int *tmp_arr_old, *nEdgesOnCellOld, *nEdgesOnCellNew; int *tmp_arr_new; - + tmp_arr_old = new int[nCells*maxEdges]; nEdgesOnCellOld = new int[nCells]; nEdgesOnCellNew = new int[nCellsNew]; @@ -1106,7 +1106,7 @@ int mapAndOutputCellFields( const string inputFilename, const string outputFilen delete[] tmp_arr_new; // Map areaCell - areaCellNew = new double[nCellsNew]; + areaCellNew = new double[nCellsNew]; for(int iCell = 0; iCell < nCells; iCell++){ if(cellMap.at(iCell) != -1){ @@ -1154,16 +1154,16 @@ int mapAndOutputEdgeFields( const string inputFilename, const string outputFilen * ***************************************************************/ // Return this code to the OS in case of failure. 
static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nEdgesDim = grid.get_dim( "nEdges" ); NcDim *maxEdges2Dim = grid.get_dim( "maxEdges2" ); @@ -1265,7 +1265,7 @@ int mapAndOutputEdgeFields( const string inputFilename, const string outputFilen #endif } } - + if (!(voeVar = grid.add_var("verticesOnEdge", ncInt, nEdgesDim, twoDim))) return NC_ERR; if (!voeVar->put(verticesOnEdgeNew,nEdgesNew,2)) return NC_ERR; if (!(coeVar = grid.add_var("cellsOnEdge", ncInt, nEdgesDim, twoDim))) return NC_ERR; @@ -1394,16 +1394,16 @@ int mapAndOutputVertexFields( const string inputFilename, const string outputFil * ***************************************************************/ // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nVerticesDim = grid.get_dim( "nVertices" ); NcDim *vertexDegreeDim = grid.get_dim( "vertexDegree" ); From 8cfffeed4ab814091c234405fda7bdfbb766f673 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 10 May 2019 09:10:48 +0200 Subject: [PATCH 138/180] Update conversion test to use Agg backend Failing to build on Azure linux without this --- conda_package/mpas_tools/tests/test_conversion.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/conda_package/mpas_tools/tests/test_conversion.py b/conda_package/mpas_tools/tests/test_conversion.py index 6591e7669..4cce4d55a 100755 --- a/conda_package/mpas_tools/tests/test_conversion.py +++ b/conda_package/mpas_tools/tests/test_conversion.py 
@@ -2,6 +2,8 @@ from mpas_tools.conversion import convert, cull, mask from mpas_tools.io import write_netcdf +import matplotlib +matplotlib.use('Agg') from geometric_features import read_feature_collection import xarray From d46107bab2cf3ac012bb08c729f13fde91e8aa99 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 10 May 2019 19:11:50 +0200 Subject: [PATCH 139/180] Bump version number --- conda_package/docs/conf.py | 4 ++-- conda_package/recipe/meta.yaml | 2 +- conda_package/setup.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/conda_package/docs/conf.py b/conda_package/docs/conf.py index 0e8e5b05f..1255a17bd 100644 --- a/conda_package/docs/conf.py +++ b/conda_package/docs/conf.py @@ -63,9 +63,9 @@ # built documents. # # The short X.Y version. -version = u'0.0.1' +version = u'0.0.2' # The full version, including alpha/beta/rc tags. -release = u'0.0.1' +release = u'0.0.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index caae64eee..803bc3e2b 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "mpas_tools" %} -{% set version = "0.0.1" %} +{% set version = "0.0.2" %} package: name: '{{ name|lower }}' diff --git a/conda_package/setup.py b/conda_package/setup.py index 713aa156b..019da4133 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = '0.0.1' +version = '0.0.2' setup(name='mpas_tools', version=version, From 9c1e553d385f15cf140c87b5cced1b65364538e5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 10 May 2019 13:24:58 -0600 Subject: [PATCH 140/180] Explicitly add -std=c++0x flag --- mesh_tools/mesh_conversion_tools/CMakeLists.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mesh_tools/mesh_conversion_tools/CMakeLists.txt b/mesh_tools/mesh_conversion_tools/CMakeLists.txt index aeebcb0de..c9312a12b 100644 --- a/mesh_tools/mesh_conversion_tools/CMakeLists.txt +++ b/mesh_tools/mesh_conversion_tools/CMakeLists.txt @@ -1,6 +1,8 @@ cmake_minimum_required (VERSION 3.0.2) project (mesh_conversion_tools) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") + include_directories (netcdf-cxx-4.2 .) set(SOURCES netcdf_utils.cpp netcdf-cxx-4.2/ncvalues.cpp netcdf-cxx-4.2/netcdf.cpp) From 7463f35361f6e433471c547621cc16c91ccdc582 Mon Sep 17 00:00:00 2001 From: "Riley X. 
Brady" Date: Tue, 14 May 2019 15:26:48 -0600 Subject: [PATCH 141/180] bugfix on time_series_file.close() location --- .../paraview_vtk_field_extractor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 58151e3b1..95a29e0c0 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -164,8 +164,6 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: nHyperSlabs += len(extra_dim_vals) - time_series_file.close() - any_var_has_time_dim = np.any(var_has_time_dim) if topo_dim is not None: @@ -176,6 +174,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: nTopoLevels = None + time_series_file.close() + try: os.makedirs(out_dir) except OSError: From 19355a892001e8b067a0241cf9dc4f66cb365b04 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 17 May 2019 10:00:06 +0200 Subject: [PATCH 142/180] Add an external script to conda-package planar_hex --- mesh_tools/planar_hex/mpas_tools | 1 + mesh_tools/planar_hex/planar_hex | 11 +++++++++++ 2 files changed, 12 insertions(+) create mode 120000 mesh_tools/planar_hex/mpas_tools create mode 100755 mesh_tools/planar_hex/planar_hex diff --git a/mesh_tools/planar_hex/mpas_tools b/mesh_tools/planar_hex/mpas_tools new file mode 120000 index 000000000..627733f3b --- /dev/null +++ b/mesh_tools/planar_hex/mpas_tools @@ -0,0 +1 @@ +../../conda_package/mpas_tools/ \ No newline at end of file diff --git a/mesh_tools/planar_hex/planar_hex b/mesh_tools/planar_hex/planar_hex new file mode 100755 index 000000000..0af1f7a8c --- /dev/null +++ b/mesh_tools/planar_hex/planar_hex @@ -0,0 +1,11 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +import re +import sys + +from mpas_tools.planar_hex import 
main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) From 2bf75f52c7728f9f4269942e3529aa76a08f70c3 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Thu, 30 May 2019 14:00:28 -0600 Subject: [PATCH 143/180] Minor improvements after code review * Add error if mandatory command line arguments are missing * Add checks before inquiring optional dimensions * Clean up attribute merging * Add attribute with merge point metadata --- mesh_tools/merge_split_meshes/merge_grids.py | 54 +++++++++++++------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py index 12eebb91e..1ef7117b4 100755 --- a/mesh_tools/merge_split_meshes/merge_grids.py +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -4,14 +4,10 @@ ''' import sys -import numpy as np import netCDF4 import argparse -import math -from collections import OrderedDict -import scipy.spatial -import time from datetime import datetime +import json #print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" @@ -26,6 +22,10 @@ options = parser.parse_args() +if options.file1 == None: + sys.exit("Missing argument for name of file 1. Please add with -1 argument.") +if options.file2 == None: + sys.exit("Missing argument for name of file 2.
Please add with -2 argument.") f1 = netCDF4.Dataset(options.file1) nCells1 = len(f1.dimensions['nCells']) @@ -39,12 +39,14 @@ nVertices2 = len(f2.dimensions['nVertices']) Time2= len(f2.dimensions['Time']) -if Time1 != Time2: - sys.exit("ERROR: The two files have different lengths of the Time dimension.") if len(f1.dimensions['vertexDegree']) != len(f2.dimensions['vertexDegree']): - sys.exit("ERROR: The two files have different lengths of the vertexDegree dimension.") -if len(f1.dimensions['nVertLevels']) != len(f2.dimensions['nVertLevels']): - sys.exit("ERROR: The two files have different lengths of the nVertLevels dimension.") + sys.exit("ERROR: The two files have different lengths of the vertexDegree dimension.") +# Check some other possible dimensions: +optionalDims = ('Time', 'nVertLevels', 'nVertInterfaces') +for dim in optionalDims: + if dim in f1.dimensions and dim in f2.dimensions: + if len(f1.dimensions[dim]) != len(f2.dimensions[dim]): + sys.exit("ERROR: The two files have different lengths of the {} dimension.".format(dim)) # Create new file @@ -62,10 +64,13 @@ maxEdges = max(len(f1.dimensions['maxEdges']), len(f2.dimensions['maxEdges'])) fout.createDimension('maxEdges', maxEdges) fout.createDimension('maxEdges2', maxEdges*2) -fout.createDimension('nVertLevels', len(f1.dimensions['nVertLevels'])) -fout.createDimension('nVertInterfaces', len(f1.dimensions['nVertInterfaces'])) -fout.createDimension('Time', size=None) # make unlimited dimension +for dim in optionalDims: + if dim in f1.dimensions and dim in f2.dimensions: + if dim == 'Time': + fout.createDimension('Time', size=None) # make unlimited dimension + else: + fout.createDimension(dim, len(f1.dimensions[dim])) # compare list of variables @@ -176,10 +181,23 @@ newVar[nVertices1:,:] = part2 -# add some needed attributes -fout.on_a_sphere = "NO" -fout.sphere_radius = 0.0 -fout.is_periodic = "NO" +# add some standard attributes +attrToCopy = ("on_a_sphere", "sphere_radius", "is_periodic") +for attr in 
attrToCopy: + if attr in f1.ncattrs() and attr in f2.ncattrs(): + if f1.getncattr(attr) == f2.getncattr(attr): + fout.setncattr(attr, f1.getncattr(attr)) + else: + print("Warning: Value for '{0}' global attribute differs between input files. '{0}' being skipped.".format(attr)) + else: + print("Warning: '{0}' global attribute not present in both input files. '{0}' being skipped.".format(attr)) +# Add merge info to allow exact splitting later +fout.merge_point = json.dumps({'nCells': nCells1, + 'nEdges': nEdges1, + 'nVertices': nVertices1, + 'maxEdges1': len(f1.dimensions['maxEdges']), + 'maxEdges2': len(f2.dimensions['maxEdges']) + }) # Update history attribute of netCDF file thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) setattr(fout, 'history', thiscommand ) @@ -187,5 +205,5 @@ f1.close() f2.close() -print('\nMerge completed.') +print('\nMerge completed to file {}.'.format(options.outFile)) From edd6d34e32319420a631d293d2f1967a47962604 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Wed, 29 May 2019 13:54:03 -0400 Subject: [PATCH 144/180] Add Tool to split 2 merged meshes into separate files For splitting 2 MPAS non-contiguous meshes that were previously merged into a single file using `merge_grids.py` --- mesh_tools/merge_split_meshes/split_grids.py | 210 +++++++++++++++++++ 1 file changed, 210 insertions(+) create mode 100755 mesh_tools/merge_split_meshes/split_grids.py diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py new file mode 100755 index 000000000..30ad7c2c5 --- /dev/null +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python +""" +Tool to split 2 previously merged MPAS non-contiguous meshes into separate files +""" + +import os +import sys +import json +import argparse + +from datetime import datetime + +from netCDF4 import Dataset + + +def parse_args(args=None): + + parser = 
argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + + parser.add_argument('infile', + help='Mesh file to split', metavar='MESHFILE') + + parser.add_argument('-1', '--outfile1', default='mesh1.nc', metavar='FILENAME', + help='File name for first mesh output \n(default: %(default)s)') + + parser.add_argument('-2', '--outfile2', default='mesh2.nc', metavar='FILENAME', + help='File name for second mesh output \n(default: %(default)s)') + + parser.add_argument('--nCells', type=int, + help='The number of cells in the first mesh') + + parser.add_argument('--nEdges', type=int, + help='The number of edges in the first mesh') + + parser.add_argument('--nVertices', type=int, + help='The number of vertices in the first mesh') + + return parser.parse_intermixed_args(args) + + +def split_grids(infile=None, outfile1=None, outfile2=None, + nCells=None, nEdges=None, nVertices=None, runner=None): + now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + if not runner: + runner = '{}.split_grids(infile={}, outfile1={}, outfile2={}, nCells={},' \ + 'nEdges={}, nVertices={})'.format(os.path.splitext(__file__)[0], + infile, outfile1, outfile2, + nCells, nEdges, nVertices) + + merge_point_args_missing = (nCells is None, + nEdges is None, + nVertices is None) + + print('Opening {} to split'.format(infile)) + with Dataset(infile) as nc_in: + # NOTE: Because nCells, nEdges, and nVertices are optional arguments and + # the previous merge point can be specified in the mesh file, we + # need to do some complicated error handling. 
+ merge_point_in_file = 'merge_point' in nc_in.ncattrs() + if not merge_point_in_file and any(merge_point_args_missing): + raise SystemExit('ERROR: Previous merge point under specified!\n' + ' nCells, nEdges, and nVertices options must all ' + 'be given, or merge_point global attribute must exist' + ' in {}'.format(infile)) + elif merge_point_in_file and not any(merge_point_args_missing): + print('Warning: command line arguments are overriding previous merge ' + 'point as specified in {} merge_point global' + ' attribute'.format(infile)) + elif merge_point_in_file: + if not all(merge_point_args_missing): + print('Warning: nCells, nEdges, and nVertices options must all ' + 'be given to override speification in {} merge_point global ' + 'attribute'.format(infile)) + try: + mp = json.loads(nc_in.merge_point) + except json.decoder.JSONDecodeError: + raise SystemExit('ERROR: {} merge_point global attribute is not valid JSON.\n' + ' merge_point: {}'.format(infile, nc_in.merge_point)) + + if {'nCells', 'nEdges', 'nVertices'} <= set(mp): + nCells = mp['nCells'] + nEdges = mp['nEdges'] + nVertices = mp['nVertices'] + else: + raise SystemExit('ERROR: merge_point global attribute of {} must ' + 'contain nCells, nEdges, and nVertices.\n' + ' merge_point: {}'.format(infile, mp)) + + print('Creating the mesh files:\n {}\n {}'.format( + outfile1, outfile2)) + with Dataset(outfile1, 'w', format="NETCDF3_CLASSIC") as mesh1, \ + Dataset(outfile2, 'w', format="NETCDF3_CLASSIC") as mesh2: + mesh1.createDimension('nCells', nCells) + mesh1.createDimension('nEdges', nEdges) + mesh1.createDimension('nVertices', nVertices) + mesh1.createDimension('TWO', 2) + mesh1.createDimension('vertexDegree', + nc_in.dimensions['vertexDegree'].size) + + mesh2.createDimension('nCells', nc_in.dimensions['nCells'].size - nCells) + mesh2.createDimension('nEdges', nc_in.dimensions['nEdges'].size - nEdges) + mesh2.createDimension('nVertices', nc_in.dimensions['nVertices'].size - nVertices) + 
mesh2.createDimension('TWO', 2) + mesh2.createDimension('vertexDegree', + nc_in.dimensions['vertexDegree'].size) + + if 'StrLen' in nc_in.dimensions: + mesh1.createDimension('StrLen', nc_in.dimensions['StrLen'].size) + mesh2.createDimension('StrLen', nc_in.dimensions['StrLen'].size) + + # FIXME: Technically could be different in each mesh. + mesh1.createDimension('maxEdges', nc_in.dimensions['maxEdges'].size) + mesh2.createDimension('maxEdges', nc_in.dimensions['maxEdges'].size) + mesh1.createDimension('maxEdges2', nc_in.dimensions['maxEdges2'].size) + mesh2.createDimension('maxEdges2', nc_in.dimensions['maxEdges2'].size) + + mesh1.createDimension('nVertLevels', nc_in.dimensions['nVertLevels'].size) + mesh1.createDimension('nVertInterfaces', nc_in.dimensions['nVertInterfaces'].size) + mesh1.createDimension('Time', size=None) # make unlimited + + mesh2.createDimension('nVertLevels', nc_in.dimensions['nVertLevels'].size) + mesh2.createDimension('nVertInterfaces', nc_in.dimensions['nVertInterfaces'].size) + mesh2.createDimension('Time', size=None) # make unlimited + + print('Splitting variable:') + for var in nc_in.variables: + print(' {}'.format(var)) + var_in = nc_in.variables[var] + + var1 = mesh1.createVariable(var, var_in.dtype, var_in.dimensions) + var2 = mesh2.createVariable(var, var_in.dtype, var_in.dimensions) + + slice1, slice2 = var_slice(var_in.dimensions, nc_in, nCells, nEdges, nVertices) + + var1[:] = nc_in.variables[var][slice1] + var2[:] = nc_in.variables[var][slice2] + + # Adjust the indexes + if var == 'indexToCellID': + var2[:] -= nCells + elif var == 'indexToEdgeID': + var2[:] -= nVertices + elif var == 'indexToEdgeID': + var2[:] -= nVertices + elif var in ['cellsOnCell', 'cellsOnEdge', 'cellsOnVertex']: + tmp = var2[...] + tmp[tmp > 0] -= nCells + var2[:] = tmp + elif var in ['edgesOnCell', 'edgesOnEdge', 'edgesOnVertex']: + tmp = var2[...] 
+ tmp[tmp > 0] -= nEdges + var2[:] = tmp + elif var in ['verticesOnCell', 'verticesOnEdge']: + tmp = var2[...] + tmp[tmp > 0] -= nVertices + var2[:] = tmp + + mesh1.on_a_sphere = nc_in.on_a_sphere + mesh1.sphere_radius = nc_in.sphere_radius + mesh1.is_periodic = nc_in.is_periodic + + mesh2.on_a_sphere = nc_in.on_a_sphere + mesh2.sphere_radius = nc_in.sphere_radius + mesh2.is_periodic = nc_in.is_periodic + + run_command = '{}: {} \n'.format(now, runner) + if 'history' in nc_in.ncattrs(): + mesh1.history = maybe_encode(run_command + nc_in.history) + mesh2.history = maybe_encode(run_command + nc_in.history) + else: + mesh1.history = maybe_encode(run_command) + mesh2.history = maybe_encode(run_command) + + print('Split complete!') + + +def var_slice(dimensions, nc_in, nCells, nEdges, nVertices): + slice1 = () + slice2 = () + for dim in dimensions: + if dim == 'nCells': + slice1 += (slice(0, nCells),) + slice2 += (slice(nCells, nc_in.dimensions['nCells'].size),) + elif dim == 'nEdges': + slice1 += (slice(0, nEdges),) + slice2 += (slice(nEdges, nc_in.dimensions['nEdges'].size),) + elif dim == 'nVertices': + slice1 += (slice(0, nVertices),) + slice2 += (slice(nVertices, nc_in.dimensions['nVertices'].size),) + else: + slice1 += (slice(None),) + slice2 += (slice(None),) + + return slice1, slice2 + + +# NOTE: Python 2 and 3 string fun conflicting with NC_CHAR vs NC_STRING, see: +# https://github.com/Unidata/netcdf4-python/issues/529 +def maybe_encode(string, encoding='ascii'): + try: + return string.encode(encoding) + except UnicodeEncodeError: + return string + + +if __name__ == '__main__': + arguments = parse_args() + arguments.runner = ' '.join(sys.argv[:]) + split_grids(**vars(arguments)) From 9f5d83ede90c09d920fa64e2793ae756608590a0 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Sun, 2 Jun 2019 13:01:39 -0400 Subject: [PATCH 145/180] Updates to merge_grids and split_grids merge_grids: * Require required arguments * Now allows programmatic execution * Raises 
exceptions instead of sys.exit() for programmatic execution * Simplified some logic * Some changes for PEP8 * Harmonized variables/arguments/stdout with split_grids split_grids: * Now allows users to specify maxEdges for each output mesh * Now uses maxEdges merge_point specification for each merged mesh * Harmonized variables/arguments/stdout with merge_grids * All default behavior discussed in help text * Raises ValueErrors instead of SystemExit for programmatic execution --- mesh_tools/merge_split_meshes/merge_grids.py | 414 ++++++++++--------- mesh_tools/merge_split_meshes/split_grids.py | 67 ++- 2 files changed, 258 insertions(+), 223 deletions(-) diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py index 1ef7117b4..4026b0c30 100755 --- a/mesh_tools/merge_split_meshes/merge_grids.py +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -1,209 +1,219 @@ #!/usr/bin/env python -''' +""" Tool to merge 2 MPAS non-contiguous meshes together into a single file -''' +""" +import os import sys -import netCDF4 -import argparse -from datetime import datetime import json +import argparse +from datetime import datetime -#print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" -parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) -parser.description = __doc__ -parser.add_argument("-1", dest="file1", help="name of file 1", metavar="FILENAME") -parser.add_argument("-2", dest="file2", help="name of file 2", metavar="FILENAME") -parser.add_argument("-o", dest="outFile", help="name of output file", default="merged_mpas.nc", metavar="FILENAME") -#for option in parser.option_list: -# if option.default != ("NO", "DEFAULT"): -# option.help += (" " if option.help else "") + "[default: %default]" -options = parser.parse_args() - - -if options.file1 == None: - sys.exit("Missing argument for name of file 1. 
Please add with -1 argument.") -if options.file2 == None: - sys.exit("Missing argument for name of file 2. Please add with -2 argument.") - -f1 = netCDF4.Dataset(options.file1) -nCells1 = len(f1.dimensions['nCells']) -nEdges1 = len(f1.dimensions['nEdges']) -nVertices1 = len(f1.dimensions['nVertices']) -Time1= len(f1.dimensions['Time']) - -f2 = netCDF4.Dataset(options.file2) -nCells2 = len(f2.dimensions['nCells']) -nEdges2 = len(f2.dimensions['nEdges']) -nVertices2 = len(f2.dimensions['nVertices']) -Time2= len(f2.dimensions['Time']) - -if len(f1.dimensions['vertexDegree']) != len(f2.dimensions['vertexDegree']): - sys.exit("ERROR: The two files have different lengths of the vertexDegree dimension.") -# Check some other possible dimensions: -optionalDims = ('Time', 'nVertLevels', 'nVertInterfaces') -for dim in optionalDims: - if dim in f1.dimensions and dim in f2.dimensions: - if len(f1.dimensions[dim]) != len(f2.dimensions[dim]): - sys.exit("ERROR: The two files have different lengths of the {} dimension.".format(dim)) - - -# Create new file -fout = netCDF4.Dataset(options.outFile, "w", format="NETCDF3_CLASSIC") - -# add merged dimensions -print("Adding merged dimensions to new file.") -fout.createDimension('nCells', nCells1+nCells2) -fout.createDimension('nEdges', nEdges1+nEdges2) -fout.createDimension('nVertices', nVertices1+nVertices2) -fout.createDimension('TWO', 2) -fout.createDimension('vertexDegree', len(f1.dimensions['vertexDegree'])) -if 'StrLen' in f1.dimensions: - fout.createDimension('StrLen', len(f1.dimensions['StrLen'])) -maxEdges = max(len(f1.dimensions['maxEdges']), len(f2.dimensions['maxEdges'])) -fout.createDimension('maxEdges', maxEdges) -fout.createDimension('maxEdges2', maxEdges*2) - -for dim in optionalDims: - if dim in f1.dimensions and dim in f2.dimensions: - if dim == 'Time': - fout.createDimension('Time', size=None) # make unlimited dimension - else: - fout.createDimension(dim, len(f1.dimensions[dim])) - - -# compare list of variables -vars1 
= f1.variables -vars2 = f2.variables - -# only copy variables common to both files -for varname in vars1: - if varname in vars2: - print("Merging variable {}".format(varname)) - if f1.variables[varname].dimensions != f2.variables[varname].dimensions: - sys.exit("Error: Variable {} has different dimensions in the two files.").format(varname) - - theVar = f1.variables[varname] - newVar = fout.createVariable(varname, theVar.dtype, theVar.dimensions) - # (Assuming here that nCells, nEdges, and nVertices are never both in a variable) - # now assign value - if 'nCells' in theVar.dimensions: - ind = theVar.dimensions.index('nCells') - tup1 = () - tup2 = () - tupMerge = () - for ind in range(len(theVar.dimensions)): - if theVar.dimensions[ind] == 'nCells': - tup1 += (slice(0,nCells1),) - tup2 += (slice(0,nCells2),) - tupMerge += (slice(nCells1, nCells1+nCells2),) - else: - tup1 += (slice(None),) - tup2 += (slice(None),) - tupMerge += (slice(None),) - newVar[tup1] = f1.variables[varname][tup1] - newVar[tupMerge] = f2.variables[varname][tup2] - elif 'nEdges' in theVar.dimensions: - ind = theVar.dimensions.index('nEdges') - tup1 = () - tup2 = () - tupMerge = () - for ind in range(len(theVar.dimensions)): - if theVar.dimensions[ind] == 'nEdges': - tup1 += (slice(0,nEdges1),) - tup2 += (slice(0,nEdges2),) - tupMerge += (slice(nEdges1, nEdges1+nEdges2),) - else: - tup1 += (slice(None),) - tup2 += (slice(None),) - tupMerge += (slice(None),) - newVar[tup1] = f1.variables[varname][tup1] - newVar[tupMerge] = f2.variables[varname][tup2] - elif 'nVertices' in theVar.dimensions: - ind = theVar.dimensions.index('nVertices') - tup1 = () - tup2 = () - tupMerge = () - for ind in range(len(theVar.dimensions)): - if theVar.dimensions[ind] == 'nVertices': - tup1 += (slice(0,nVertices1),) - tup2 += (slice(0,nVertices2),) - tupMerge += (slice(nVertices1, nVertices1+nVertices2),) - else: - tup1 += (slice(None),) - tup2 += (slice(None),) - tupMerge += (slice(None),) - newVar[tup1] = 
f1.variables[varname][tup1] - newVar[tupMerge] = f2.variables[varname][tup2] - else: - # just take file 1's version - newVar[:] = theVar[:] - - # Indexes need adjusting: - if varname == "indexToCellID": - newVar[nCells1:] += nCells1 - elif varname == "indexToEdgeID": - newVar[nEdges1:] += nEdges1 - elif varname == "indexToVertexID": - newVar[nVertices1:] += nVertices1 - elif varname == "cellsOnEdge": - part2 = newVar[nEdges1:,:] - part2[part2>0] += nCells1 - newVar[nEdges1:,:] = part2 - elif varname == "edgesOnCell": - part2 = newVar[nCells1:,:] - part2[part2>0] += nEdges1 - newVar[nCells1:,:] = part2 - elif varname == "edgesOnEdge": - part2 = newVar[nEdges1:,:] - part2[part2>0] += nEdges1 - newVar[nEdges1:,:] = part2 - elif varname == "cellsOnCell": - part2 = newVar[nCells1:,:] - part2[part2>0] += nCells1 - newVar[nCells1:,:] = part2 - elif varname == "verticesOnCell": - part2 = newVar[nCells1:,:] - part2[part2>0] += nVertices1 - newVar[nCells1:,:] = part2 - elif varname == "verticesOnEdge": - part2 = newVar[nEdges1:,:] - part2[part2>0] += nVertices1 - newVar[nEdges1:,:] = part2 - elif varname == "edgesOnVertex": - part2 = newVar[nVertices1:,:] - part2[part2>0] += nEdges1 - newVar[nVertices1:,:] = part2 - elif varname == "cellsOnVertex": - part2 = newVar[nVertices1:,:] - part2[part2>0] += nCells1 - newVar[nVertices1:,:] = part2 - - -# add some standard attributes -attrToCopy = ("on_a_sphere", "sphere_radius", "is_periodic") -for attr in attrToCopy: - if attr in f1.ncattrs() and attr in f2.ncattrs(): - if f1.getncattr(attr) == f2.getncattr(attr): - fout.setncattr(attr, f1.getncattr(attr)) - else: - print("Warning: Value for '{0}' global attribute differs between input files. '{0}' being skipped.".format(attr)) - else: - print("Warning: '{0}' global attribute not present in both input files. 
'{0}' being skipped.".format(attr)) -# Add merge info to allow exact splitting later -fout.merge_point = json.dumps({'nCells': nCells1, - 'nEdges': nEdges1, - 'nVertices': nVertices1, - 'maxEdges1': len(f1.dimensions['maxEdges']), - 'maxEdges2': len(f2.dimensions['maxEdges']) - }) -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) -setattr(fout, 'history', thiscommand ) -fout.close() -f1.close() -f2.close() - -print('\nMerge completed to file {}.'.format(options.outFile)) - +from netCDF4 import Dataset + + +def parse_args(args=None): + parser = argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + + parser.add_argument('infile1', metavar='FILENAME1', + help='File name for first mesh to merge') + + parser.add_argument('infile2', metavar='FILENAME2', + help='File name for second mesh to merge') + + parser.add_argument('-o', dest='outfile', default='merged_mesh.nc', metavar='FILENAME', + help='The merged mesh file') + + return parser.parse_intermixed_args(args) + + +def merge_grids(infile1=None, infile2=None, outfile=None, runner=None): + now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + if not runner: + runner = '{}.merge_grids(infile1={}, infile2={}, outfile={})'.format( + os.path.splitext(__file__)[0], infile1, infile2, outfile) + + print('Opening files to merge:\n {}\n {}'.format(infile1, infile2)) + print('Creating the merged mesh file: {}'.format(outfile)) + with Dataset(infile1) as nc_in1, Dataset(infile2) as nc_in2, \ + Dataset(outfile, 'w', format="NETCDF3_CLASSIC") as mesh: + nCells1 = nc_in1.dimensions['nCells'].size + nEdges1 = nc_in1.dimensions['nEdges'].size + nVertices1 = nc_in1.dimensions['nVertices'].size + + nCells2 = nc_in2.dimensions['nCells'].size + nEdges2 = nc_in2.dimensions['nEdges'].size + nVertices2 = nc_in2.dimensions['nVertices'].size + + if nc_in1.dimensions['vertexDegree'].size != 
nc_in2.dimensions['vertexDegree'].size: + raise ValueError("ERROR: The two files have different lengths of the " + "vertexDegree dimension.") + + mesh.createDimension('nCells', nCells1 + nCells2) + mesh.createDimension('nEdges', nEdges1 + nEdges2) + mesh.createDimension('nVertices', nVertices1 + nVertices2) + mesh.createDimension('TWO', 2) + mesh.createDimension('vertexDegree', nc_in1.dimensions['vertexDegree'].size) + if 'StrLen' in nc_in1.dimensions: + mesh.createDimension('StrLen', nc_in1.dimensions['StrLen'].size) + maxEdges = max(nc_in1.dimensions['maxEdges'].size, nc_in2.dimensions['maxEdges'].size) + mesh.createDimension('maxEdges', maxEdges) + mesh.createDimension('maxEdges2', maxEdges * 2) + + optionalDims = ('Time', 'nVertLevels', 'nVertInterfaces') + for dim in optionalDims: + if dim in nc_in1.dimensions and dim in nc_in2.dimensions: + if len(nc_in1.dimensions[dim]) != len(nc_in2.dimensions[dim]): + raise ValueError("ERROR: The two files have different lengths " + "of the {} dimension.".format(dim)) + if dim == 'Time': + mesh.createDimension('Time', size=None) # make unlimited dimension + else: + mesh.createDimension(dim, nc_in1.dimensions[dim].size) + + print('Merging variable:') + vars1 = set(nc_in1.variables) + vars2 = set(nc_in2.variables) + # only copy variables common to both files + for varname in (vars1 & vars2): + print(' {}'.format(varname)) + if nc_in1.variables[varname].dimensions \ + != nc_in2.variables[varname].dimensions: + raise ValueError("ERROR: Variable {} has different dimensions in " + "the two files.".format(varname)) + + theVar = nc_in1.variables[varname] + newVar = mesh.createVariable(varname, theVar.dtype, theVar.dimensions) + # (Assuming here that nCells, nEdges, and nVertices are never both in a variable) + # now assign value + if 'nCells' in theVar.dimensions: + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nCells': + tup1 += (slice(0, nCells1),) + tup2 += 
(slice(0, nCells2),) + tupMerge += (slice(nCells1, nCells1 + nCells2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = nc_in1.variables[varname][tup1] + newVar[tupMerge] = nc_in2.variables[varname][tup2] + elif 'nEdges' in theVar.dimensions: + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nEdges': + tup1 += (slice(0, nEdges1),) + tup2 += (slice(0, nEdges2),) + tupMerge += (slice(nEdges1, nEdges1 + nEdges2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = nc_in1.variables[varname][tup1] + newVar[tupMerge] = nc_in2.variables[varname][tup2] + elif 'nVertices' in theVar.dimensions: + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nVertices': + tup1 += (slice(0, nVertices1),) + tup2 += (slice(0, nVertices2),) + tupMerge += (slice(nVertices1, nVertices1 + nVertices2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = nc_in1.variables[varname][tup1] + newVar[tupMerge] = nc_in2.variables[varname][tup2] + else: + # just take file 1's version + newVar[:] = theVar[:] + + # Indexes need adjusting: + if varname == "indexToCellID": + newVar[nCells1:] += nCells1 + elif varname == "indexToEdgeID": + newVar[nEdges1:] += nEdges1 + elif varname == "indexToVertexID": + newVar[nVertices1:] += nVertices1 + elif varname == "cellsOnEdge": + part2 = newVar[nEdges1:, :] + part2[part2 > 0] += nCells1 + newVar[nEdges1:, :] = part2 + elif varname == "edgesOnCell": + part2 = newVar[nCells1:, :] + part2[part2 > 0] += nEdges1 + newVar[nCells1:, :] = part2 + elif varname == "edgesOnEdge": + part2 = newVar[nEdges1:, :] + part2[part2 > 0] += nEdges1 + newVar[nEdges1:, :] = part2 + elif varname == "cellsOnCell": + part2 = newVar[nCells1:, :] + part2[part2 > 0] += nCells1 + newVar[nCells1:, :] = part2 + 
elif varname == "verticesOnCell": + part2 = newVar[nCells1:, :] + part2[part2 > 0] += nVertices1 + newVar[nCells1:, :] = part2 + elif varname == "verticesOnEdge": + part2 = newVar[nEdges1:, :] + part2[part2 > 0] += nVertices1 + newVar[nEdges1:, :] = part2 + elif varname == "edgesOnVertex": + part2 = newVar[nVertices1:, :] + part2[part2 > 0] += nEdges1 + newVar[nVertices1:, :] = part2 + elif varname == "cellsOnVertex": + part2 = newVar[nVertices1:, :] + part2[part2 > 0] += nCells1 + newVar[nVertices1:, :] = part2 + + attrToCopy = ("on_a_sphere", "sphere_radius", "is_periodic") + for attr in attrToCopy: + if attr in nc_in1.ncattrs() and attr in nc_in2.ncattrs(): + if nc_in1.getncattr(attr) == nc_in2.getncattr(attr): + mesh.setncattr(attr, nc_in1.getncattr(attr)) + else: + print( + "Warning: Value for '{0}' global attribute differs between " + "input files. '{0}' being skipped.".format(attr)) + else: + print("Warning: '{0}' global attribute not present in both input " + "files. '{0}' being skipped.".format(attr)) + + # Add merge info to allow exact splitting later + mesh.merge_point = json.dumps({'nCells': nCells1, + 'nEdges': nEdges1, + 'nVertices': nVertices1, + 'maxEdges1': nc_in1.dimensions['maxEdges'].size, + 'maxEdges2': nc_in2.dimensions['maxEdges'].size + }) + + run_command = "{}: {} \n".format(now, runner) + mesh.history = maybe_encode(run_command) + + print('Merge complete! 
Output file: {}.'.format(outfile)) + + +# NOTE: Python 2 and 3 string fun conflicting with NC_CHAR vs NC_STRING, see: +# https://github.com/Unidata/netcdf4-python/issues/529 +def maybe_encode(string, encoding='ascii'): + try: + return string.encode(encoding) + except UnicodeEncodeError: + return string + + +if __name__ == '__main__': + arguments = parse_args() + arguments.runner = ' '.join(sys.argv[:]) + merge_grids(**vars(arguments)) diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py index 30ad7c2c5..c13e7180e 100755 --- a/mesh_tools/merge_split_meshes/split_grids.py +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -14,12 +14,11 @@ def parse_args(args=None): - parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument('infile', - help='Mesh file to split', metavar='MESHFILE') + parser.add_argument('infile', metavar='MESHFILE', + help='Mesh file to split') parser.add_argument('-1', '--outfile1', default='mesh1.nc', metavar='FILENAME', help='File name for first mesh output \n(default: %(default)s)') @@ -28,19 +27,31 @@ def parse_args(args=None): help='File name for second mesh output \n(default: %(default)s)') parser.add_argument('--nCells', type=int, - help='The number of cells in the first mesh') + help='The number of cells in the first mesh \n' + '(default: the value specified in MESHFILE global ' + 'attribute merge_point)') parser.add_argument('--nEdges', type=int, - help='The number of edges in the first mesh') + help='The number of edges in the first mesh \n' + '(default: the value specified in MESHFILE global ' + 'attribute merge_point)') parser.add_argument('--nVertices', type=int, - help='The number of vertices in the first mesh') + help='The number of vertices in the first mesh \n' + '(default: the value specified in MESHFILE global ' + 'attribute merge_point)') + + parser.add_argument('--maxEdges', type=int, nargs=2, 
metavar=('MAXEDGES1', 'MAXEDGES2'), + help='The number of maxEdges in each mesh \n' + '(default: the value specified in MESHFILE global ' + 'attribute merge_point\n OR: will use MESHFILE ' + 'maxEdges dimension and assume same for both)') return parser.parse_intermixed_args(args) def split_grids(infile=None, outfile1=None, outfile2=None, - nCells=None, nEdges=None, nVertices=None, runner=None): + nCells=None, nEdges=None, nVertices=None, maxEdges=None, runner=None): now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") if not runner: runner = '{}.split_grids(infile={}, outfile1={}, outfile2={}, nCells={},' \ @@ -59,10 +70,10 @@ def split_grids(infile=None, outfile1=None, outfile2=None, # need to do some complicated error handling. merge_point_in_file = 'merge_point' in nc_in.ncattrs() if not merge_point_in_file and any(merge_point_args_missing): - raise SystemExit('ERROR: Previous merge point under specified!\n' + raise ValueError('ERROR: Previous merge point under specified!\n' ' nCells, nEdges, and nVertices options must all ' - 'be given, or merge_point global attribute must exist' - ' in {}'.format(infile)) + 'be given, or merge_point global attribute must exist ' + 'in {}'.format(infile)) elif merge_point_in_file and not any(merge_point_args_missing): print('Warning: command line arguments are overriding previous merge ' 'point as specified in {} merge_point global' @@ -75,17 +86,20 @@ def split_grids(infile=None, outfile1=None, outfile2=None, try: mp = json.loads(nc_in.merge_point) except json.decoder.JSONDecodeError: - raise SystemExit('ERROR: {} merge_point global attribute is not valid JSON.\n' + raise ValueError('ERROR: {} merge_point global attribute is not valid JSON.\n' ' merge_point: {}'.format(infile, nc_in.merge_point)) - if {'nCells', 'nEdges', 'nVertices'} <= set(mp): + mp_keyset = set(mp) + if {'nCells', 'nEdges', 'nVertices'} <= mp_keyset: nCells = mp['nCells'] nEdges = mp['nEdges'] nVertices = mp['nVertices'] else: - raise SystemExit('ERROR: 
merge_point global attribute of {} must ' + raise ValueError('ERROR: merge_point global attribute of {} must ' 'contain nCells, nEdges, and nVertices.\n' ' merge_point: {}'.format(infile, mp)) + if {'maxEdges1', 'maxEdges2'} <= mp_keyset: + maxEdges = [mp['maxEdges1'], mp['maxEdges2']] print('Creating the mesh files:\n {}\n {}'.format( outfile1, outfile2)) @@ -109,11 +123,15 @@ def split_grids(infile=None, outfile1=None, outfile2=None, mesh1.createDimension('StrLen', nc_in.dimensions['StrLen'].size) mesh2.createDimension('StrLen', nc_in.dimensions['StrLen'].size) - # FIXME: Technically could be different in each mesh. - mesh1.createDimension('maxEdges', nc_in.dimensions['maxEdges'].size) - mesh2.createDimension('maxEdges', nc_in.dimensions['maxEdges'].size) - mesh1.createDimension('maxEdges2', nc_in.dimensions['maxEdges2'].size) - mesh2.createDimension('maxEdges2', nc_in.dimensions['maxEdges2'].size) + if maxEdges is None: + maxEdges = [nc_in.dimensions['maxEdges'].size, + nc_in.dimensions['maxEdges'].size] + + mesh1.createDimension('maxEdges', maxEdges[0]) + mesh1.createDimension('maxEdges2', maxEdges[0] * 2) + + mesh2.createDimension('maxEdges', maxEdges[1]) + mesh2.createDimension('maxEdges2', maxEdges[1] * 2) mesh1.createDimension('nVertLevels', nc_in.dimensions['nVertLevels'].size) mesh1.createDimension('nVertInterfaces', nc_in.dimensions['nVertInterfaces'].size) @@ -131,7 +149,8 @@ def split_grids(infile=None, outfile1=None, outfile2=None, var1 = mesh1.createVariable(var, var_in.dtype, var_in.dimensions) var2 = mesh2.createVariable(var, var_in.dtype, var_in.dimensions) - slice1, slice2 = var_slice(var_in.dimensions, nc_in, nCells, nEdges, nVertices) + slice1, slice2 = var_slice(var_in.dimensions, nc_in, + nCells, nEdges, nVertices, maxEdges) var1[:] = nc_in.variables[var][slice1] var2[:] = nc_in.variables[var][slice2] @@ -172,10 +191,10 @@ def split_grids(infile=None, outfile1=None, outfile2=None, mesh1.history = maybe_encode(run_command) mesh2.history = 
maybe_encode(run_command) - print('Split complete!') + print('Split complete! Mesh files:\n {}\n {}'.format(outfile1, outfile2)) -def var_slice(dimensions, nc_in, nCells, nEdges, nVertices): +def var_slice(dimensions, nc_in, nCells, nEdges, nVertices, maxEdges): slice1 = () slice2 = () for dim in dimensions: @@ -188,6 +207,12 @@ def var_slice(dimensions, nc_in, nCells, nEdges, nVertices): elif dim == 'nVertices': slice1 += (slice(0, nVertices),) slice2 += (slice(nVertices, nc_in.dimensions['nVertices'].size),) + elif dim == 'maxEdges': + slice1 += (slice(0, maxEdges[0]),) + slice2 += (slice(0, maxEdges[1]),) + elif dim == 'maxEdges2': + slice1 += (slice(0, maxEdges[0]*2),) + slice2 += (slice(0, maxEdges[1]*2),) else: slice1 += (slice(None),) slice2 += (slice(None),) From f1d192d862210bfc6dcaab7638ba6e01cb6bfe44 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Tue, 4 Jun 2019 09:46:43 -0600 Subject: [PATCH 146/180] Expand on typical usage in help comment --- mesh_tools/merge_split_meshes/split_grids.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py index c13e7180e..dce4f189a 100755 --- a/mesh_tools/merge_split_meshes/split_grids.py +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -1,6 +1,11 @@ #!/usr/bin/env python """ -Tool to split 2 previously merged MPAS non-contiguous meshes into separate files +Tool to split 2 previously merged MPAS non-contiguous meshes into separate files. +Typical usage is: + split_grids.py -1 outfile1.nc -2 outfile2.nc infile +The optional arguments for nCells, nEdges, nVertices, and maxEdges should +generally not be required as this information is saved in the combined mesh file +as global attributes by the merge_grids.py script. 
""" import os From 622583ea6534c10bda6cb192dff82689a0c7b0a8 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Tue, 4 Jun 2019 09:47:06 -0600 Subject: [PATCH 147/180] Continue with warning if desired global attributes are missing --- mesh_tools/merge_split_meshes/split_grids.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py index dce4f189a..32390a528 100755 --- a/mesh_tools/merge_split_meshes/split_grids.py +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -180,13 +180,14 @@ def split_grids(infile=None, outfile1=None, outfile2=None, tmp[tmp > 0] -= nVertices var2[:] = tmp - mesh1.on_a_sphere = nc_in.on_a_sphere - mesh1.sphere_radius = nc_in.sphere_radius - mesh1.is_periodic = nc_in.is_periodic - - mesh2.on_a_sphere = nc_in.on_a_sphere - mesh2.sphere_radius = nc_in.sphere_radius - mesh2.is_periodic = nc_in.is_periodic + attrToCopy = ("on_a_sphere", "sphere_radius", "is_periodic") + for attr in attrToCopy: + if attr in nc_in.ncattrs(): + mesh1.setncattr(attr, nc_in.getncattr(attr)) + mesh2.setncattr(attr, nc_in.getncattr(attr)) + else: + print("Warning: '{0}' global attribute not present in input " + "file. 
'{0}' will not be added to the two output files.".format(attr)) run_command = '{}: {} \n'.format(now, runner) if 'history' in nc_in.ncattrs(): From c71c096d489fd4d005f1415f7550ddc3e0ab9be5 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Tue, 4 Jun 2019 09:51:43 -0600 Subject: [PATCH 148/180] Fix bugs in edges/vertex index adjustment for file2 --- mesh_tools/merge_split_meshes/split_grids.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py index 32390a528..5009d5cff 100755 --- a/mesh_tools/merge_split_meshes/split_grids.py +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -164,8 +164,8 @@ def split_grids(infile=None, outfile1=None, outfile2=None, if var == 'indexToCellID': var2[:] -= nCells elif var == 'indexToEdgeID': - var2[:] -= nVertices - elif var == 'indexToEdgeID': + var2[:] -= nEdges + elif var == 'indexToVertexID': var2[:] -= nVertices elif var in ['cellsOnCell', 'cellsOnEdge', 'cellsOnVertex']: tmp = var2[...] 
From 895307a91635a40ff3b3db720e56b13163359ee5 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 4 Jun 2019 13:53:24 -0400 Subject: [PATCH 149/180] Prep for conda package and some PEP8 --- mesh_tools/merge_split_meshes/merge_grids.py | 6 ++++- mesh_tools/merge_split_meshes/split_grids.py | 23 ++++++++++++-------- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py index 4026b0c30..71895224b 100755 --- a/mesh_tools/merge_split_meshes/merge_grids.py +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -213,7 +213,11 @@ def maybe_encode(string, encoding='ascii'): return string -if __name__ == '__main__': +def main(): arguments = parse_args() arguments.runner = ' '.join(sys.argv[:]) merge_grids(**vars(arguments)) + + +if __name__ == '__main__': + main() diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py index 5009d5cff..0ed7ecb25 100755 --- a/mesh_tools/merge_split_meshes/split_grids.py +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -180,14 +180,15 @@ def split_grids(infile=None, outfile1=None, outfile2=None, tmp[tmp > 0] -= nVertices var2[:] = tmp - attrToCopy = ("on_a_sphere", "sphere_radius", "is_periodic") - for attr in attrToCopy: - if attr in nc_in.ncattrs(): - mesh1.setncattr(attr, nc_in.getncattr(attr)) - mesh2.setncattr(attr, nc_in.getncattr(attr)) - else: - print("Warning: '{0}' global attribute not present in input " - "file. '{0}' will not be added to the two output files.".format(attr)) + attr_to_copy = ("on_a_sphere", "sphere_radius", "is_periodic") + for attr in attr_to_copy: + if attr in nc_in.ncattrs(): + mesh1.setncattr(attr, nc_in.getncattr(attr)) + mesh2.setncattr(attr, nc_in.getncattr(attr)) + else: + print("Warning: '{0}' global attribute not present in input " + "file. 
'{0}' will not be added to the two output " + "files.".format(attr)) run_command = '{}: {} \n'.format(now, runner) if 'history' in nc_in.ncattrs(): @@ -235,7 +236,11 @@ def maybe_encode(string, encoding='ascii'): return string -if __name__ == '__main__': +def main(): arguments = parse_args() arguments.runner = ' '.join(sys.argv[:]) split_grids(**vars(arguments)) + + +if __name__ == '__main__': + main() From 2f698579eddf21e29121c6e579af51875ac49a47 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 4 Jun 2019 13:55:36 -0400 Subject: [PATCH 150/180] Move merge and split grids into conda package --- conda_package/mpas_tools/mesh/__init__.py | 0 .../merge_grids.py => conda_package/mpas_tools/mesh/merge.py | 0 .../split_grids.py => conda_package/mpas_tools/mesh/split.py | 0 conda_package/setup.py | 4 +++- 4 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 conda_package/mpas_tools/mesh/__init__.py rename mesh_tools/merge_split_meshes/merge_grids.py => conda_package/mpas_tools/mesh/merge.py (100%) rename mesh_tools/merge_split_meshes/split_grids.py => conda_package/mpas_tools/mesh/split.py (100%) diff --git a/conda_package/mpas_tools/mesh/__init__.py b/conda_package/mpas_tools/mesh/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/conda_package/mpas_tools/mesh/merge.py similarity index 100% rename from mesh_tools/merge_split_meshes/merge_grids.py rename to conda_package/mpas_tools/mesh/merge.py diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/conda_package/mpas_tools/mesh/split.py similarity index 100% rename from mesh_tools/merge_split_meshes/split_grids.py rename to conda_package/mpas_tools/mesh/split.py diff --git a/conda_package/setup.py b/conda_package/setup.py index 019da4133..bd0c522f0 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -41,4 +41,6 @@ install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], entry_points={'console_scripts': 
['planar_hex = mpas_tools.planar_hex:main', - 'translate_planar_grid = mpas_tools.translate:main']}) + 'translate_planar_grid = mpas_tools.translate:main', + 'merge_grids = mpas_tools.mesh.merge:main', + 'split_grids = mpas_tools.mesh.split:main']}) From 827b40722453c97aefada89826db136ae3d5ea9e Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 4 Jun 2019 13:59:23 -0400 Subject: [PATCH 151/180] Add symbolic link for merge_grids/split_grids to preseve past UI --- mesh_tools/merge_split_meshes/merge_grids.py | 1 + mesh_tools/merge_split_meshes/split_grids.py | 1 + 2 files changed, 2 insertions(+) create mode 120000 mesh_tools/merge_split_meshes/merge_grids.py create mode 120000 mesh_tools/merge_split_meshes/split_grids.py diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py new file mode 120000 index 000000000..3b52ca21d --- /dev/null +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -0,0 +1 @@ +../../conda_package/mpas_tools/mesh/merge.py \ No newline at end of file diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py new file mode 120000 index 000000000..9537926ab --- /dev/null +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -0,0 +1 @@ +../../conda_package/mpas_tools/mesh/split.py \ No newline at end of file From 0386ffcd648751355b6038e1ef2cfd146a903715 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 4 Jun 2019 14:28:31 -0400 Subject: [PATCH 152/180] Python 2 fixes --- conda_package/mpas_tools/mesh/merge.py | 2 +- conda_package/mpas_tools/mesh/split.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/conda_package/mpas_tools/mesh/merge.py b/conda_package/mpas_tools/mesh/merge.py index 71895224b..44cf5bbeb 100755 --- a/conda_package/mpas_tools/mesh/merge.py +++ b/conda_package/mpas_tools/mesh/merge.py @@ -26,7 +26,7 @@ def parse_args(args=None): parser.add_argument('-o', dest='outfile', default='merged_mesh.nc', 
metavar='FILENAME', help='The merged mesh file') - return parser.parse_intermixed_args(args) + return parser.parse_args(args) def merge_grids(infile1=None, infile2=None, outfile=None, runner=None): diff --git a/conda_package/mpas_tools/mesh/split.py b/conda_package/mpas_tools/mesh/split.py index 0ed7ecb25..26e01f1e6 100755 --- a/conda_package/mpas_tools/mesh/split.py +++ b/conda_package/mpas_tools/mesh/split.py @@ -52,7 +52,7 @@ def parse_args(args=None): 'attribute merge_point\n OR: will use MESHFILE ' 'maxEdges dimension and assume same for both)') - return parser.parse_intermixed_args(args) + return parser.parse_args(args) def split_grids(infile=None, outfile1=None, outfile2=None, @@ -90,7 +90,7 @@ def split_grids(infile=None, outfile1=None, outfile2=None, 'attribute'.format(infile)) try: mp = json.loads(nc_in.merge_point) - except json.decoder.JSONDecodeError: + except ValueError: raise ValueError('ERROR: {} merge_point global attribute is not valid JSON.\n' ' merge_point: {}'.format(infile, nc_in.merge_point)) From abeceb370c706da0d6769d35339ca1902bdfeb0c Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 4 Jun 2019 16:38:38 -0400 Subject: [PATCH 153/180] Updates for conda packaging This: * Moves merge_grids from mesh_tools.mesh.merge to mesh_tools.merge_grids to keep API more in line with legacy scripts and co-locate it with other mesh tools (ditto for split_grids) * reflects the above changes in mesh_tools/merge_split_meshes/*_grids.py * Adds merge_grids and split_grids to docs Also fixes some other conda package and documentation errors: * Fixes `make_planer_hex_mesh` function name in docs * Corrects the mpas_tools version number * requires netcdf4 for the conda build host in the recipe meta.yml to keep hdf5 and netcdf requirements in sync * Docs URL in recipe meta.yml link in now valid --- conda_package/docs/api.rst | 15 ++++++- conda_package/mpas_tools/__init__.py | 2 +- .../{mesh/merge.py => merge_grids.py} | 17 +++++++ 
conda_package/mpas_tools/mesh/__init__.py | 0 .../{mesh/split.py => split_grids.py} | 44 +++++++++++++++++++ conda_package/recipe/meta.yaml | 7 ++- conda_package/setup.py | 4 +- mesh_tools/merge_split_meshes/merge_grids.py | 2 +- mesh_tools/merge_split_meshes/split_grids.py | 2 +- 9 files changed, 86 insertions(+), 7 deletions(-) rename conda_package/mpas_tools/{mesh/merge.py => merge_grids.py} (95%) delete mode 100644 conda_package/mpas_tools/mesh/__init__.py rename conda_package/mpas_tools/{mesh/split.py => split_grids.py} (87%) diff --git a/conda_package/docs/api.rst b/conda_package/docs/api.rst index fb57a2b4e..b737d8056 100644 --- a/conda_package/docs/api.rst +++ b/conda_package/docs/api.rst @@ -14,7 +14,7 @@ MPAS mesh tools .. autosummary:: :toctree: generated/ - make_periodic_planar_hex_mesh + make_planar_hex_mesh .. currentmodule:: mpas_tools.translate @@ -33,6 +33,19 @@ MPAS mesh tools cull mask +.. currentmodule:: mpas_tools.merge_grids + +.. autosummary:: + :toctree: generated/ + + merge_grids + +.. currentmodule:: mpas_tools.split_grids + +.. autosummary:: + :toctree: generated/ + + split_grids .. 
currentmodule:: mpas_tools.io diff --git a/conda_package/mpas_tools/__init__.py b/conda_package/mpas_tools/__init__.py index ac825a01c..af0360983 100644 --- a/conda_package/mpas_tools/__init__.py +++ b/conda_package/mpas_tools/__init__.py @@ -1,2 +1,2 @@ -__version_info__ = (0, 0, 1) +__version_info__ = (0, 0, 2) __version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/conda_package/mpas_tools/mesh/merge.py b/conda_package/mpas_tools/merge_grids.py similarity index 95% rename from conda_package/mpas_tools/mesh/merge.py rename to conda_package/mpas_tools/merge_grids.py index 44cf5bbeb..308aeb81f 100755 --- a/conda_package/mpas_tools/mesh/merge.py +++ b/conda_package/mpas_tools/merge_grids.py @@ -30,6 +30,23 @@ def parse_args(args=None): def merge_grids(infile1=None, infile2=None, outfile=None, runner=None): + """ + Merges two MPAS non-contiguous meshes together into a single file + + Parameters + ---------- + infile1 : str + The file name for the first mesh to merge + + infile2 : str + The file name for the second mesh to merge + + outfile : str + The file name for the first mesh to merge + + runner : str, optional + The command to write into the global history attribute of the outfile + """ now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") if not runner: runner = '{}.merge_grids(infile1={}, infile2={}, outfile={})'.format( diff --git a/conda_package/mpas_tools/mesh/__init__.py b/conda_package/mpas_tools/mesh/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/conda_package/mpas_tools/mesh/split.py b/conda_package/mpas_tools/split_grids.py similarity index 87% rename from conda_package/mpas_tools/mesh/split.py rename to conda_package/mpas_tools/split_grids.py index 26e01f1e6..9ce5c5109 100755 --- a/conda_package/mpas_tools/mesh/split.py +++ b/conda_package/mpas_tools/split_grids.py @@ -57,6 +57,50 @@ def parse_args(args=None): def split_grids(infile=None, outfile1=None, outfile2=None, nCells=None, nEdges=None, 
nVertices=None, maxEdges=None, runner=None): + """ + Split two previously merged MPAS non-contiguous meshes together into + separate files. Typical usage is: + + .. code:: python + + split_grids(infile='infile.nc', outfile1='outfile1.nc', outfile2='outfile2.nc') + + The optional arguments for ``nCells``, ``nEdges``, ``nVertices``, and ``maxEdges`` + should generally not be required as this information sould have been saved in + ``infiles``'s global attribute ``merge_point`` when created by + :func:`mpas_tools.merge_grids.merge_grids`. + + Parameters + ---------- + infile : str + The file name for the mesh to split + + outfile1 : str + The file name for the first split mesh + + outfile2 : str + The file name for the second split mesh + + nCells : int, optional + The number of cells in the first mesh (default: the value specified in + infile global attribute merge_point) + + nEdges : int, optional + The number of edges in the first mesh (default: the value specified in + infile global attribute merge_point + + nVertices : int, optional + The number of vertices in the first mesh (default: the value specified in + infile global attribute merge_point + + maxEdges : list[int, int], optional + A list of the number of max edges (int) in each mesh (default: the value + specified in infile global attribute merge_point OR will use infile + maxEdges dimension and assume same for both) + + runner : str, optional + The command to write into the global history attribute of the outfile + """ now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") if not runner: runner = '{}.split_grids(infile={}, outfile1={}, outfile2={}, nCells={},' \ diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index 803bc3e2b..1980b8e2a 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -13,6 +13,8 @@ build: entry_points: - planar_hex = mpas_tools.planar_hex:main - translate_planar_grid = mpas_tools.translate:main + - merge_grids = 
mpas_tools.merge_grids:main + - split_grids = mpas_tools.split_grids:main requirements: build: @@ -20,6 +22,7 @@ requirements: - cmake host: - python + - netcdf4 - hdf5 - libnetcdf - setuptools @@ -65,6 +68,8 @@ test: - widen_transect_edge_masks.py --help - add_critical_land_blockages_to_mask.py --help - paraview_vtk_field_extractor.py -f mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc -v latCell,lonCell --ignore_time -o vtk_test + - split_grids --help + - merge_grids --help about: home: https://github.com/MPAS-Dev/MPAS-Tools/ @@ -75,7 +80,7 @@ about: description: | A set of tools for creating and manipulating meshes for the climate components based on the Model for Prediction Across Scales (MPAS) framework - doc_url: 'https://github.com/MPAS-Dev/MPAS-Tools/README.md' + doc_url: 'https://github.com/MPAS-Dev/MPAS-Tools/blob/master/README.md' dev_url: 'https://github.com/MPAS-Dev/MPAS-Tools/' extra: diff --git a/conda_package/setup.py b/conda_package/setup.py index bd0c522f0..3ad0150b8 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -42,5 +42,5 @@ entry_points={'console_scripts': ['planar_hex = mpas_tools.planar_hex:main', 'translate_planar_grid = mpas_tools.translate:main', - 'merge_grids = mpas_tools.mesh.merge:main', - 'split_grids = mpas_tools.mesh.split:main']}) + 'merge_grids = mpas_tools.merge_grids:main', + 'split_grids = mpas_tools.split_grids:main']}) diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py index 3b52ca21d..da21c28fa 120000 --- a/mesh_tools/merge_split_meshes/merge_grids.py +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -1 +1 @@ -../../conda_package/mpas_tools/mesh/merge.py \ No newline at end of file +../../conda_package/mpas_tools/merge_grids.py \ No newline at end of file diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py index 9537926ab..50ac14e26 120000 --- 
a/mesh_tools/merge_split_meshes/split_grids.py +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -1 +1 @@ -../../conda_package/mpas_tools/mesh/split.py \ No newline at end of file +../../conda_package/mpas_tools/split_grids.py \ No newline at end of file From 46f30c79d0795aa99fc8f180c9ca8b1aa88e8fbb Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 13 Jun 2019 11:08:17 +0200 Subject: [PATCH 154/180] Fix writing of pvd files in the paraview extractor The files were not being explicitly closed, which may only be an issue in python 3. This merge also fixes some formatting issues that violated PEP8. --- .../paraview_vtk_field_extractor.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index 95a29e0c0..c963f025e 100755 --- a/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -63,7 +63,8 @@ Together, this can be used to plot topography by using a calculator filter like the following: - coords*(1.0 + 100.0/mag(coords)*((1 - boundaryMask)*(-bottomDepth) + 10.0*boundaryMask)) + coords*(1.0 + 100.0/mag(coords)*((1 - boundaryMask)*(-bottomDepth) + + 10.0*boundaryMask)) If this is entered into a Calculator Filter in ParaView with the "coordinate result" box checked, the result will to display the MPAS-Ocean topography, @@ -249,7 +250,8 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: if xtimeName not in time_series_file.variables: raise ValueError("xtime variable name {} not found in " - "{}".format(xtimeName, time_series_file)) + "{}".format(xtimeName, + time_series_file)) var = time_series_file.variables[xtimeName] if len(var.shape) == 2: xtime = var[local_time_indices[time_index], @@ -377,7 +379,8 @@ def build_field_time_series(local_time_indices, 
file_names, mesh_file, if any_var_has_time_dim: # finish the pdv file pvd_file.write('\n') - pvd_file.write('\n') # }}} + pvd_file.write('\n') + pvd_file.close() # }}} if __name__ == "__main__": From 93d79c507aafc73482dcb605f3d4a23f16087434 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Fri, 14 Jun 2019 12:26:20 -0600 Subject: [PATCH 155/180] Add set_lat_lon_fields_in_planar_grid to conda package --- conda_package/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/conda_package/setup.py b/conda_package/setup.py index 019da4133..dc7d30ca5 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -29,6 +29,7 @@ packages=find_packages(), package_data={}, scripts=['mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py', + 'mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py', 'landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py', 'landice/mesh_tools_li/define_cullMask.py', 'landice/mesh_tools_li/interpolate_to_mpasli_grid.py', From 0e475a571acb9a905839185d5eea3c6623a43535 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Fri, 14 Jun 2019 12:33:28 -0600 Subject: [PATCH 156/180] Update planar_grid_transformations tools for py3 --- .../multires_scaled_hex.py | 11 +++++--- .../scale_planar_grid.py | 12 ++++++--- .../set_lat_lon_fields_in_planar_grid.py | 27 ++++++++++--------- .../translate_planar_grid.py | 18 ++++++------- 4 files changed, 39 insertions(+), 29 deletions(-) diff --git a/mesh_tools/planar_grid_transformations/multires_scaled_hex.py b/mesh_tools/planar_grid_transformations/multires_scaled_hex.py index f52f3c9a8..e051521af 100755 --- a/mesh_tools/planar_grid_transformations/multires_scaled_hex.py +++ b/mesh_tools/planar_grid_transformations/multires_scaled_hex.py @@ -15,6 +15,9 @@ Phillip J. 
Wolfram 12/22/2015 """ + +from __future__ import absolute_import, division, print_function, unicode_literals + import matplotlib.pyplot as plt import numpy as np import numexpr as ne @@ -64,7 +67,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 vertices = np.unique(vertices).tolist() for i in 1+np.arange(ntimes): - print 'Processing layer %d of %d...'%(i,ntimes) + print('Processing layer %d of %d...'%(i,ntimes)) for acell in cells[:]: for cellneighs in ds.variables['cellsOnCell'][acell]-1: cells.append(cellneighs) @@ -99,7 +102,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 plt.axis('equal') plt.show() - print 'done!' + print('done!') # compute vertex locations from circumcenters to ensure grid is Voronoi interior = np.prod(ds.variables['cellsOnVertex'][:],axis=1) > 0 @@ -109,7 +112,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 verticesOnCell[ic,nedge:] = np.nan for nl in np.arange(nllyod): - print 'On iteration %d of %d'%(nl+1, nllyod) + print('On iteration %d of %d'%(nl+1, nllyod)) if nl > 0: # update xc generators to be centroid of cells xc = np.nanmean(x[verticesOnCell], axis=1) @@ -140,7 +143,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 ds.close() - print 'finished grid' + print('finished grid') if __name__ == "__main__": diff --git a/mesh_tools/planar_grid_transformations/scale_planar_grid.py b/mesh_tools/planar_grid_transformations/scale_planar_grid.py index 8f8c446db..5327c583e 100755 --- a/mesh_tools/planar_grid_transformations/scale_planar_grid.py +++ b/mesh_tools/planar_grid_transformations/scale_planar_grid.py @@ -1,5 +1,10 @@ #!/usr/bin/env python -import numpy, math +""" +This script scales the grid of an existing MPAS mesh by a scalar amount. 
+""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import sys from netCDF4 import Dataset as NetCDFFile from optparse import OptionParser from datetime import datetime @@ -12,7 +17,7 @@ if not options.filename: parser.error("A grid file is required.") -print "Applying scale factor of: ", options.scale +print("Applying scale factor of: ", options.scale) scale = float(options.scale) @@ -45,5 +50,6 @@ newhist = thiscommand setattr(grid, 'history', newhist ) - grid.close() + +print("Scale operation is complete.") diff --git a/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py b/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py index 3cb035e7b..32688c2f9 100755 --- a/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py +++ b/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py @@ -3,10 +3,11 @@ Take MPAS planar grid and populate the lat/lon fields based on a specified projection. ''' +from __future__ import absolute_import, division, print_function, unicode_literals + import sys import netCDF4 import pyproj -#import numpy as np from optparse import OptionParser from datetime import datetime @@ -16,7 +17,7 @@ projections = dict() # add more as needed: -# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C geoid. +# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C geoid. # datum is actually EIGEN-GL04C but that is not an option in Proj. Therefore using EGM08 which should be within ~1m everywhere (and 10-20 cm in most places) # NOTE!!!!!! egm08_25.gtx can be downloaded from: http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx and the path in the projection specification line should point to it! 
#projections['gis-bamber'] = pyproj.Proj('+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=800000.0 +y_0=3400000.0 +geoidgrids=./egm08_25.gtx') @@ -35,28 +36,28 @@ -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = "This script populates the MPAS lat and lon fields based on the projection specified by the -p option." parser.add_option("-f", "--file", dest="fileInName", help="MPAS land ice file name.", default="landice_grid.nc", metavar="FILENAME") -parser.add_option("-p", "--proj", dest="projection", help="projection used for the data. Valid options are: \n" + str(projections.keys()), metavar="PROJ") +parser.add_option("-p", "--proj", dest="projection", help="projection used for the data. Valid options are: \n" + str(list(projections.keys())), metavar="PROJ") for option in parser.option_list: if option.default != ("NO", "DEFAULT"): option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() if not options.projection: - sys.exit('Error: data projection required with -p or --proj command line argument. Valid options are: ' + str(projections.keys())) + sys.exit('Error: data projection required with -p or --proj command line argument. Valid options are: ' + str(list(projections.keys()))) if not options.fileInName: - print "No filename specified, so using 'landice_grid.nc'." 
+ print("No filename specified, so using 'landice_grid.nc'.") options.fileInName = 'landice_grid.nc' -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # ================================================= -print "Using {} projection, defined as: {}".format(options.projection, projections[options.projection].srs) +print("Using {} projection, defined as: {}".format(options.projection, projections[options.projection].srs)) # get needed fields f = netCDF4.Dataset(options.fileInName, 'r+') @@ -74,8 +75,8 @@ latEdge = f.variables['latEdge'] lonEdge = f.variables['lonEdge'] -print "Input file xCell min/max values:", xCell[:].min(), xCell[:].max() -print "Input file yCell min/max values:", yCell[:].min(), yCell[:].max() +print("Input file xCell min/max values:", xCell[:].min(), xCell[:].max()) +print("Input file yCell min/max values:", yCell[:].min(), yCell[:].max()) # populate x,y fields # MPAS uses lat/lon in radians, so have pyproj return fields in radians. @@ -83,8 +84,8 @@ lonVertex[:], latVertex[:] = pyproj.transform(projections[options.projection], projections['latlon'], xVertex[:], yVertex[:], radians=True) lonEdge[:], latEdge[:] = pyproj.transform(projections[options.projection], projections['latlon'], xEdge[:], yEdge[:], radians=True) -print "Calculated latCell min/max values (radians):", latCell[:].min(), latCell[:].max() -print "Calculated lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max() +print("Calculated latCell min/max values (radians):", latCell[:].min(), latCell[:].max()) +print("Calculated lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max()) # Update history attribute of netCDF file thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) @@ -97,4 +98,4 @@ f.close() -print "Lat/lon calculations completed." +print("Lat/lon calculations completed. 
File has been written.") diff --git a/mesh_tools/planar_grid_transformations/translate_planar_grid.py b/mesh_tools/planar_grid_transformations/translate_planar_grid.py index 0541039c6..b0e9f35a5 100755 --- a/mesh_tools/planar_grid_transformations/translate_planar_grid.py +++ b/mesh_tools/planar_grid_transformations/translate_planar_grid.py @@ -2,15 +2,15 @@ ''' Translate planar MPAS grid by one of three methods ''' +from __future__ import absolute_import, division, print_function, unicode_literals import sys import netCDF4 -#import numpy as np from optparse import OptionParser from datetime import datetime -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = ("This script translates the coordinate system of the planar MPAS mesh specified with the -f flag. " "There are 3 possible methods to choose from:" @@ -27,7 +27,7 @@ option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() -print "Attempting to translate coordinates in file: %s"%options.fileInName +print("Attempting to translate coordinates in file: {}".format(options.fileInName)) if options.dataFileName and (options.xshift or options.yshift): @@ -44,17 +44,17 @@ if options.dataFileName: method = 'file' - print " Translating coordinates in %s so the domain center matches the domain center in %s."%(options.fileInName, options.dataFileName) + print(" Translating coordinates in {} so the domain center matches the domain center in {}.".format(options.fileInName, options.dataFileName)) if options.xshift or options.yshift: method = 'xy' - print " Translating coordinates in %s by user-specified values. X-shift=%f; Y-shift=%f"%(options.fileInName, options.xshift, options.yshift) + print(" Translating coordinates in {} by user-specified values. 
X-shift={}; Y-shift={}".format(options.fileInName, options.xshift, options.yshift)) -if options.center: +if options.center: method = 'center' - print " Translating coordinates in %s so the origin is the center of the domain." + print(" Translating coordinates in %s so the origin is the center of the domain.") -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # ================================================= @@ -114,4 +114,4 @@ f.close() -print "Translation completed." +print("Translation completed.") From baec630ee461224fe31af9702c0aa7b3b7f5c70b Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Fri, 14 Jun 2019 13:20:29 -0600 Subject: [PATCH 157/180] Convert landice/output_processing_li to py3 --- .../convert_landice_bitmasks.py | 16 +++++++++------- landice/output_processing_li/plot_globalStats.py | 13 +++++++------ .../output_processing_li/plot_mass_balance.py | 13 +++++++------ 3 files changed, 23 insertions(+), 19 deletions(-) diff --git a/landice/output_processing_li/convert_landice_bitmasks.py b/landice/output_processing_li/convert_landice_bitmasks.py index 0b9a5a9f0..b4ecec846 100755 --- a/landice/output_processing_li/convert_landice_bitmasks.py +++ b/landice/output_processing_li/convert_landice_bitmasks.py @@ -3,6 +3,8 @@ Script to convert landice bit mask into individual masks for each bit and save them to the netcdf file. Converts any of cellMask, edgeMask, vertexMask present in file. ''' +from __future__ import absolute_import, division, print_function, unicode_literals + import numpy from netCDF4 import Dataset @@ -32,7 +34,7 @@ } -print "** Gathering information." 
+print("** Gathering information.") parser = OptionParser() parser.add_option("-f", "--filename", dest="filename", help="file to visualize; default: output.nc", default="output.nc", metavar="FILE") options, args = parser.parse_args() @@ -50,9 +52,9 @@ else: newMaskVar = inFile.createVariable(varName, 'i', ('Time','nCells')) for t in range(nTime): - newMaskVar[t,:] = (inFile.variables['cellMask'][t,:] & masks[maskName]) / masks[maskName] + newMaskVar[t,:] = (inFile.variables['cellMask'][t,:] & masks[maskName]) // masks[maskName] inFile.sync() - print "cellMask converted to individual masks." + print("cellMask converted to individual masks.") if 'edgeMask' in inFile.variables: for maskName in masks: @@ -62,9 +64,9 @@ else: newMaskVar = inFile.createVariable(varName, 'i', ('Time','nEdges')) for t in range(nTime): - newMaskVar[t,:] = (inFile.variables['edgeMask'][t,:] & masks[maskName]) / masks[maskName] + newMaskVar[t,:] = (inFile.variables['edgeMask'][t,:] & masks[maskName]) // masks[maskName] inFile.sync() - print "edgeMask converted to individual masks." + print("edgeMask converted to individual masks.") if 'vertexMask' in inFile.variables: for maskName in masks: @@ -74,9 +76,9 @@ else: newMaskVar = inFile.createVariable(varName, 'i', ('Time','nVertices')) for t in range(nTime): - newMaskVar[t,:] = (inFile.variables['vertexMask'][t,:] & masks[maskName]) / masks[maskName] + newMaskVar[t,:] = (inFile.variables['vertexMask'][t,:] & masks[maskName]) // masks[maskName] inFile.sync() - print "vertexMask converted to individual masks." + print("vertexMask converted to individual masks.") inFile.close() diff --git a/landice/output_processing_li/plot_globalStats.py b/landice/output_processing_li/plot_globalStats.py index d69c83158..43110602f 100755 --- a/landice/output_processing_li/plot_globalStats.py +++ b/landice/output_processing_li/plot_globalStats.py @@ -3,6 +3,8 @@ Script to plot common time-series from one or more landice globalStats files. 
''' +from __future__ import absolute_import, division, print_function, unicode_literals + import sys import numpy as np import numpy.ma as ma @@ -13,7 +15,7 @@ rhoi = 910.0 -print "** Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("** Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser(description=__doc__) parser.add_option("-1", dest="file1inName", help="input filename", default="globalStats.nc", metavar="FILENAME") parser.add_option("-2", dest="file2inName", help="input filename", metavar="FILENAME") @@ -22,7 +24,7 @@ parser.add_option("-u", dest="units", help="units for mass/volume: m3, kg, Gt", default="m3", metavar="FILENAME") options, args = parser.parse_args() -print "Using ice density of {} kg/m3 if required for unit conversions".format(rhoi) +print("Using ice density of {} kg/m3 if required for unit conversions".format(rhoi)) # create axes to plot into fig = plt.figure(1, figsize=(9, 11), facecolor='w') @@ -40,7 +42,7 @@ massUnit = "Gt" else: sys.exit("Unknown mass/volume units") -print "Using volume/mass units of: ", massUnit +print("Using volume/mass units of: ", massUnit) axVol = fig.add_subplot(nrow, ncol, 1) plt.xlabel('Year') @@ -94,7 +96,7 @@ def plotStat(fname): - print "Reading and plotting file: " + fname + print("Reading and plotting file: {}".format(fname)) name = fname @@ -167,8 +169,7 @@ def plotStat(fname): axCalvFlux.legend(loc='best', prop={'size': 6}) -print "Generating plot." +print("Generating plot.") fig.tight_layout() plt.show() - diff --git a/landice/output_processing_li/plot_mass_balance.py b/landice/output_processing_li/plot_mass_balance.py index e5fcc8c64..c7ee81773 100755 --- a/landice/output_processing_li/plot_mass_balance.py +++ b/landice/output_processing_li/plot_mass_balance.py @@ -4,7 +4,8 @@ Currently only assesses grounded ice sheet mass balance. 
''' -import sys +from __future__ import absolute_import, division, print_function, unicode_literals + import numpy as np from netCDF4 import Dataset from optparse import OptionParser @@ -13,16 +14,16 @@ rhoi = 910.0 -print "** Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("** Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser(description=__doc__) parser.add_option("-f", dest="fileName", help="input filename", default="globalStats.nc", metavar="FILENAME") options, args = parser.parse_args() -print "Using ice density of {} kg/m3 if required for unit conversions".format(rhoi) +print("Using ice density of {} kg/m3 if required for unit conversions".format(rhoi)) -print "Mass balance will be inaccurate if not writing stats on every timestep." +print("Mass balance will be inaccurate if not writing stats on every timestep.") -print "Reading and plotting file: " + options.fileName +print("Reading and plotting file: {}".format(options.fileName)) f = Dataset(options.fileName,'r') yr = f.variables['daysSinceStart'][:]/365.0 dyr = np.zeros(yr.shape) @@ -83,7 +84,7 @@ plt.legend(loc='best', prop={'size': 6}) plt.tight_layout() -print "Plotting complete." 
+print("Plotting complete.") plt.show() f.close() From 58a9e02b4902651d6833066a019a1b38fdda7b28 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Fri, 14 Jun 2019 13:28:02 -0600 Subject: [PATCH 158/180] Complete conversion to py3 in landice/mesh_tools_li --- .../conversion_exodus_init_to_mpasli_mesh.py | 36 ++++++++++--------- ...ate_landice_grid_from_generic_MPAS_grid.py | 8 +++-- landice/mesh_tools_li/define_cullMask.py | 6 ++-- 3 files changed, 29 insertions(+), 21 deletions(-) diff --git a/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py b/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py index 2b990db58..7956c3dd7 100755 --- a/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py +++ b/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py @@ -1,10 +1,14 @@ #!/usr/bin/env python """ +Script to convert Albany-Land Ice output file in Exodus format to an MPAS-Land Ice format mesh. + Created on Tue Feb 13 23:50:20 2018 @author: Tong Zhang, Matt Hoffman """ +from __future__ import absolute_import, division, print_function, unicode_literals + import numpy as np from netCDF4 import Dataset from optparse import OptionParser @@ -61,20 +65,20 @@ # change the unit of the exo coord data from km to m. 
Be careful if it changes in the future if ordering == 1.0: - print "column wise pattern" + print("column wise pattern") layer_num = int(stride) data_exo_layer = data_exo[::layer_num] x_exo_layer = x_exo[::layer_num] y_exo_layer = y_exo[::layer_num] elif ordering == 0.0: - print "layer wise pattern" + print("layer wise pattern") node_num = int(stride) data_exo_layer = data_exo[0:node_num+1] x_exo_layer = x_exo[0:node_num+1] y_exo_layer = y_exo[0:node_num+1] - layer_num = int(len(data_exo)/node_num) + layer_num = len(data_exo)//node_num else: - print "The ordering is probably wrong" + print("The ordering is probably wrong") # slice the exo data to get the MPAS data node_num_layer = len(x_exo_layer) @@ -83,7 +87,7 @@ # set beta value to some uniform value before we put new data in it if (options.conversion_method == 'coord'): - print "use coordinate method" + print("use coordinate method") for i in range(node_num_layer): index_x, = np.where(abs(x[:]-x_exo_layer[i])/(abs(x[:])+1e-10)<1e-3) index_y, = np.where(abs(y[:]-y_exo_layer[i])/(abs(y[:])+1e-10)<1e-3) @@ -98,7 +102,7 @@ # This method may fail at the point where x or y = 0, while x_exo or y_exo is not elif (options.conversion_method == 'id'): - print "use global id method. Need a global id file" + print("use global id method. Need a global id file") usefullCellID = np.loadtxt(options.id_file,dtype='i') usefullCellID_array = usefullCellID[1::] # The first number in the file is the total number. skip it @@ -112,7 +116,7 @@ else: sys.exit("wrong conversion method! Set option m as id or coord!") -print "Successful in converting data from Exodus to MPAS!" +print("Successful in converting data from Exodus to MPAS!") nCells = len(dataset.dimensions['nCells']) thickness = dataset.variables['thickness'][0,:] @@ -141,10 +145,10 @@ # 5) Update mask # 6) go to step 1) -print "\nStart extrapolation!" 
+print("\nStart extrapolation!") while np.count_nonzero(keepCellMask) != nCells: - + keepCellMask = np.copy(keepCellMaskNew) searchCells = np.where(keepCellMask==0)[0] @@ -164,7 +168,7 @@ dataset.variables[options.var_name][0,iCell] = sum(dataset.variables[options.var_name][0,nonzero_id])/nonzero_num keepCellMask[iCell] = 1 - print ("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) + print("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) else: @@ -200,10 +204,10 @@ keepCellMaskNew[iCell] = 1 - print ("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) + print("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) -print "\nStart idw smoothing for extrapolated field!" +print("\nStart idw smoothing for extrapolated field!") iter_num = 0 while iter_num < int(options.smooth_iter_num): @@ -226,7 +230,7 @@ dataset.variables[options.var_name][0,iCell] = sum(dataset.variables[options.var_name][0,nonzero_id])/nonzero_num - print ("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) + print("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) else: @@ -254,13 +258,13 @@ var_interp = 1.0/sum(1.0/ds)*sum(1.0/ds*var_adj) dataset.variables[options.var_name][0,iCell] = var_interp - print ("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) + print("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) iter_num = iter_num + 1 if iter_num == 0: - print "\nNo smoothing! Iter number is 0!" + print("\nNo smoothing! Iter number is 0!") -print "\nExtrapolation and smoothing finished!" 
+print("\nExtrapolation and smoothing finished!") dataset.close() diff --git a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py index 70f6a0db7..2d290ab33 100755 --- a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py +++ b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py @@ -1,7 +1,9 @@ #!/usr/bin/env python -# Script to create a grid with land ice variables from an MPAS grid. -# I've only tested it with a periodic_hex grid, but it should work with any MPAS grid. -# Currently variable attributes are not copied (and periodic_hex does not assign any, so this is ok). If variable attributes are added to periodic_hex, this script should be modified to copy them (looping over dir(var), skipping over variable function names "assignValue", "getValue", "typecode"). +""" +Script to create a grid with land ice variables from an MPAS grid. +Currently variable attributes are not copied. +This script could be modified to copy them (looping over dir(var), skipping over variable function names "assignValue", "getValue", "typecode"). +""" from __future__ import absolute_import, division, print_function, \ unicode_literals diff --git a/landice/mesh_tools_li/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py index 971a13fc6..6ce776c09 100755 --- a/landice/mesh_tools_li/define_cullMask.py +++ b/landice/mesh_tools_li/define_cullMask.py @@ -1,6 +1,8 @@ #!/usr/bin/env python -# Script for adding a field named cullMask to an MPAS land ice grid for use with the MpasCellCuller tool that actually culls the unwanted cells. -# Matt Hoffman, February 28, 2013 +""" +Script for adding a field named cullMask to an MPAS land ice grid for use with the MpasCellCuller tool that actually culls the unwanted cells. 
+Matt Hoffman, February 28, 2013 +""" from __future__ import absolute_import, division, print_function, \ unicode_literals From e9b23f71d57e62de49475a61e15fab1aa340557d Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Fri, 14 Jun 2019 15:02:33 -0600 Subject: [PATCH 159/180] Fix "progress bar" in create_landice_grid_from_generic_MPAS_grid --- .../mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py index 2d290ab33..65276e3a9 100755 --- a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py +++ b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py @@ -133,7 +133,7 @@ vars2copy.append(optionalVar) for varname in vars2copy: - print("-"), + print("- ", end='') print("|") for varname in vars2copy: thevar = filein.variables[varname] From b7234f50c6966ac4e73d3a25dbc66fa336da1b12 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 15 Jun 2019 20:33:40 +0200 Subject: [PATCH 160/180] Update to v0.0.3 for release Add a conda build config file so all 3 supported python versions get built --- conda_package/docs/conf.py | 4 ++-- conda_package/mpas_tools/__init__.py | 2 +- conda_package/recipe/conda_build_config.yaml | 10 ++++++++++ conda_package/recipe/meta.yaml | 5 ++++- conda_package/setup.py | 2 +- 5 files changed, 18 insertions(+), 5 deletions(-) create mode 100644 conda_package/recipe/conda_build_config.yaml diff --git a/conda_package/docs/conf.py b/conda_package/docs/conf.py index 1255a17bd..478b663c1 100644 --- a/conda_package/docs/conf.py +++ b/conda_package/docs/conf.py @@ -63,9 +63,9 @@ # built documents. # # The short X.Y version. -version = u'0.0.2' +version = u'0.0.3' # The full version, including alpha/beta/rc tags. -release = u'0.0.2' +release = u'0.0.3' # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. diff --git a/conda_package/mpas_tools/__init__.py b/conda_package/mpas_tools/__init__.py index af0360983..70e40a3b5 100644 --- a/conda_package/mpas_tools/__init__.py +++ b/conda_package/mpas_tools/__init__.py @@ -1,2 +1,2 @@ -__version_info__ = (0, 0, 2) +__version_info__ = (0, 0, 3) __version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/conda_package/recipe/conda_build_config.yaml b/conda_package/recipe/conda_build_config.yaml new file mode 100644 index 000000000..a024c5f34 --- /dev/null +++ b/conda_package/recipe/conda_build_config.yaml @@ -0,0 +1,10 @@ +channel_sources: + - conda-forge,defaults + +channel_targets: + - conda-forge main + +python: + - 3.7 + - 3.6 + - 2.7 diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index 1980b8e2a..1e38165e8 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "mpas_tools" %} -{% set version = "0.0.2" %} +{% set version = "0.0.3" %} package: name: '{{ name|lower }}' @@ -26,6 +26,9 @@ requirements: - hdf5 - libnetcdf - setuptools + - netcdf4 + - openmp # [osx] + run: - python - netcdf4 diff --git a/conda_package/setup.py b/conda_package/setup.py index 3954fe19c..eda282ec7 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages -version = '0.0.2' +version = '0.0.3' setup(name='mpas_tools', version=version, From 67a96142b799aea4c9eba0c984596246993a48fa Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 21 Jun 2019 09:32:03 +0200 Subject: [PATCH 161/180] Update copyright date to 2019 --- LICENSE | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE b/LICENSE index f6af5ee0a..401bf749c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,7 +1,7 @@ -Copyright (c) 2013-2018, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047; +Copyright (c) 2013-2019, Los Alamos National 
Security, LLC (LANS) (Ocean: LA-CC-13-047; Land Ice: LA-CC-13-117) and the University Corporation for Atmospheric Research (UCAR). -All rights reserved. +All rights reserved. LANS is the operator of the Los Alamos National Laboratory under Contract No. DE-AC52-06NA25396 with the U.S. Department of Energy. UCAR manages the National From b2d758d94792503f2ac2e21ab9672dddedda6ab3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 21 Jun 2019 09:32:18 +0200 Subject: [PATCH 162/180] Find version number in __init__.py where possible --- conda_package/docs/conf.py | 8 ++++---- conda_package/setup.py | 9 ++++++++- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/conda_package/docs/conf.py b/conda_package/docs/conf.py index 478b663c1..4968e1e79 100644 --- a/conda_package/docs/conf.py +++ b/conda_package/docs/conf.py @@ -52,9 +52,9 @@ project = u'mpas_tools' copyright = u'This software is open source software available under the BSD-3' \ u'license. Copyright (c) 2019 Triad National Security, LLC. ' \ - u'All rights reserved. Copyright (c) 2018 Lawrence Livermore ' \ + u'All rights reserved. Copyright (c) 2019 Lawrence Livermore ' \ u'National Security, LLC. All rights reserved. Copyright (c) ' \ - u'2018 UT-Battelle, LLC. All rights reserved.' + u'2019 UT-Battelle, LLC. All rights reserved.' author = u'Xylar Asay-Davis, Doug Jacobsen, Michael Duda, Mark Petersen, ' \ u'Matt Hoffman, Adridan Turner, Philip Wolfram' @@ -63,9 +63,9 @@ # built documents. # # The short X.Y version. -version = u'0.0.3' +version = '.'.join(str(vi) for vi in mpas_tools.__version_info__[0:2]) # The full version, including alpha/beta/rc tags. -release = u'0.0.3' +release = mpas_tools.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/conda_package/setup.py b/conda_package/setup.py index eda282ec7..481d1e713 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -1,8 +1,15 @@ #!/usr/bin/env python +import os +import re from setuptools import setup, find_packages -version = '0.0.3' +here = os.path.abspath(os.path.dirname(__file__)) +with open(os.path.join(here, 'mpas_tools', '__init__.py')) as f: + init_file = f.read() + +version = re.search(r'{}\s*=\s*[(]([^)]*)[)]'.format('__version_info__'), + init_file).group(1).replace(', ', '.') setup(name='mpas_tools', version=version, From e542d4f4d1c1062da239a86ff78d5107e13a63c6 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Tue, 9 Jul 2019 14:38:19 -0600 Subject: [PATCH 163/180] Add new script to remove non-monotonically increasing times from output file --- .../remove_output_file_time_loops.py | 83 +++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100755 landice/output_processing_li/remove_output_file_time_loops.py diff --git a/landice/output_processing_li/remove_output_file_time_loops.py b/landice/output_processing_li/remove_output_file_time_loops.py new file mode 100755 index 000000000..e8eeafae0 --- /dev/null +++ b/landice/output_processing_li/remove_output_file_time_loops.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +''' +Script to remove repeated time entries in globalStats or other output file that occur +due to inexact restarts used in conjunction with the adpative timestepper. +Requires 'daysSinceStart' field is available. 
(Could be modified to use xtime instead) +''' + +from __future__ import absolute_import, division, print_function, unicode_literals + +import sys +from netCDF4 import Dataset +import numpy as np +import argparse + + +parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument("-f", "--file", dest="file", help="File to be cleaned.", metavar="FILE", default="globalStats.nc") +args = parser.parse_args() + +f = Dataset(args.file, 'r') +days = f.variables['daysSinceStart'][:] +nt = len(f.dimensions['Time']) + +keepInd = np.zeros((nt,)) +keepInd[0] = 1 +prevMaxDay = days[0] +for i in range(1,nt): + if days[i] > prevMaxDay: + keepInd[i] = 1 + prevMaxDay = days[i] + else: + keepInd[i] = 0 +print("Keeping {} indices out of {}".format(int(keepInd.sum()), nt)) +keepList = np.nonzero(keepInd)[0] + +if int(keepInd.sum())==nt: + print("No cleaning required.") + sys.exit() + +# Copy all fields to a new file +fnameCleaned=args.file+".cleaned" +fileout = Dataset(fnameCleaned, 'w') + +for name in f.ncattrs(): + setattr(fileout, name, getattr(f, name) ) + print('Copied global attribute {} = {}'.format(name, getattr(f, name))) + +if hasattr(fileout, 'history'): + setattr(fileout, 'history', sys.argv[:] ) + +fileout.sync() + +print("---- Copying dimensions from input file to output file ----") +for dim in f.dimensions.keys(): + print(dim) + if dim == 'Time': + dimvalue = None # netCDF4 won't properly get this with the command below (you need to use the isunlimited method) + else: + dimvalue = len(f.dimensions[dim]) + fileout.createDimension(dim, dimvalue) +fileout.sync() + +print("---- Copying variables from input file to output file ----") +for varname in f.variables: + print(varname) + thevar = f.variables[varname] + newVar = fileout.createVariable(varname, thevar.dtype, thevar.dimensions) + if 'Time' in f.variables[varname].dimensions: + if 'Time' == f.variables[varname].dimensions[0]: + if 
len(f.variables[varname].dimensions) == 1: + newVar[:] = thevar[keepList] + else: + newVar[:] = thevar[keepList,:] + else: + sys.exit("Error: 'Time' is in dimension list for variable {}, but it is not the first dimension. Script needs improving to handle this case.".format(varname)) + else: + newVar[:] = thevar[:] +print("----") + +fileout.close() +f.close() + +print("Complete. Cleaned output written to {}".format(fnameCleaned)) From 735f3da288cc9272f893160a5caa295faa8834a4 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 29 Jul 2019 15:25:33 -0600 Subject: [PATCH 164/180] Add masked array error fix to interpolation script --- landice/mesh_tools_li/interpolate_to_mpasli_grid.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py index 6c7128e28..89e771ec0 100755 --- a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py +++ b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py @@ -355,6 +355,7 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): # Open the output file, get needed dimensions & variables try: MPASfile = netCDF4.Dataset(options.mpasFile,'r+') + MPASfile.set_auto_mask(False) try: nVertLevels = len(MPASfile.dimensions['nVertLevels']) except: @@ -387,6 +388,7 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): # Open the input file, get needed dimensions inputFile = netCDF4.Dataset(options.inputFile,'r') +inputFile.set_auto_mask(False) # Figure out if this is CISM or MPAS if 'x1' in inputFile.variables: From 63b51b40561abafe120324a6ff61ecdcb0a5fd64 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Mon, 29 Jul 2019 16:05:07 -0600 Subject: [PATCH 165/180] use second limb of time loops This commit changes remove_output_file_time_loops.py to use the second limb of a time loop instead of the first. 
That results in a smooth resulting time-series, whereas the first loop sometimes left a noticeable jump in time-series values. --- .../remove_output_file_time_loops.py | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/landice/output_processing_li/remove_output_file_time_loops.py b/landice/output_processing_li/remove_output_file_time_loops.py index e8eeafae0..aebb269fe 100755 --- a/landice/output_processing_li/remove_output_file_time_loops.py +++ b/landice/output_processing_li/remove_output_file_time_loops.py @@ -22,21 +22,29 @@ nt = len(f.dimensions['Time']) keepInd = np.zeros((nt,)) -keepInd[0] = 1 +keepInd[:] = 1 # initialize to keep all days prevMaxDay = days[0] +nLoops = 0 for i in range(1,nt): - if days[i] > prevMaxDay: - keepInd[i] = 1 - prevMaxDay = days[i] - else: - keepInd[i] = 0 -print("Keeping {} indices out of {}".format(int(keepInd.sum()), nt)) + if days[i] < prevMaxDay: + # found a loop. + print("Found a time loop at index {}".format(i)) + nLoops += 1 + # We want the second instance of this time period, not the first + # So we need to "unkeep" all previous time slices before this one that have greater times + ind = np.where(days[:i] > days[i])[0] + keepInd[ind] = 0 + prevMaxDay = days[i] + +print("Found and repairing {} time loops. 
Keeping {} indices out of {}.".format(nLoops, int(keepInd.sum()), nt)) keepList = np.nonzero(keepInd)[0] if int(keepInd.sum())==nt: print("No cleaning required.") sys.exit() +# ----- continue processing if needed ----- + # Copy all fields to a new file fnameCleaned=args.file+".cleaned" fileout = Dataset(fnameCleaned, 'w') From f6fde00df4d14f71697ccca49f39db08e12ccbaa Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Wed, 21 Aug 2019 10:10:24 -0700 Subject: [PATCH 166/180] Add nearest neighbor interpolation to interp script --- .../interpolate_to_mpasli_grid.py | 69 ++++++++++++++++++- 1 file changed, 68 insertions(+), 1 deletion(-) diff --git a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py index 89e771ec0..08d9c1159 100755 --- a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py +++ b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py @@ -31,7 +31,7 @@ parser.description = __doc__ parser.add_option("-s", "--source", dest="inputFile", help="name of source (input) file. Can be either CISM format or MPASLI format.", default="cism.nc", metavar="FILENAME") parser.add_option("-d", "--destination", dest="mpasFile", help="name of destination file on which to interpolate fields. This needs to be MPASLI format with desired fields already existing.", default="landice_grid.nc", metavar="FILENAME") -parser.add_option("-m", "--method", dest="interpType", help="interpolation method to use. b=bilinear, d=barycentric, e=ESMF", default="b", metavar="METHOD") +parser.add_option("-m", "--method", dest="interpType", help="interpolation method to use. b=bilinear, d=barycentric, e=ESMF, n=nearest neighbor", default="b", metavar="METHOD") parser.add_option("-w", "--weight", dest="weightFile", help="ESMF weight file to input. 
Only used by ESMF interpolation method", metavar="FILENAME") parser.add_option("-t", "--thickness-only", dest="thicknessOnly", action="store_true", default=False, help="Only interpolate thickness and ignore all other variables (useful for setting up a cullMask)") for option in parser.option_list: @@ -161,6 +161,18 @@ def delaunay_interp_weights(xy, uv, d=2): #---------------------------- +def nn_interp_weights(xy, uv, d=2): + ''' + xy = input x,y coords + uv = output (MPSALI) x,y coords + Note: could separate out building tree and interpolation for efficiency if many fields need to be processed + ''' + tree = scipy.spatial.cKDTree(xy) + dist,idx = tree.query(uv, k=1) # k is the number of nearest neighbors. +# outfield = values.flatten()[idx] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + return idx +#---------------------------- + def delaunay_interpolate(values, gridType): if gridType == 'x0': vtx = vtx0; wts = wts0 @@ -220,6 +232,14 @@ def interpolate_field(MPASfieldName): elif options.interpType == 'd': print(" ...Interpolating to {} using barycentric method...".format(MPASfieldName)) MPASfield = delaunay_interpolate(InputField, fieldInfo[MPASfieldName]['gridType']) + elif options.interpType == 'n': + print(" ...Interpolating to {} using nearest neighbor method...".format(MPASfieldName)) + if fieldInfo[MPASfieldName]['gridType'] == 'x0': + MPASfield = InputField.flatten()[nn_idx_x0] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'x1': + MPASfield = InputField.flatten()[nn_idx_x1] # 2d cism fields need to be flattened. 
(Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'cell': + MPASfield = InputField.flatten()[nn_idx_cell] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. elif options.interpType == 'e': print(" ...Interpolating to {} using ESMF-weights method...".format(MPASfieldName)) MPASfield = ESMF_interp(InputField) @@ -288,6 +308,14 @@ def interpolate_field_with_layers(MPASfieldName): mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) elif filetype=='mpas': mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[:,z], fieldInfo[MPASfieldName]['gridType']) + elif options.interpType == 'n': + print(" ...Layer {}, Interpolating this layer to MPAS grid using nearest neighbor method...".format(z)) + if fieldInfo[MPASfieldName]['gridType'] == 'x0': + mpas_grid_input_layers[z,:] = InputField[z,:,:].flatten()[nn_idx_x0] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'x1': + mpas_grid_input_layers[z,:] = InputField[z,:,:].flatten()[nn_idx_x1] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. 
+ elif fieldInfo[MPASfieldName]['gridType'] == 'cell': + mpas_grid_input_layers[z,:] = InputField[:,z].flatten()[nn_idx_cell] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. elif options.interpType == 'e': print(" ...Layer{}, Interpolating this layer to MPAS grid using ESMF-weights method...".format(z)) mpas_grid_input_layers[z,:] = ESMF_interp(InputField[z,:,:]) @@ -520,6 +548,42 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): vtCell, wtsCell, outsideIndcell, treecell = delaunay_interp_weights(inputmpasXY, mpasXY) end = time.clock(); print('done in {}'.format(end-start)) +#---------------------------- +# Setup NN interpolation weights if needed +if options.interpType == 'n': + mpasXY = np.vstack((xCell[:], yCell[:])).transpose() + + if filetype=='cism': + [Yi,Xi] = np.meshgrid(x1[:], y1[:]) + cismXY1 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) + cismXY1[:,0] = Yi.flatten() + cismXY1[:,1] = Xi.flatten() + + print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') + start = time.clock() + nn_idx_x1 = nn_interp_weights(cismXY1, mpasXY) + end = time.clock(); print('done in {}'.format(end-start)) + + if 'x0' in inputFile.variables and not options.thicknessOnly: + # Need to setup separate weights for this grid + [Yi,Xi] = np.meshgrid(x0[:], y0[:]) + cismXY0 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) + cismXY0[:,0] = Yi.flatten() + cismXY0[:,1] = Xi.flatten() + + print('Building interpolation weights: CISM x0/y0 -> MPAS') + start = time.clock() + nn_idx_x0 = nn_interp_weights(cismXY0, mpasXY) + end = time.clock(); print('done in {}'.format(end-start)) + + elif filetype=='mpas': + inputmpasXY= np.vstack((inputxCell[:], inputyCell[:])).transpose() + print('Building interpolation weights: MPAS in -> MPAS out') + start = time.clock() + nn_idx_cell = 
nn_interp_weights(inputmpasXY, mpasXY) + end = time.clock(); print('done in {}'.format(end-start)) + + #---------------------------- # Map Input-Output field names - add new fields here as needed @@ -545,6 +609,9 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): fieldInfo['observedThicknessTendencyUncertainty'] = {'InputName':'dHdtErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['thicknessUncertainty'] = {'InputName':'thkErr', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['ismip6shelfMelt_basin'] = {'InputName':'ismip6shelfMelt_basin', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['ismip6shelfMelt_deltaT'] = {'InputName':'ismip6shelfMelt_deltaT', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + elif filetype=='mpas': fieldInfo['thickness'] = {'InputName':'thickness', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} From 7ef63f28b08897d3cac6fa5605d9c55294426ccf Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 3 Oct 2019 20:20:54 +0200 Subject: [PATCH 167/180] Add bare-bones docs with Read The Docs --- conda_package/docs/environment.yml | 1 - readthedocs.yml | 31 ++++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 readthedocs.yml diff --git a/conda_package/docs/environment.yml b/conda_package/docs/environment.yml index eefa4830d..8779e9135 100644 --- a/conda_package/docs/environment.yml +++ b/conda_package/docs/environment.yml @@ -1,7 +1,6 @@ name: mpas_tools_docs channels: - conda-forge - - xylar dependencies: - python=3.7 - pytest diff --git a/readthedocs.yml b/readthedocs.yml new file mode 100644 index 000000000..afe4968d0 --- /dev/null +++ b/readthedocs.yml @@ -0,0 +1,31 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 
2 + +# Build documentation in the conda_package/docs/ directory with Sphinx +sphinx: + configuration: conda_package/docs/conf.py + +# Optionally build your docs in additional formats such as PDF and ePub +# Build PDF +formats: + - pdf + +# Optionally set the version of Python and requirements required to build your docs +python: + version: 3.7 + install: + - method: setuptools + path: conda_package/ + system_packages: true + +conda: + environment: conda_package/docs/environment.yml + +build: + image: latest + + From f36008d69f22965cbf059d8f902514f7e525281d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 3 Oct 2019 21:02:15 +0200 Subject: [PATCH 168/180] Fix conda package setup.py when called with a path In such cases, we need to cd to the location of setup.py before finding package modules and scripts. --- conda_package/setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/conda_package/setup.py b/conda_package/setup.py index 481d1e713..318d284eb 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -11,6 +11,9 @@ version = re.search(r'{}\s*=\s*[(]([^)]*)[)]'.format('__version_info__'), init_file).group(1).replace(', ', '.') + +os.chdir(here) + setup(name='mpas_tools', version=version, description='A set of tools for creating and manipulating meshes for the' From 143b9cd1eefd7f6a907beb3fc45c4ed801dcdada Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 3 Oct 2019 21:25:37 +0200 Subject: [PATCH 169/180] In conda package, copy directories within setup.py This was being done externally in scripts but it is easier to include it in setup.py. This also seems to be the only way to do this within Read The Docs, which doesn't support custom install scripts. 
--- conda_package/setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/conda_package/setup.py b/conda_package/setup.py index 318d284eb..42277cb67 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -3,6 +3,7 @@ import os import re from setuptools import setup, find_packages +import shutil here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'mpas_tools', '__init__.py')) as f: @@ -11,9 +12,11 @@ version = re.search(r'{}\s*=\s*[(]([^)]*)[)]'.format('__version_info__'), init_file).group(1).replace(', ', '.') - os.chdir(here) +for path in ['ocean', 'landice', 'visualization', 'mesh_tools']: + shutil.copytree('../{}'.format(path), './{}'.format(path)) + setup(name='mpas_tools', version=version, description='A set of tools for creating and manipulating meshes for the' From 9adccb56a2a6f2fe067b61d14e2014b7ce856b0c Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Sun, 27 Oct 2019 06:44:12 -0600 Subject: [PATCH 170/180] Upgrade create_SCRIP_file_from_MPAS_mesh.py to python3 --- .../create_SCRIP_file_from_MPAS_mesh.py | 52 +++++++++---------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py index 813fe30dd..1c9eb7f41 100755 --- a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py +++ b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py @@ -9,31 +9,31 @@ from optparse import OptionParser -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)" +print ("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = "This script takes an MPAS grid file and generates a SCRIP grid file." 
parser.add_option("-m", "--mpas", dest="mpasFile", help="MPAS grid file name used as input.", default="grid.nc", metavar="FILENAME") parser.add_option("-s", "--scrip", dest="scripFile", help="SCRIP grid file to output.", default="scrip.nc", metavar="FILENAME") parser.add_option("-l", "--landice", dest="landiceMasks", help="If flag is on, landice masks will be computed and used.", action="store_true") for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() if not options.mpasFile: - sys.exit('Error: MPAS input grid file is required. Specify with -m command line argument.') + sys.exit('Error: MPAS input grid file is required. Specify with -m command line argument.') if not options.scripFile: - sys.exit('Error: SCRIP output grid file is required. Specify with -s command line argument.') + sys.exit('Error: SCRIP output grid file is required. 
Specify with -s command line argument.') if not options.landiceMasks: options.landiceMasks = False if options.landiceMasks: - print " -- Landice Masks are enabled" + print (" -- Landice Masks are enabled") else: - print " -- Landice Masks are disabled" + print (" -- Landice Masks are disabled") -print '' # make a space in stdout before further output +print ('') # make a space in stdout before further output # =============================================== @@ -56,10 +56,10 @@ if sphereRadius <= 0: - print " -- WARNING: conservative remapping is NOT possible when 'sphereRadius' <= 0 because 'grid_area' field will be infinite (from division by 0)" + print (" -- WARNING: conservative remapping is NOT possible when 'sphereRadius' <= 0 because 'grid_area' field will be infinite (from division by 0)") if on_a_sphere == "NO": - print " -- WARNING: 'on_a_sphere' attribute is 'NO', which means that there may be some disagreement regarding area between the planar (source) and spherical (target) mesh" + print (" -- WARNING: 'on_a_sphere' attribute is 'NO', which means that there may be some disagreement regarding area between the planar (source) and spherical (target) mesh") if options.landiceMasks: landIceMask = fin.variables['landIceMask'][:] @@ -94,19 +94,19 @@ grid_corner_lon_local = np.zeros( (nCells, maxVertices) ) # It is WAYYY faster to fill in the array entry-by-entry in memory than to disk. 
grid_corner_lat_local = np.zeros( (nCells, maxVertices) ) for iCell in range(nCells): - vertexMax = nEdgesOnCell[iCell] - grid_corner_lat_local[iCell, 0:vertexMax] = latVertex[verticesOnCell[iCell, 0:vertexMax] - 1] - grid_corner_lon_local[iCell, 0:vertexMax] = lonVertex[verticesOnCell[iCell, 0:vertexMax] - 1] - if vertexMax < maxVertices: - # repeat the last vertex location for any remaining, unused vertex indices - grid_corner_lat_local[iCell, vertexMax:] = latVertex[verticesOnCell[iCell, vertexMax-1] - 1] - grid_corner_lon_local[iCell, vertexMax:] = lonVertex[verticesOnCell[iCell, vertexMax-1] - 1] + vertexMax = nEdgesOnCell[iCell] + grid_corner_lat_local[iCell, 0:vertexMax] = latVertex[verticesOnCell[iCell, 0:vertexMax] - 1] + grid_corner_lon_local[iCell, 0:vertexMax] = lonVertex[verticesOnCell[iCell, 0:vertexMax] - 1] + if vertexMax < maxVertices: + # repeat the last vertex location for any remaining, unused vertex indices + grid_corner_lat_local[iCell, vertexMax:] = latVertex[verticesOnCell[iCell, vertexMax-1] - 1] + grid_corner_lon_local[iCell, vertexMax:] = lonVertex[verticesOnCell[iCell, vertexMax-1] - 1] if options.landiceMasks: - # If landiceMasks are enabled, mask out ocean under landice. - grid_imask[iCell] = 1 - landIceMask[0, iCell] + # If landiceMasks are enabled, mask out ocean under landice. + grid_imask[iCell] = 1 - landIceMask[0, iCell] else: - grid_imask[iCell] = 1 # If landiceMasks are not enabled, don't mask anything out. + grid_imask[iCell] = 1 # If landiceMasks are not enabled, don't mask anything out. 
grid_corner_lat[:] = grid_corner_lat_local[:] grid_corner_lon[:] = grid_corner_lon_local[:] @@ -122,13 +122,13 @@ #plt.show() -print "Input latCell min/max values (radians):", latCell[:].min(), latCell[:].max() -print "Input lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max() -print "Calculated grid_center_lat min/max values (radians):", grid_center_lat[:].min(), grid_center_lat[:].max() -print "Calculated grid_center_lon min/max values (radians):", grid_center_lon[:].min(), grid_center_lon[:].max() -print "Calculated grid_area min/max values (sq radians):", grid_area[:].min(), grid_area[:].max() +print ("Input latCell min/max values (radians):", latCell[:].min(), latCell[:].max()) +print ("Input lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max()) +print ("Calculated grid_center_lat min/max values (radians):", grid_center_lat[:].min(), grid_center_lat[:].max()) +print ("Calculated grid_center_lon min/max values (radians):", grid_center_lon[:].min(), grid_center_lon[:].max()) +print ("Calculated grid_area min/max values (sq radians):", grid_area[:].min(), grid_area[:].max()) fin.close() fout.close() -print "Creation of SCRIP file is complete." +print ("Creation of SCRIP file is complete.") From ec04dcef826cb43fdcc768dae4d94ca83ac8fd64 Mon Sep 17 00:00:00 2001 From: Mark Petersen Date: Mon, 28 Oct 2019 05:13:55 -0600 Subject: [PATCH 171/180] Change spacing --- .../create_SCRIP_file_from_MPAS_mesh.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py index 1c9eb7f41..c2addb766 100755 --- a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py +++ b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py @@ -9,7 +9,7 @@ from optparse import OptionParser -print ("== Gathering information. (Invoke with --help for more details. 
All arguments are optional)") +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = "This script takes an MPAS grid file and generates a SCRIP grid file." parser.add_option("-m", "--mpas", dest="mpasFile", help="MPAS grid file name used as input.", default="grid.nc", metavar="FILENAME") @@ -29,11 +29,11 @@ options.landiceMasks = False if options.landiceMasks: - print (" -- Landice Masks are enabled") + print(" -- Landice Masks are enabled") else: - print (" -- Landice Masks are disabled") + print(" -- Landice Masks are disabled") -print ('') # make a space in stdout before further output +print('') # make a space in stdout before further output # =============================================== @@ -56,10 +56,10 @@ if sphereRadius <= 0: - print (" -- WARNING: conservative remapping is NOT possible when 'sphereRadius' <= 0 because 'grid_area' field will be infinite (from division by 0)") + print(" -- WARNING: conservative remapping is NOT possible when 'sphereRadius' <= 0 because 'grid_area' field will be infinite (from division by 0)") if on_a_sphere == "NO": - print (" -- WARNING: 'on_a_sphere' attribute is 'NO', which means that there may be some disagreement regarding area between the planar (source) and spherical (target) mesh") + print(" -- WARNING: 'on_a_sphere' attribute is 'NO', which means that there may be some disagreement regarding area between the planar (source) and spherical (target) mesh") if options.landiceMasks: landIceMask = fin.variables['landIceMask'][:] @@ -122,13 +122,13 @@ #plt.show() -print ("Input latCell min/max values (radians):", latCell[:].min(), latCell[:].max()) -print ("Input lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max()) -print ("Calculated grid_center_lat min/max values (radians):", grid_center_lat[:].min(), grid_center_lat[:].max()) -print ("Calculated grid_center_lon min/max values (radians):", 
grid_center_lon[:].min(), grid_center_lon[:].max()) -print ("Calculated grid_area min/max values (sq radians):", grid_area[:].min(), grid_area[:].max()) +print("Input latCell min/max values (radians):", latCell[:].min(), latCell[:].max()) +print("Input lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max()) +print("Calculated grid_center_lat min/max values (radians):", grid_center_lat[:].min(), grid_center_lat[:].max()) +print("Calculated grid_center_lon min/max values (radians):", grid_center_lon[:].min(), grid_center_lon[:].max()) +print("Calculated grid_area min/max values (sq radians):", grid_area[:].min(), grid_area[:].max()) fin.close() fout.close() -print ("Creation of SCRIP file is complete.") +print("Creation of SCRIP file is complete.") From 0f156ec929f08a0d4246cbb0ad62ba8742d1530d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 28 Oct 2019 09:51:23 +0100 Subject: [PATCH 172/180] Add MPAS scrip creation script to conda package Add some missing tests in conda recipe. 
Remove redundant listing of a script in setup.py --- conda_package/recipe/meta.yaml | 2 ++ conda_package/setup.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index 1e38165e8..402b9ac37 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -63,6 +63,8 @@ test: - MpasCellCuller.x nonperiodic_mesh_30x20_1km.nc culled_nonperiodic_mesh_30x20_1km.nc - python -m pytest conda_package/mpas_tools/tests - mark_horns_for_culling.py --help + - set_lat_lon_fields_in_planar_grid.py --help + - create_SCRIP_file_from_MPAS_mesh.py --help - create_landice_grid_from_generic_MPAS_grid.py --help - define_cullMask.py --help - interpolate_to_mpasli_grid.py --help diff --git a/conda_package/setup.py b/conda_package/setup.py index 42277cb67..3abc52f42 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -43,6 +43,7 @@ package_data={}, scripts=['mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py', 'mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py', + 'mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py', 'landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py', 'landice/mesh_tools_li/define_cullMask.py', 'landice/mesh_tools_li/interpolate_to_mpasli_grid.py', @@ -50,7 +51,6 @@ 'ocean/coastline_alteration/add_land_locked_cells_to_mask.py', 'ocean/coastline_alteration/widen_transect_edge_masks.py', 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py', - 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py', 'visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py'], install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], entry_points={'console_scripts': From 15a9492fb6a56060779c8dd63db956bd38cddf2a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 28 Oct 2019 10:54:09 +0100 Subject: [PATCH 173/180] In setup.py only copy external source dirs if 
not already done This is needed only for Read The Docs, which won't allow a build script. --- conda_package/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conda_package/setup.py b/conda_package/setup.py index 3abc52f42..5d05dfef2 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -15,7 +15,8 @@ os.chdir(here) for path in ['ocean', 'landice', 'visualization', 'mesh_tools']: - shutil.copytree('../{}'.format(path), './{}'.format(path)) + if not os.path.exists(path): + shutil.copytree('../{}'.format(path), './{}'.format(path)) setup(name='mpas_tools', version=version, From c3609c0326ce492d2c4bafb8ffae678c280240b4 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 28 Oct 2019 13:49:12 +0100 Subject: [PATCH 174/180] Update to v0.0.4 --- conda_package/mpas_tools/__init__.py | 2 +- conda_package/recipe/meta.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/conda_package/mpas_tools/__init__.py b/conda_package/mpas_tools/__init__.py index 70e40a3b5..05286d486 100644 --- a/conda_package/mpas_tools/__init__.py +++ b/conda_package/mpas_tools/__init__.py @@ -1,2 +1,2 @@ -__version_info__ = (0, 0, 3) +__version_info__ = (0, 0, 4) __version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index 402b9ac37..9445b05ce 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "mpas_tools" %} -{% set version = "0.0.3" %} +{% set version = "0.0.4" %} package: name: '{{ name|lower }}' From e94f14a0c8ee29e9223a2648c6261eb6ab380ff9 Mon Sep 17 00:00:00 2001 From: Matthew Hoffman Date: Tue, 5 Nov 2019 10:15:17 -0700 Subject: [PATCH 175/180] Update define_cullMask.py to allow more input This commit allows the script to use input arguments for the radius beyond which to cull and the fractional distance beyond which to cull. 
Previously those were both hard-coded values, which complicated using those methods within COMPASS. --- landice/mesh_tools_li/define_cullMask.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/landice/mesh_tools_li/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py index 6ce776c09..18343a871 100755 --- a/landice/mesh_tools_li/define_cullMask.py +++ b/landice/mesh_tools_li/define_cullMask.py @@ -19,7 +19,7 @@ parser.add_option("-f", "--file", dest="file", help="grid file to modify; default: landice_grid.nc", metavar="FILE") parser.add_option("-m", "--method", dest="method", help="method to use for marking cells to cull. Supported methods: 'noIce', 'numCells', 'distance', 'radius', 'edgeFraction'", metavar="METHOD") parser.add_option("-n", "--numCells", dest="numCells", default=5, help="number of cells to keep beyond ice extent", metavar="NUM") -parser.add_option("-d", "--distance", dest="distance", default=50, help="distance (km) beyond ice extent to keep", metavar="DIST") +parser.add_option("-d", "--distance", dest="distance", default=50, help="numeric value to use for the various methods: distance method->distance (km), radius method->radius (km), edgeFraction method->fraction of width or height", metavar="DIST") parser.add_option("-p", "--plot", dest="makePlot", help="Include to have the script generate a plot of the resulting mask, default=false", default=False, action="store_true") options, args = parser.parse_args() @@ -152,15 +152,22 @@ # ========= # cut out beyond some radius (good for the dome) elif maskmethod == 'radius': - print("Method: remove cells beyond a radius") - ind = np.nonzero( (xCell[:]**2 + yCell[:]**2)**0.5 > 26000.0 ) + dist=float(options.distance) + print("Method: remove cells beyond a radius of {} km from center of mesh".format(dist)) + xc = (xCell.max()-xCell.min())/2.0 + xCell.min() + yc = (yCell.max()-yCell.min())/2.0 + yCell.min() + ind = np.nonzero( ( (xCell[:]-xc)**2 + 
(yCell[:]-yc)**2)**0.5 > dist*1000.0 ) cullCell[ind] = 1 # ========= # cut off some fraction of the height/width on all 4 sides - useful for cleaning up a mesh from periodic_general elif maskmethod == 'edgeFraction': - print("Method: remove a fraction from all 4 edges") - frac=0.025 + frac=float(options.distance) + print("Method: remove a fraction from all 4 edges of {}".format(frac)) + if frac>=0.5: + sys.exit("ERROR: fraction cannot be >=0.5.") + if frac<0.0: + sys.exit("ERROR: fraction cannot be <0.") cullCell[:] = 0 width = xCell.max()-xCell.min() From f5178ca6fa9dabce2ce143b59f17aa9caef7e660 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 18 Sep 2017 22:57:03 +0200 Subject: [PATCH 176/180] Rewrite MOC southern boundar extractor It now computes edge signs based on the order of cells on a given edge rather than of vertices, which may fix the issue seen previously. --- .../moc_southern_boundary_extractor.py | 203 +++++++++--------- 1 file changed, 99 insertions(+), 104 deletions(-) diff --git a/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py index 5c89e59b2..c8d889a93 100755 --- a/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py +++ b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py @@ -9,96 +9,71 @@ boundary transect data on cells is not foreseen. Author: Xylar Asay-Davis -last modified: 11/02/2016 +last modified: 5/22/2018 ''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals import xarray import argparse import numpy -def extractSouthernBounary(mesh, moc, latBuffer): - # Extrcts the southern boundary of each region mask in moc. Mesh info - # is taken from mesh. latBuffer is a number of radians above the southern- - # most point that should be considered to definitely be in the southern - # boundary. 
- - def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): - # Follows the boundary from a starting edge to produce a sequence of - # edges that form a closed loop. - # - # startEdge is an edge on the boundary that will be both the start and - # end of the loop. - # - # isBoundaryEdge is a mask that indicates which edges are on the - # boundary - # - # returns lists of edges, edge signs and vertices - - boundaryEdgesOnEdge = -numpy.ones((nEdges, 2), int) - - boundaryEdges = numpy.arange(nEdges)[isBoundaryEdge] - nBoundaryEdges = len(boundaryEdges) - - # Find the edges on vertex of the vertices on each boundary edge. - # Each boundary edge must have valid vertices, so none should be out - # of bounds. - edgesOnVerticesOnBoundaryEdge = \ - edgesOnVertex[verticesOnEdge[boundaryEdges, :], :] - - # The (typically 3) edges on each vertex of a boundary edge - # will be the edge itself, another boundary edge and 1 or more - # non-boundary edges. We want only the other boundary edge - - # other edge not be this edge - mask = numpy.not_equal(edgesOnVerticesOnBoundaryEdge, - boundaryEdges.reshape((nBoundaryEdges, 1, 1))) - - # other edge must be in range - mask = numpy.logical_and(mask, edgesOnVerticesOnBoundaryEdge >= 0) - mask = numpy.logical_and(mask, edgesOnVerticesOnBoundaryEdge < nEdges) - - # other edge must be a boundary edge - otherEdgeMask = mask.copy() - otherEdgeMask[mask] = \ - isBoundaryEdge[edgesOnVerticesOnBoundaryEdge[mask]] - - # otherEdgeMask should have exactly one non-zero entry per vertex - assert(numpy.all(numpy.equal(numpy.sum(numpy.array(otherEdgeMask, int), - axis=2), 1))) - - (edgeIndices, voeIndices, eovIndices) = numpy.nonzero(otherEdgeMask) - - boundaryEdgesOnEdge = -numpy.ones((nEdges, 2), int) - boundaryEdgesOnEdge[boundaryEdges[edgeIndices], voeIndices] = \ - edgesOnVerticesOnBoundaryEdge[edgeIndices, voeIndices, eovIndices] - - iEdge = startEdge - edgeSequence = [] - edgeSigns = [] - vertexSequence = [] - signs = (1, -1) - 
vertexOnEdgeIndex = 1 - nextEdge = boundaryEdgesOnEdge[iEdge, vertexOnEdgeIndex] - while True: - edgeSequence.append(iEdge) - edgeSigns.append(signs[vertexOnEdgeIndex]) - vertexSequence.append(verticesOnEdge[iEdge, vertexOnEdgeIndex]) - - # a trick to determine which is the next vertex and edge to follow - vertexOnEdgeIndex = int(boundaryEdgesOnEdge[nextEdge, 0] == iEdge) - - iEdge = nextEdge - nextEdge = boundaryEdgesOnEdge[nextEdge, vertexOnEdgeIndex] - if iEdge == startEdge: +def getEdgeSequenceOnBoundary(startEdge, edgeSign, edgesOnVertex, + verticesOnEdge): + # Follows the boundary from a starting edge to produce a sequence of + # edges that form a closed loop. + # + # startEdge is an edge on the boundary that will be both the start and + # end of the loop. + # + # isBoundaryEdge is a mask that indicates which edges are on the + # boundary + # + # returns lists of edges, edge signs and vertices + + iEdge = startEdge + edgeSequence = [] + vertexSequence = [] + while(True): + assert(edgeSign[iEdge] == 1. or edgeSign[iEdge] == -1.) + if edgeSign[iEdge] == 1.: + v = 0 + else: + v = 1 + iVertex = verticesOnEdge[iEdge, v] + + eov = edgesOnVertex[iVertex, :] + + # find the edge that is not iEdge but is on the boundary + nextEdge = -1 + for edge in eov: + if edge != iEdge and edgeSign[edge] != 0: + nextEdge = edge break + assert(nextEdge != -1) + + edgeSequence.append(iEdge) + vertexSequence.append(iVertex) + + iEdge = nextEdge + + if iEdge == startEdge: + break - edgeSequence = numpy.array(edgeSequence) - edgeSigns = numpy.array(edgeSigns) - vertexSequence = numpy.array(vertexSequence) + edgeSequence = numpy.array(edgeSequence) + edgeSequenceSigns = edgeSign[edgeSequence] + vertexSequence = numpy.array(vertexSequence) - return (edgeSequence, edgeSigns, vertexSequence) + return (edgeSequence, edgeSequenceSigns, vertexSequence) + + +def extractSouthernBounary(mesh, mocMask, latBuffer): + # Extrcts the southern boundary of each region mask in mocMask. 
Mesh info + # is taken from mesh. latBuffer is a number of radians above the southern- + # most point that should be considered to definitely be in the southern + # boundary. southernBoundaryEdges = [] southernBounderyEdgeSigns = [] @@ -106,8 +81,8 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): nCells = mesh.dims['nCells'] nEdges = mesh.dims['nEdges'] - nRegions = moc.dims['nRegions'] - assert(moc.dims['nCells'] == nCells) + nRegions = mocMask.dims['nRegions'] + assert(mocMask.dims['nCells'] == nCells) # convert to python zero-based indices cellsOnEdge = mesh.variables['cellsOnEdge'].values-1 @@ -124,7 +99,9 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): southernBoundaryVertices = [] for iRegion in range(nRegions): - cellMask = moc.variables['regionCellMasks'][:, iRegion].values + name = mocMask.regionNames[iRegion].values.astype('U') + print(name) + cellMask = mocMask.variables['regionCellMasks'][:, iRegion].values # land cells are outside not in the MOC region cellsOnEdgeMask = numpy.zeros(cellsOnEdge.shape, bool) @@ -132,19 +109,34 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): cellsOnEdgeMask[cellsOnEdgeInRange] = \ cellMask[cellsOnEdge[cellsOnEdgeInRange]] == 1 - isMOCBoundaryEdge = (cellsOnEdgeMask[:, 0] != cellsOnEdgeMask[:, 1]) + print(' computing edge sign...') + edgeSign = numpy.zeros(nEdges) + # positive sign if the first cell on edge is in the region + mask = numpy.logical_and(cellsOnEdgeMask[:, 0], + numpy.logical_not(cellsOnEdgeMask[:, 1])) + edgeSign[mask] = -1. + # negative sign if the second cell on edge is in the region + mask = numpy.logical_and(cellsOnEdgeMask[:, 1], + numpy.logical_not(cellsOnEdgeMask[:, 0])) + edgeSign[mask] = 1. + isMOCBoundaryEdge = edgeSign != 0. 
edgesMOCBoundary = numpy.arange(nEdges)[isMOCBoundaryEdge] + print(' done.') startEdge = numpy.argmin(latEdge[isMOCBoundaryEdge]) startEdge = edgesMOCBoundary[startEdge] minLat = latEdge[startEdge] + print(' getting edge sequence...') # follow the boundary from this point to get a loop of edges # Note: it is possible but unlikely that the southern-most point is # not within bulk region of the MOC mask if the region is not a single # shape - edgeSequence, edgeSigns, vertexSequence = \ - getEdgeSequenceOnBoundary(startEdge, isMOCBoundaryEdge) + edgeSequence, edgeSequenceSigns, vertexSequence = \ + getEdgeSequenceOnBoundary(startEdge, edgeSign, edgesOnVertex, + verticesOnEdge) + + print(' done: {} edges in transect.'.format(len(edgeSequence))) aboveSouthernBoundary = latEdge[edgeSequence] > minLat + latBuffer @@ -168,7 +160,7 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): if len(startIndices) == 0: # the whole sequence is the southern boundary southernBoundaryEdges.append(edgeSequence) - southernBounderyEdgeSigns.append(edgeSigns) + southernBounderyEdgeSigns.append(edgeSequenceSigns) southernBoundaryVertices.append(vertexSequence) continue @@ -183,7 +175,7 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): indices = numpy.mod(indices, len(edgeSequence)) southernBoundaryEdges.append(edgeSequence[indices]) - southernBounderyEdgeSigns.append(edgeSigns[indices]) + southernBounderyEdgeSigns.append(edgeSequenceSigns[indices]) # we want one extra vertex in the vertex sequence indices = numpy.arange(endIndices[longest], @@ -196,11 +188,11 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): southernBoundaryVertices) -def addTransectsToMOC(mesh, moc, southernBoundaryEdges, +def addTransectsToMOC(mesh, mocMask, southernBoundaryEdges, southernBounderyEdgeSigns, southernBoundaryVertices): - # Creates transect fields in moc from the edges, edge signs and vertices - # defining the southern boundaries. 
Mesh info (nEdges and nVertices) is - # taken from the mesh file. + # Creates transect fields in mocMask from the edges, edge signs and + # vertices defining the southern boundaries. Mesh info (nEdges and + # nVertices) is taken from the mesh file. nTransects = len(southernBoundaryEdges) @@ -241,16 +233,19 @@ def addTransectsToMOC(mesh, moc, southernBoundaryEdges, transectVertexGlobalIDs[iTransect, 0:transectCount] \ = southernBoundaryVertices[iTransect] + 1 - moc['transectEdgeMasks'] = (('nEdges', 'nTransects'), transectEdgeMasks) - moc['transectEdgeMaskSigns'] = (('nEdges', 'nTransects'), - transectEdgeMaskSigns) - moc['transectEdgeGlobalIDs'] = (('nTransects', 'maxEdgesInTransect'), - transectEdgeGlobalIDs) + mocMask['transectEdgeMasks'] = \ + (('nEdges', 'nTransects'), transectEdgeMasks) + mocMask['transectEdgeMaskSigns'] = (('nEdges', 'nTransects'), + transectEdgeMaskSigns) + mocMask['transectEdgeGlobalIDs'] = (('nTransects', 'maxEdgesInTransect'), + transectEdgeGlobalIDs) + + mocMask['transectVertexMasks'] = (('nVertices', 'nTransects'), + transectVertexMasks) + mocMask['transectVertexGlobalIDs'] = \ + (('nTransects', 'maxVerticesInTransect'), transectVertexGlobalIDs) - moc['transectVertexMasks'] = (('nVertices', 'nTransects'), - transectVertexMasks) - moc['transectVertexGlobalIDs'] = (('nTransects', 'maxVerticesInTransect'), - transectVertexGlobalIDs) + mocMask['transectNames'] = mocMask.regionNames if __name__ == "__main__": @@ -270,15 +265,15 @@ def addTransectsToMOC(mesh, moc, southernBoundaryEdges, required=True) args = parser.parse_args() - moc = xarray.open_dataset(args.in_file) + mocMask = xarray.open_dataset(args.in_file) mesh = xarray.open_dataset(args.mesh_file) southernBoundaryEdges, southernBounderyEdgeSigns, \ southernBoundaryVertices = \ - extractSouthernBounary(mesh, moc, latBuffer=3.*numpy.pi/180.) + extractSouthernBounary(mesh, mocMask, latBuffer=3.*numpy.pi/180.) 
- addTransectsToMOC(mesh, moc, southernBoundaryEdges, + addTransectsToMOC(mesh, mocMask, southernBoundaryEdges, southernBounderyEdgeSigns, southernBoundaryVertices) - moc.to_netcdf(args.out_file) + mocMask.to_netcdf(args.out_file) From cafc0e5d1ff5264ae7243af71a41d310afc3c3aa Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 12 Aug 2019 14:50:06 -0600 Subject: [PATCH 177/180] Add MOC southern transect script to package --- conda_package/recipe/meta.yaml | 1 + conda_package/setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index 9445b05ce..0be0e9172 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -72,6 +72,7 @@ test: - add_land_locked_cells_to_mask.py --help - widen_transect_edge_masks.py --help - add_critical_land_blockages_to_mask.py --help + - moc_southern_boundary_extractor.py --help - paraview_vtk_field_extractor.py -f mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc -v latCell,lonCell --ignore_time -o vtk_test - split_grids --help - merge_grids --help diff --git a/conda_package/setup.py b/conda_package/setup.py index 5d05dfef2..82b57b3d9 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -52,6 +52,7 @@ 'ocean/coastline_alteration/add_land_locked_cells_to_mask.py', 'ocean/coastline_alteration/widen_transect_edge_masks.py', 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py', + 'ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py', 'visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py'], install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], entry_points={'console_scripts': From 92a6c70a1358725a0c7de5caca74110a3360794a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 13 Aug 2019 10:09:41 -0600 Subject: [PATCH 178/180] Rename dimension in transectNames --- .../moc_southern_boundary_extractor.py | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py index c8d889a93..8a60d4c8f 100755 --- a/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py +++ b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py @@ -245,7 +245,8 @@ def addTransectsToMOC(mesh, mocMask, southernBoundaryEdges, mocMask['transectVertexGlobalIDs'] = \ (('nTransects', 'maxVerticesInTransect'), transectVertexGlobalIDs) - mocMask['transectNames'] = mocMask.regionNames + mocMask['transectNames'] = mocMask.regionNames.rename( + {'nRegions': 'nTransects'}) if __name__ == "__main__": From 12ef7fd09871d3fd57e55300708b81f9771ec33d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 13 Aug 2019 14:48:01 -0600 Subject: [PATCH 179/180] Better document --positive_lon flag for mask creator This flag should not be used in nearly all standar cases. --- mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp b/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp index 2cb22cfab..aeb36f746 100644 --- a/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp +++ b/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp @@ -147,8 +147,12 @@ void print_usage() {/*{{{*/ cout << "\t\t\tthat will be used as seed points in a flood fill algorithim. This is useful when trying to remove isolated cells from a mesh." << endl; cout << "\t\t-f file.geojson: This argument pair defines a set of geojson features (regions, transects, or points)" << endl; cout << "\t\t\tthat will be converted into masks / lists." << endl; - cout << "\t\t--positive_lon: This argument causes the logitude range to be 0-360 degrees with the prime meridian at 0 degrees." 
<< endl; - cout << "\t\t\tIf this flag is not set, the logitude range is -180-180 with 0 degrees being the prime meridian." << endl; + cout << "\t\t--positive_lon: It is unlikely that you want this argument. In rare cases when using a non-standard geojson" << endl; + cout << "\t\t\tfile where the logitude ranges from 0 to 360 degrees (with the prime meridian at 0 degrees), use this flag." << endl; + cout << "\t\t\tIf this flag is not set, the logitude range is -180-180 with 0 degrees being the prime meridian, which is the" << endl; + cout << "\t\t\tcase for standar geojson files including all features from the geometric_feature repo." << endl; + cout << "\t\t\tThe fact that longitudes in the input MPAS mesh range from 0 to 360 is not relevant to this flag," << endl; + cout << "\t\t\tas latitude and longitude are recomputed internally from Cartesian coordinates." << endl; cout << "\t\t\tWhether this flag is passed in or not, any longitudes written are in the 0-360 range." << endl; }/*}}}*/ From da0423195f8abe4e4c0fb9c2e8d59f3664cc1f07 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Sep 2019 15:45:50 -0600 Subject: [PATCH 180/180] Add missing transect variables --- .../moc_southern_boundary_extractor.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py index 8a60d4c8f..99158622f 100755 --- a/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py +++ b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py @@ -248,6 +248,15 @@ def addTransectsToMOC(mesh, mocMask, southernBoundaryEdges, mocMask['transectNames'] = mocMask.regionNames.rename( {'nRegions': 'nTransects'}) + mocMask['nTransectsInGroup'] = mocMask.nRegionsInGroup.rename( + {'nRegionGroups': 'nTransectGroups'}) + + mocMask['transectsInGroup'] = mocMask.regionsInGroup.rename( + {'nRegionGroups': 
'nTransectGroups', 'maxRegionsInGroup': 'maxTransectsInGroup'}) + + mocMask['transectGroupNames'] = mocMask.regionGroupNames.rename( + {'nRegionGroups': 'nTransectGroups'}) + if __name__ == "__main__":