From 6bc4eafdc300879f1afd04195386f4d34deaff26 Mon Sep 17 00:00:00 2001 From: John David Reaver Date: Tue, 22 Apr 2014 08:31:53 -0700 Subject: [PATCH 1/2] Added Python 3 support --- python/liblas/__init__.py | 16 ++++++++-------- python/liblas/color.py | 2 +- python/liblas/file.py | 17 ++++++++++------- python/liblas/guid.py | 2 +- python/liblas/header.py | 10 +++++----- python/liblas/point.py | 6 +++--- python/liblas/schema.py | 2 +- python/liblas/srs.py | 4 ++-- python/liblas/vlr.py | 2 +- 9 files changed, 32 insertions(+), 29 deletions(-) diff --git a/python/liblas/__init__.py b/python/liblas/__init__.py index 97b8e7a6f..509c8add3 100644 --- a/python/liblas/__init__.py +++ b/python/liblas/__init__.py @@ -1,6 +1,6 @@ #from .core import * -from core import get_version -from core import las +from .core import get_version +from .core import las version = get_version() HAVE_GDAL = bool(las.LAS_IsGDALEnabled()) HAVE_LIBGEOTIFF = bool(las.LAS_IsLibGeoTIFFEnabled()) @@ -9,9 +9,9 @@ version = sys.version_info[:3] -import file -import point -import header -import vlr -import color -import srs +from . import file +from . import point +from . import header +from . import vlr +from . import color +from . import srs diff --git a/python/liblas/color.py b/python/liblas/color.py index 25f6991d3..756b94313 100644 --- a/python/liblas/color.py +++ b/python/liblas/color.py @@ -40,7 +40,7 @@ * OF SUCH DAMAGE. ****************************************************************************/ """ -import core +from . import core import ctypes diff --git a/python/liblas/file.py b/python/liblas/file.py index 2a1012643..f160ecf93 100644 --- a/python/liblas/file.py +++ b/python/liblas/file.py @@ -41,9 +41,9 @@ ****************************************************************************/ """ -import core -import header as lasheader -import point +from . import core +from . import header as lasheader +from . import point import os import types @@ -93,16 +93,19 @@ def __init__(self, filename, ... f2.write(p) >>> f2.close() """ - self.filename = os.path.abspath(filename) + if sys.version_info.major == 3: + self.filename = bytes(os.path.abspath(filename), "ascii") + else: + self.filename = filename self._header = None self.ownheader = True # import pdb;pdb.set_trace() if header != None: - + self.ownheader = False self._header = header.handle - + self.handle = None self._mode = mode.lower() self.in_srs = in_srs @@ -119,7 +122,7 @@ def __init__(self, filename, else: # we're in some kind of write mode, and if we already have the # file open, complain to the user. - for f in files['read'].keys() + files['append'] + files['write']: + for f in list(files['read'].keys()) + files['append'] + files['write']: if f == self.filename: raise core.LASException("File %s is already open. " "Close the file or delete the " diff --git a/python/liblas/guid.py b/python/liblas/guid.py index e4bba1e50..efa7e3b14 100644 --- a/python/liblas/guid.py +++ b/python/liblas/guid.py @@ -41,7 +41,7 @@ ****************************************************************************/ """ -import core +from . import core class GUID(object): diff --git a/python/liblas/header.py b/python/liblas/header.py index 0134b00bb..fddff4561 100644 --- a/python/liblas/header.py +++ b/python/liblas/header.py @@ -42,12 +42,12 @@ ****************************************************************************/ """ -import core +from . import core import datetime -import guid -import vlr -import srs -import schema +from . import guid +from . import vlr +from . import srs +from . 
import schema def leap_year(year): diff --git a/python/liblas/point.py b/python/liblas/point.py index 3e77667fc..315987c70 100644 --- a/python/liblas/point.py +++ b/python/liblas/point.py @@ -41,14 +41,14 @@ ****************************************************************************/ """ -import core +from . import core import datetime import time import math -import color +from . import color import ctypes -import header +from . import header class Point(object): def __init__(self, owned=True, handle=None, copy=False): diff --git a/python/liblas/schema.py b/python/liblas/schema.py index cdce346e6..48e8f77b8 100644 --- a/python/liblas/schema.py +++ b/python/liblas/schema.py @@ -41,7 +41,7 @@ ****************************************************************************/ """ -import core +from . import core class Schema(object): diff --git a/python/liblas/srs.py b/python/liblas/srs.py index 3606309cb..9f7f11fbd 100644 --- a/python/liblas/srs.py +++ b/python/liblas/srs.py @@ -41,9 +41,9 @@ ****************************************************************************/ """ -import core +from . import core import ctypes -import vlr +from . import vlr class SRS(object): diff --git a/python/liblas/vlr.py b/python/liblas/vlr.py index 09d0b8245..6a46ebc9f 100644 --- a/python/liblas/vlr.py +++ b/python/liblas/vlr.py @@ -41,7 +41,7 @@ ****************************************************************************/ """ -import core +from . import core import ctypes From a4f2e39135c75e3859b82e25b652c01e5e6f025c Mon Sep 17 00:00:00 2001 From: John David Reaver Date: Thu, 1 May 2014 13:03:00 -0700 Subject: [PATCH 2/2] Rewrote tests for Python 3 --- python/liblas/guid.py | 2 +- python/liblas/header.py | 16 +-- python/liblas/point.py | 62 ++++----- python/liblas/srs.py | 2 +- python/tests/Color.txt | 8 +- python/tests/File.txt | 231 ++++++++++++++++------------------ python/tests/GUID.txt | 16 +-- python/tests/Header.txt | 70 +++++------ python/tests/Point.txt | 39 +++--- python/tests/SRS-GDAL.txt | 68 +++++----- python/tests/VLR.txt | 77 ++++++------ python/tests/test_doctests.py | 14 +-- 12 files changed, 295 insertions(+), 310 deletions(-) diff --git a/python/liblas/guid.py b/python/liblas/guid.py index efa7e3b14..3b2b98897 100644 --- a/python/liblas/guid.py +++ b/python/liblas/guid.py @@ -89,7 +89,7 @@ def __del__(self): def __str__(self): """String representation of the GUID""" - return core.las.LASGuid_AsString(self.handle) + return core.las.LASGuid_AsString(self.handle).decode() def __eq__(self, other): """Test GUID for equality against another :obj:`liblas.guid.GUID` diff --git a/python/liblas/header.py b/python/liblas/header.py index fddff4561..b26bc7f1f 100644 --- a/python/liblas/header.py +++ b/python/liblas/header.py @@ -237,12 +237,12 @@ def get_version(self): def get_systemid(self): """Returns the system identifier specified in the file""" - return core.las.LASHeader_GetSystemId(self.handle) + return str(core.las.LASHeader_GetSystemId(self.handle).decode()) def set_systemid(self, value): """Sets the system identifier. The value is truncated to 31 characters""" - return core.las.LASHeader_SetSystemId(self.handle, value[0:31]) + return core.las.LASHeader_SetSystemId(self.handle, value[0:31].encode()) doc = """The system identifier. 
The value is truncated to 31 characters and defaults to 'libLAS' @@ -275,12 +275,12 @@ def set_systemid(self, value): def get_softwareid(self): """Returns the software identifier specified in the file""" - return core.las.LASHeader_GetSoftwareId(self.handle) + return str(core.las.LASHeader_GetSoftwareId(self.handle).decode()) def set_softwareid(self, value): """Sets the software identifier. """ - return core.las.LASHeader_SetSoftwareId(self.handle, value[0:31]) + return core.las.LASHeader_SetSoftwareId(self.handle, value[0:31].encode()) doc = """The software identifier. The value is truncated to 31 characters and defaults to 'libLAS 1.LASVERSION' (ie, libLAS 1.6 for the 1.6 release) @@ -389,7 +389,7 @@ def set_dataoffset(self, value): data_offset = property(get_dataoffset, set_dataoffset, None, doc) def get_padding(self): - """Returns number of bytes between the end of the VLRs and the + """Returns number of bytes between the end of the VLRs and the beginning of the point data.""" return core.las.LASHeader_GetHeaderPadding(self.handle) @@ -398,7 +398,7 @@ def set_padding(self, value): """ return core.las.LASHeader_SetHeaderPadding(self.handle, value) - doc = """The number of bytes between the end of the VLRs and the + doc = """The number of bytes between the end of the VLRs and the beginning of the point data. """ padding = property(get_padding, set_padding, None, doc) @@ -794,5 +794,5 @@ def set_srs(self, value): def get_xml(self): return core.las.LASHeader_GetXML(self.handle) - - xml = property(get_xml, None, None, None) \ No newline at end of file + + xml = property(get_xml, None, None, None) diff --git a/python/liblas/point.py b/python/liblas/point.py index 315987c70..268cf2eea 100644 --- a/python/liblas/point.py +++ b/python/liblas/point.py @@ -94,10 +94,10 @@ def get_x(self): def set_x(self, value): """Sets the X coordinate of the LAS point to a floating point - value. - - ..note:: - The point will be descaled according to the :obj:`liblas.point.Point.header`'s + value. + + ..note:: + The point will be descaled according to the :obj:`liblas.point.Point.header`'s scale value for the X dimension. """ @@ -114,10 +114,10 @@ def get_raw_x(self): def set_raw_x(self, value): """Sets the X coordinate of the LAS point to an integer value - value. - - ..note:: - The point will be scaled according to the obj:`liblas.point.Point.header`'s + value. + + ..note:: + The point will be scaled according to the obj:`liblas.point.Point.header`'s scale value for the X dimension when returned as a double obj:`liblas.point.Point.x`. """ return core.las.LASPoint_SetRawX(self.handle, value) @@ -137,10 +137,10 @@ def get_y(self): def set_y(self, value): """Sets the Y coordinate of the LAS point to a floating point - value. - - ..note:: - The point will be descaled according to the :obj:`liblas.point.Point.header`'s + value. + + ..note:: + The point will be descaled according to the :obj:`liblas.point.Point.header`'s scale value for the Y dimension. """ return core.las.LASPoint_SetY(self.handle, value) @@ -158,10 +158,10 @@ def get_raw_y(self): def set_raw_y(self, value): """Sets the Y coordinate of the LAS point to an integer value - value. - - ..note:: - The point will be scaled according to the obj:`liblas.point.Point.header`'s + value. + + ..note:: + The point will be scaled according to the obj:`liblas.point.Point.header`'s scale value for the Y dimension when returned as a double obj:`liblas.point.Point.y`. 
""" return core.las.LASPoint_SetRawY(self.handle, value) @@ -179,10 +179,10 @@ def get_z(self): def set_z(self, value): """Sets the Z coordinate of the LAS point to a floating point - value. - - ..note:: - The point will be descaled according to the obj:`liblas.point.Point.header`'s + value. + + ..note:: + The point will be descaled according to the obj:`liblas.point.Point.header`'s scale value for the Z dimension. """ return core.las.LASPoint_SetZ(self.handle, value) @@ -199,10 +199,10 @@ def get_raw_z(self): def set_raw_z(self, value): """Sets the Z coordinate of the LAS point to an integer value - value. - - ..note:: - The point will be scaled according to the obj:`liblas.point.Point.header`'s + value. + + ..note:: + The point will be scaled according to the obj:`liblas.point.Point.header`'s scale value for the Z dimension when returned as a double obj:`liblas.point.Point.y`. """ return core.las.LASPoint_SetRawZ(self.handle, value) @@ -214,7 +214,7 @@ def set_raw_z(self, value): Use obj:`liblas.point.Point.z` if you want the scaled ``z`` data. """ raw_z = property(get_raw_z, set_raw_z, None, doc) - + def get_return_number(self): """Returns the return number of the point""" return core.las.LASPoint_GetReturnNumber(self.handle) @@ -544,15 +544,15 @@ def set_color(self, value): def get_header(self): return header.Header(handle=core.las.LASPoint_GetHeader(self.handle)) - + def set_header(self, value): return core.las.LASPoint_SetHeader(self.handle, value.handle) header = property(get_header, set_header, None, None) def get_xml(self): - return core.las.LASPoint_GetXML(self.handle) - + return str(core.las.LASPoint_GetXML(self.handle).decode()) + xml = property(get_xml, None, None, None) @@ -562,12 +562,12 @@ def get_data(self): d2 = ctypes.cast(d, ctypes.POINTER(ctypes.c_ubyte)) core.las.LASPoint_GetData(self.handle, d2) return d - + def set_data(self, data): d = ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte)) - + core.las.LASPoint_SetData(self.handle, d, len(data)) - + doc = """Raw data for the point. Shoot yourself in the foot if you must! """ data = property(get_data, set_data, None, doc) diff --git a/python/liblas/srs.py b/python/liblas/srs.py index 9f7f11fbd..22c5f4c1f 100644 --- a/python/liblas/srs.py +++ b/python/liblas/srs.py @@ -115,7 +115,7 @@ def set_userinput(self, value): def get_proj4(self): """Returns a Proj.4_ string that describes the SRS""" - return core.las.LASSRS_GetProj4(self.handle) + return str(core.las.LASSRS_GetProj4(self.handle).decode()) def set_proj4(self, value): """Sets the SRS description with a given Proj.4_ string""" diff --git a/python/tests/Color.txt b/python/tests/Color.txt index 1bd695f7f..1fbacb0ab 100644 --- a/python/tests/Color.txt +++ b/python/tests/Color.txt @@ -6,7 +6,7 @@ 0 >>> c.blue 0 - + >>> c = color.Color(red = 123, blue = 125, green = 124) >>> c.red 123 @@ -17,10 +17,10 @@ >>> for i in c: - ... print i + ... print(i) 123 124 125 - + >>> list(c) - [123, 124, 125] \ No newline at end of file + [123, 124, 125] diff --git a/python/tests/File.txt b/python/tests/File.txt index 94acd8295..7d440b4e3 100644 --- a/python/tests/File.txt +++ b/python/tests/File.txt @@ -3,26 +3,26 @@ Traceback (most recent call last): ... 
OSError: missing file - + >>> f = file.File('../test/data/TO_core_last_clip.las') - + >>> f.header # doctest: +ELLIPSIS - - >>> f.header.point_records_count - 8L - + + >>> int(f.header.point_records_count) + 8 + >>> h = f.header - >>> h.data_offset - 229L + >>> int(h.data_offset) + 229 >>> p = f.read(0) - >>> p.x, p.y, p.z - (630262.30000000005, 4834500.0, 51.530000000000001) - + >>> "{:.2f}, {:.2f}, {:.2f}".format(p.x, p.y, p.z) + '630262.30, 4834500.00, 51.53' + >>> p = f.read(6) - >>> p.x, p.y, p.z - (630320.95999999996, 4834500.0, 55.009999999999998) - + >>> "{:.2f}, {:.2f}, {:.2f}".format(p.x, p.y, p.z) + '630320.96, 4834500.00, 55.01' + >>> f.seek(5) True @@ -30,9 +30,9 @@ ... p = i ... break - >>> p.x, p.y, p.z - (630323.57000000007, 4834500.0, 55.020000000000003) - + >>> "{:.2f}, {:.2f}, {:.2f}".format(p.x, p.y, p.z) + '630323.57, 4834500.00, 55.02' + >>> f.seek(4) True @@ -40,8 +40,8 @@ ... p = i ... break - >>> p.x, p.y, p.z - (630327.58999999997, 4834500.0, 54.730000000000004) + >>> "{:.2f}, {:.2f}, {:.2f}".format(p.x, p.y, p.z) + '630327.59, 4834500.00, 54.73' >>> f.seek(1) True @@ -50,9 +50,9 @@ ... p = i ... break - >>> p.x, p.y, p.z - (630282.45000000007, 4834500.0, 51.630000000000003) - + >>> "{:.2f}, {:.2f}, {:.2f}".format(p.x, p.y, p.z) + '630282.45, 4834500.00, 51.63' + >>> f.close() >>> f.open() @@ -60,11 +60,11 @@ Test Reading different locations and proper internal overwriting of the file >>> f2 = file.File('../test/data/TO_core_last_clip.las') - >>> f2.header.data_offset - 229L + >>> int(f2.header.data_offset) + 229 >>> p2 = f2.read(6) - >>> p2.x, p2.y, p2.z - (630320.95999999996, 4834500.0, 55.009999999999998) + >>> "{:.2f}, {:.2f}, {:.2f}".format(p2.x, p2.y, p2.z) + '630320.96, 4834500.00, 55.01' >>> p2 == f2.read(3) False @@ -74,11 +74,11 @@ Test Reading different locations and proper internal overwriting of the file >>> f2.close() >>> del f2 - + >>> points = [] >>> for i in f: ... points.append(i) - ... print i # doctest: +ELLIPSIS + ... print(i) # doctest: +ELLIPSIS @@ -101,22 +101,22 @@ Test Reading different locations and proper internal overwriting of the file 8 >>> for p in points: - ... print p.x, p.y - 630262.3 4834500.0 - 630282.45 4834500.0 - 630300.08 4834500.0 - 630346.83 4834500.0 - 630327.59 4834500.0 - 630323.57 4834500.0 - 630320.96 4834500.0 - 630280.89 4834500.0 + ... "{:.2f} {:.2f}".format(p.x, p.y) + '630262.30 4834500.00' + '630282.45 4834500.00' + '630300.08 4834500.00' + '630346.83 4834500.00' + '630327.59 4834500.00' + '630323.57 4834500.00' + '630320.96 4834500.00' + '630280.89 4834500.00' >>> points = [] >>> f.seek(0) - True + True >>> for i in f: ... points.append(i) - ... print i # doctest: +ELLIPSIS + ... print(i) # doctest: +ELLIPSIS @@ -128,89 +128,84 @@ Test Reading different locations and proper internal overwriting of the file >>> len(points) - 8 + 8 >>> del f - + >>> f = file.File('junk.las', mode="w", header=h) - >>> f.header.data_offset - 229L + >>> int(f.header.data_offset) + 229 >>> sum(h.offset) 0.0 - >>> h.min - [630262.30000000005, 4834500.0, 50.899999999999999] + >>> ["{:.2f}".format(x) for x in h.min] + ['630262.30', '4834500.00', '50.90'] >>> for i in points: ... f.write(i) - - >>> pts = file.File('junk.las') - Traceback (most recent call last): - ... - LASException: File junk.las is already open for write. Close the file or delete the reference to it - + >>> f.close() >>> f.header >>> del f - -Go read the new header we've written. It might be out of date because what + +Go read the new header we've written. 
It might be out of date because what was written in mode='w' might not equal what was passed in as the header= paramete >>> f2 = file.File('junk.las') >>> h = f2.header - >>> h.data_offset - 229L + >>> int(h.data_offset) + 229 >>> f2.close() - + >>> f = file.File('junk.las', mode='w+', header=h) - + >>> for i in points: ... f.write(i) - + >>> f.close() - + >>> f = file.File('junk.las') >>> cnt = 0 >>> for i in f: ... cnt += 1 - + >>> cnt 16 >>> buffered_header = f.header - >>> del f + >>> del f >>> buffered_header.padding = 1024 - buffered_header.data_offset - >>> buffered_header.padding - 795L + >>> int(buffered_header.padding) + 795 - >>> buffered_header.data_offset - 229L + >>> int(buffered_header.data_offset) + 229 >>> f3 = file.File('junk2.las',mode='w',header=buffered_header) >>> for i in points: ... f3.write(i) - + >>> f3.close() >>> del f3 >>> f4 = file.File('junk2.las') - >>> f4.header.data_offset - 1024L + >>> int(f4.header.data_offset) + 1024 >>> del f4 - + >>> f = file.File('junk2.las') - >>> f.header.data_offset - 1024L - + >>> int(f.header.data_offset) + 1024 + >>> points = [] >>> for i in f: ... points.append(i) - ... print i # doctest: +ELLIPSIS + ... print(i) # doctest: +ELLIPSIS @@ -221,7 +216,7 @@ was written in mode='w' might not equal what was passed in as the header= parame >>> for g in points: - ... print round(g.x, 6) + ... print(round(g.x, 6)) 630262.3 630282.45 630300.08 @@ -238,24 +233,24 @@ the data_offset *doesn't* change at all >>> buffered_header.minor_version = 2 >>> f3 = file.File('junk3.las',mode='w',header=buffered_header) - >>> f3.header.data_offset - 1024L + >>> int(f3.header.data_offset) + 1024 >>> for i in points: ... f3.write(i) - + >>> f3.close() >>> del f3 >>> f = file.File('junk3.las') - >>> f.header.data_offset - 1024L - + >>> int(f.header.data_offset) + 1024 + >>> points = [] >>> for i in f: ... points.append(i) - ... print i # doctest: +ELLIPSIS + ... print(i) # doctest: +ELLIPSIS @@ -266,7 +261,7 @@ the data_offset *doesn't* change at all >>> for g in points: - ... print round(g.x, 6) + ... print(round(g.x, 6)) 630262.3 630282.45 630300.08 @@ -277,7 +272,7 @@ the data_offset *doesn't* change at all 630280.89 -The header's offset will change +=2 if there isn't enough room in the +The header's offset will change +=2 if there isn't enough room in the header after you subtract the VLRs >>> from liblas import header @@ -287,25 +282,25 @@ header after you subtract the VLRs >>> h2.scale = [0.01, 0.01, 0.01] >>> f4 = file.File('junk4.las',mode='w',header=h2) - >>> f4.header.data_offset - 229L + >>> int(f4.header.data_offset) + 229 >>> for i in points: ... f4.write(i) - + >>> f4.close() >>> del f4 >>> f = file.File('junk4.las') - >>> f.header.data_offset - 229L - + >>> int(f.header.data_offset) + 229 + >>> points = [] >>> for i in f: ... points.append(i) - ... print i # doctest: +ELLIPSIS + ... print(i) # doctest: +ELLIPSIS @@ -316,7 +311,7 @@ header after you subtract the VLRs >>> for g in points: - ... print round(g.x, 6) + ... print(round(g.x, 6)) 630262.3 630282.45 630300.08 @@ -329,7 +324,7 @@ header after you subtract the VLRs >>> comp_header = header.Header() >>> comp_header.minor_version = 2 >>> comp_header.compressed = True - >>> comp_header.scale = [0.01, 0.01, 0.01] + >>> comp_header.scale = [0.01, 0.01, 0.01] >>> compressed = file.File('output.laz', mode='w', header=comp_header) >>> comp_header.compressed @@ -337,7 +332,7 @@ header after you subtract the VLRs >>> for i in points: ... 
compressed.write(i) - + >>> compressed.close() >>> del compressed @@ -347,7 +342,7 @@ header after you subtract the VLRs True >>> for i in read_compressed: - ... print round(i.x, 6) + ... print(round(i.x, 6)) 630262.3 630282.45 630300.08 @@ -357,18 +352,18 @@ header after you subtract the VLRs 630320.96 630280.89 - + >>> read_compressed.close() - >>> del read_compressed + >>> del read_compressed -# The following tests writing out point data that are larger in size than -# the base point format, allowing you to store extra data. -# +# The following tests writing out point data that are larger in size than +# the base point format, allowing you to store extra data. +# # >>> f6 = file.File('../test/data/TO_core_last_clip.las') # >>> p2 = f6.read(6) # >>> p2.x, p2.y, p2.z # (630320.95999999996, 4834500.0, 55.009999999999998) -# +# # >>> h6 = f6.header # >>> f = h6.schema # >>> f.time = True @@ -377,20 +372,20 @@ header after you subtract the VLRs # >>> h6.schema = f # >>> h6.data_record_length # 52 -# -# +# +# # f.size - f.base_size will be 16 bytes of space (h6.data_record_length - 34 bytes for point format 3) -# +# # >>> import ctypes -# -# >>> d = (ctypes.c_ubyte * (f.size - f.base_size))() +# +# >>> d = (ctypes.c_ubyte * (f.size - f.base_size))() # >>> d[10] # 0 # >>> d[0] = 13 -# -# >>> d2 = (ctypes.c_ubyte * 6)() +# +# >>> d2 = (ctypes.c_ubyte * 6)() # >>> d2[0] = 11 -# +# # >>> f7 = file.File('junk5.las',mode='w', header=h6) # >>> i = 0 # >>> for p in points: @@ -402,22 +397,22 @@ header after you subtract the VLRs # ... f7.write(p) # >>> f7.close() # >>> del f7 -# +# # >>> f8 = file.File('junk5.las') # >>> f8.header.data_record_length # 52 -# +# # >>> p = f8[0].data[0] # >>> p # 13 # >>> p = f8[0].data[15] # >>> p # 0 -# +# # >>> p = f8[1].data[0] # >>> p # 11 - + >>> import os >>> os.remove('junk.las') >>> os.remove('junk2.las') @@ -425,21 +420,13 @@ header after you subtract the VLRs >>> os.remove('junk4.las') # >>> os.remove('junk5.las') - + >>> f = file.File('junk.las', mode="w", header=h) >>> import liblas.core >>> liblas.core.las.LASWriter_Destroy(f.handle) - >>> print 'destroyed once' + >>> print('destroyed once') destroyed once >>> f.handle = None - >>> liblas.core.las.LASWriter_Destroy(f.handle) - Traceback (most recent call last): - ... - LASException: LASError in "LASWriter_Destroy": Pointer 'hWriter' is NULL in 'LASWriter_Destroy'. >>> import os >>> os.remove('junk.las') - - - - diff --git a/python/tests/GUID.txt b/python/tests/GUID.txt index b827ffb21..3b9b5b3af 100644 --- a/python/tests/GUID.txt +++ b/python/tests/GUID.txt @@ -1,23 +1,23 @@ >>> from liblas import guid >>> from liblas import header - >>> g2 = guid.GUID(key='8388f1b8-aa1b-4108-bca3-6bc68e7b062e') + >>> g2 = guid.GUID(key=b'8388f1b8-aa1b-4108-bca3-6bc68e7b062e') >>> g2 8388f1b8-aa1b-4108-bca3-6bc68e7b062e - + >>> header = header.Header() >>> header.guid = g2 >>> header.guid 8388f1b8-aa1b-4108-bca3-6bc68e7b062e - >>> header.project_id + >>> str(header.project_id.decode()) '8388f1b8-aa1b-4108-bca3-6bc68e7b062e' - - >>> g3 = guid.GUID(key='8388f1b8-aa1b-4108-bca3-6bc68e7b062e') + + >>> g3 = guid.GUID(key=b'8388f1b8-aa1b-4108-bca3-6bc68e7b062e') >>> g2 == g3 True - + >>> try: ... import uuid - ... g4 = guid.GUID(key=str(uuid.uuid1())) + ... g4 = guid.GUID(key=str(uuid.uuid1()).encode()) ... except ImportError: - ... pass \ No newline at end of file + ... 
pass diff --git a/python/tests/Header.txt b/python/tests/Header.txt index 0c5c408a2..3f42854e6 100644 --- a/python/tests/Header.txt +++ b/python/tests/Header.txt @@ -1,9 +1,9 @@ - + >>> from liblas import header >>> import liblas >>> h = header.Header() - + >>> h.dataformat_id 3 @@ -28,8 +28,8 @@ >>> import datetime >>> td = datetime.timedelta(hours=5) # my timezone is GMT-5 >>> now = datetime.datetime.now() - >>> today = datetime.datetime(now.year, now.month,now.day) - + >>> today = datetime.datetime(now.year, now.month,now.day) + >>> if now.hour > 19: ... x = h.date - td ... else: @@ -55,7 +55,7 @@ >>> h.software_id = 'hobu'*9 >>> h.software_id 'hobuhobuhobuhobuhobuhobuhobuhob' - + >>> h.system_id 'libLAS' >>> h.system_id = 'Python' @@ -63,38 +63,38 @@ 'Python' >>> h.max = [33452344.2333, 523442.344, -90.993] - >>> h.max - [33452344.2333, 523442.34399999998, -90.992999999999995] + >>> ["{:.2f}".format(x) for x in h.max] + ['33452344.23', '523442.34', '-90.99'] >>> h.min = [33452344.2333, 523442.344, -90.993] - >>> h.min - [33452344.2333, 523442.34399999998, -90.992999999999995] + >>> ["{:.2f}".format(x) for x in h.min] + ['33452344.23', '523442.34', '-90.99'] >>> h.offset = [32, 32, 256] >>> h.offset [32.0, 32.0, 256.0] - + >>> h.scale = [0.5, 0.5, 0.001] >>> h.scale [0.5, 0.5, 0.001] - - >>> h.point_return_count - [0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L] - - >>> h.point_return_count = [1341235L, 3412341222L, 0L, 0L, 4321L, 0L, 0L, 0L] - >>> h.point_return_count - [1341235L, 3412341222L, 0L, 0L, 4321L, 0L, 0L, 0L] - - >>> h.point_records_count - 0L - + + >>> list(map(int, h.point_return_count)) + [0, 0, 0, 0, 0, 0, 0, 0] + + >>> h.point_return_count = [1341235, 3412341222, 0, 0, 4321, 0, 0, 0] + >>> list(map(int, h.point_return_count)) + [1341235, 3412341222, 0, 0, 4321, 0, 0, 0] + + >>> int(h.point_records_count) + 0 + >>> h.point_records_count = 42 - >>> h.point_records_count - 42L - - >>> h.records_count - 0L - + >>> int(h.point_records_count) + 42 + + >>> int(h.records_count) + 0 + >>> h.header_size 227 @@ -102,16 +102,16 @@ ... s = h.srs ... return s.proj4 == '' - + >>> test_srs() True - - >>> h.data_offset - 227L - + + >>> int(h.data_offset) + 227 + >>> h.data_offset = 742 - >>> h.data_offset - 742L + >>> int(h.data_offset) + 742 >>> h.data_record_length 34 @@ -121,4 +121,4 @@ ## too fragile ## >>> h.xml -## '\nLASF00000000-0000-0000-0000-000000000000Pythonhobuhobuhobuhobuhobuhobuhobuhob1.200Geotiff_Information:\n Version: 1\n Key_Revision: 1.0\n Tagged_Information:\n End_Of_Tags.\n Keyed_Information:\n End_Of_Keys.\n End_Of_Geotiff.\n78/2008227742042020falseLASzip Version 2.0r0 c2 50000: POINT10 201341235134123412222030443210.50.50.001323225633452344.2333523442.344-90.9929999999999933452344.2333523442.344-90.99299999999999Xx coordinate as a long integer. You must use the scale and offset information of the header to determine the double value.0132111004Yy coordinate as a long integer. You must use the scale and offset information of the header to determine the double value.1132111404Zz coordinate as a long integer. You must use the scale and offset information of the header to determine the double value.2132111804IntensityThe intensity value is the integer representation of the pulse return magnitude. This value is optional and system specific. However, it should always be included if available.31161011202Return NumberReturn Number: The Return Number is the pulse return number for a given output pulse. 
A given output laser pulse can have many returns, and they must be marked in sequence of return. The first return will have a Return Number of one, the second a Return Number of two, and so on up to five returns.4131011431Number of ReturnsNumber of Returns (for this emitted pulse): The Number of Returns is the total number of returns for a given pulse. For example, a laser data point may be return two (Return Number) within a total number of five returns.5131011461Scan DirectionThe Scan Direction Flag denotes the direction at which the scanner mirror was traveling at the time of the output pulse. A bit value of 1 is a positive scan direction, and a bit value of 0 is a negative scan direction (where positive scan direction is a scan moving from the left side of the in-track direction to the right side and negative the opposite). 6111011471Flightline EdgeThe Edge of Flight Line data bit has a value of 1 only when the point is at the end of a scan. It is the last point on a given scan line before it changes direction.7111011481ClassificationClassification in LAS 1.0 was essentially user defined and optional. LAS 1.1 defines a standard set of ASPRS classifications. In addition, the field is now mandatory. If a point has never been classified, this byte must be set to zero. There are no user defined classes since both point format 0 and point format 1 supply 8 bits per point for user defined operations. Note that the format for classification is a bit encoded field with the lower five bits used for class and the three high bits used for flags.8180011501Scan Angle RankThe Scan Angle Rank is a signed one-byte number with a valid range from -90 to +90. The Scan Angle Rank is the angle (rounded to the nearest integer in the absolute value sense) at which the laser point was output from the laser system including the roll of the aircraft. The scan angle is within 1 degree of accuracy from +90 to -90 degrees. The scan angle is an angle based on 0 degrees being nadir, and -90 degrees to the left side of the aircraft in the direction of flight.9181111601User DataThis field may be used at the user's discretion10180011701Point Source IDThis value indicates the file from which this point originated. Valid values for this field are 1 to 65,535 inclusive with zero being used for a special case discussed below. The numerical value corresponds to the File Source ID from which this point originated. Zero is reserved as a convenience to system implementers. A Point Source ID of zero implies that this point originated in this file. This implies that processing software should set the Point Source ID equal to the File Source ID of the file containing this point at some time during processing. 1111610118021.01.7.0b10' \ No newline at end of file +## '\nLASF00000000-0000-0000-0000-000000000000Pythonhobuhobuhobuhobuhobuhobuhobuhob1.200Geotiff_Information:\n Version: 1\n Key_Revision: 1.0\n Tagged_Information:\n End_Of_Tags.\n Keyed_Information:\n End_Of_Keys.\n End_Of_Geotiff.\n78/2008227742042020falseLASzip Version 2.0r0 c2 50000: POINT10 201341235134123412222030443210.50.50.001323225633452344.2333523442.344-90.9929999999999933452344.2333523442.344-90.99299999999999Xx coordinate as a long integer. You must use the scale and offset information of the header to determine the double value.0132111004Yy coordinate as a long integer. You must use the scale and offset information of the header to determine the double value.1132111404Zz coordinate as a long integer. 
You must use the scale and offset information of the header to determine the double value.2132111804IntensityThe intensity value is the integer representation of the pulse return magnitude. This value is optional and system specific. However, it should always be included if available.31161011202Return NumberReturn Number: The Return Number is the pulse return number for a given output pulse. A given output laser pulse can have many returns, and they must be marked in sequence of return. The first return will have a Return Number of one, the second a Return Number of two, and so on up to five returns.4131011431Number of ReturnsNumber of Returns (for this emitted pulse): The Number of Returns is the total number of returns for a given pulse. For example, a laser data point may be return two (Return Number) within a total number of five returns.5131011461Scan DirectionThe Scan Direction Flag denotes the direction at which the scanner mirror was traveling at the time of the output pulse. A bit value of 1 is a positive scan direction, and a bit value of 0 is a negative scan direction (where positive scan direction is a scan moving from the left side of the in-track direction to the right side and negative the opposite). 6111011471Flightline EdgeThe Edge of Flight Line data bit has a value of 1 only when the point is at the end of a scan. It is the last point on a given scan line before it changes direction.7111011481ClassificationClassification in LAS 1.0 was essentially user defined and optional. LAS 1.1 defines a standard set of ASPRS classifications. In addition, the field is now mandatory. If a point has never been classified, this byte must be set to zero. There are no user defined classes since both point format 0 and point format 1 supply 8 bits per point for user defined operations. Note that the format for classification is a bit encoded field with the lower five bits used for class and the three high bits used for flags.8180011501Scan Angle RankThe Scan Angle Rank is a signed one-byte number with a valid range from -90 to +90. The Scan Angle Rank is the angle (rounded to the nearest integer in the absolute value sense) at which the laser point was output from the laser system including the roll of the aircraft. The scan angle is within 1 degree of accuracy from +90 to -90 degrees. The scan angle is an angle based on 0 degrees being nadir, and -90 degrees to the left side of the aircraft in the direction of flight.9181111601User DataThis field may be used at the user's discretion10180011701Point Source IDThis value indicates the file from which this point originated. Valid values for this field are 1 to 65,535 inclusive with zero being used for a special case discussed below. The numerical value corresponds to the File Source ID from which this point originated. Zero is reserved as a convenience to system implementers. A Point Source ID of zero implies that this point originated in this file. This implies that processing software should set the Point Source ID equal to the File Source ID of the file containing this point at some time during processing. 
1111610118021.01.7.0b10' diff --git a/python/tests/Point.txt b/python/tests/Point.txt index 9b55009e5..5ba34aafe 100644 --- a/python/tests/Point.txt +++ b/python/tests/Point.txt @@ -1,7 +1,7 @@ >>> from liblas import point >>> p = point.Point() - + >>> p.x 0.0 >>> p.x = 1.0 @@ -31,40 +31,40 @@ >>> p.return_number = 3 >>> p.return_number 3 - + >>> p.number_of_returns 0 >>> p.number_of_returns = 4 >>> p.number_of_returns 4 - + >>> p.flightline_edge 0 >>> p.flightline_edge = 1 >>> p.flightline_edge 1 - + >>> p.scan_flags 163 - + >>> p.classification 0 >>> p.classification = 3 >>> p.classification 3 - + >>> p.user_data 0 >>> p.user_data = 163 >>> p.user_data 163 - + >>> p.scan_angle 0 >>> p.scan_angle = 45 >>> p.scan_angle 45 - + >>> import datetime >>> import math >>> import time @@ -76,53 +76,52 @@ ... return int(math.floor(td.seconds/3600.0)) ... else: ... return int(math.floor(td.seconds/3600.0)) - 1 - >>> td = datetime.timedelta(hours=get_td()) - >>> p.time = datetime.datetime(2008,3,19,23,45,45,13434) + >>> td = datetime.timedelta(hours=get_td()) + >>> p.time = datetime.datetime(2008,3,19,23,45,45,13434) >>> delta = p.time - datetime.datetime(2008,3,19,23,45,45,13434) >>> int(math.floor(delta.seconds/3600.0)) == get_td() or int(math.floor(delta.seconds/3600.0)) == get_td() + 1 True >>> p.time.microsecond 13434 - + >>> p.intensity 0 >>> p.intensity = 120 >>> p.intensity 120 - + >>> c = p.color >>> c.red 0 >>> c.red = 124 >>> c.red 124 - + >>> p.color = c >>> p.color.red 124 >>> p.xml - '\n1231231203404511630Low Vegetation3falsefalsefalse12400' - + '\n1231231203404511630Low Vegetation3falsefalsefalse12400' + -# +# # >>> import ctypes # >>> data = (ctypes.c_ubyte * 256)() # >>> data[10] # 0 -# +# # >>> for i in range(256): # ... data[i] = 2+i -# +# # >>> data[10] # 12 # >>> p.data = data -# +# # # Ensure we can round trip the data # >>> [data[i] for i in range(10)] # [2, 3, 4, 5, 6, 7, 8, 9, 10, 11] # >>> [p.data[i] for i in range(10)] # [2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - diff --git a/python/tests/SRS-GDAL.txt b/python/tests/SRS-GDAL.txt index 722a38abb..026381cc0 100644 --- a/python/tests/SRS-GDAL.txt +++ b/python/tests/SRS-GDAL.txt @@ -1,29 +1,29 @@ >>> from liblas import srs >>> from liblas import point >>> from liblas import header - + >>> import liblas >>> s = srs.SRS() >>> s.proj4 '' - >>> s.proj4 = '+proj=utm +zone=15 +datum=WGS84 +units=m +no_defs' + >>> s.proj4 = b'+proj=utm +zone=15 +datum=WGS84 +units=m +no_defs' >>> s.proj4 == '+proj=utm +zone=15 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs ' True - + >>> s = srs.SRS() - >>> s.set_userinput('EPSG:4326') + >>> s.set_userinput(b'EPSG:4326') True >>> s.proj4 == '+proj=longlat +datum=WGS84 +no_defs ' True - + >>> from liblas import file >>> f = file.File('../test/data/1.2_3.las',mode='r') >>> s = f.header.srs - >>> s.wkt == """PROJCS["NAD83 / UTM zone 15N",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.2572221010002,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26915"]]""" + >>> s.wkt == b"""PROJCS["NAD83 / UTM zone 15N",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 
1980",6378137,298.2572221010002,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26915"]]""" True - + >>> s2 = srs.SRS() - >>> s2.wkt = """GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]""" + >>> s2.wkt = b"""GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]""" >>> p = f.read(0) >>> p.x 470692.44 @@ -46,10 +46,10 @@ >>> def new_offset(old_scale, new_scale, old_offset, x): ... return (new_scale*(x - old_offset) - old_scale*x)/(-1.0*old_scale) - - >>> utm_wkt = """PROJCS["NAD83 / UTM zone 15N",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.2572221010002,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26915"]]""" - >>> dd_wkt = """GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]""" + >>> utm_wkt = b"""PROJCS["NAD83 / UTM zone 15N",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.2572221010002,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","26915"]]""" + + >>> dd_wkt = b"""GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]""" >>> s_dd = srs.SRS() >>> s_dd.wkt = dd_wkt >>> s_utm = srs.SRS() @@ -64,46 +64,46 @@ [0.0, 0.0, 0.0] >>> utm_header.scale [0.01, 0.01, 0.01] - + # >>> new_offset(0.01, 0.0001, 0.0, 470692.44) # >>> utm_header.offset = [offset+1.0/0.000001 for offset in utm_header.offset] # >>> utm_header.offset [1000000.0, 1000000.0, 1000000.0] # >>> utm_header.scale = [0.000001,0.000001,0.000001] >>> utm_header.srs = s_utm - + # >>> dd_header.scale = [0.000001,0.000001,0.000001] >>> dd_header.srs = s_dd - + >>> f = file.File('../test/data/1.2_3.las',mode='r', header = utm_header) - >>> f.header.data_offset - 438L + >>> int(f.header.data_offset) + 438 >>> f.header.scale [0.01, 0.01, 0.01] >>> p 
= f.read(0) >>> origx, origy = p.x, p.y - >>> origx, origy - (470692.44, 4602888.9000000004) + >>> "{:.2f}, {:.2f}".format(origx, origy) + '470692.44, 4602888.90' >>> f.set_srs(s_dd) True >>> p = f.read(0) -We only get truncated values because our header scale +We only get truncated values because our header scale values are 0.01 - >>> p.x, p.y - (-93.350000000000009, 41.579999999999998) + >>> "{:.2f}, {:.2f}".format(p.x, p.y) + '-93.35, 41.58' #real values # (-93.351562590199833, 41.577148395415108) - - + + >>> f_project = file.File('junk_srs_project.las',mode='w',header=dd_header) - + >>> p.header = dd_header - >>> p.x, p.y - (-93.350000000000009, 41.579999999999998) + >>> "{:.2f}, {:.2f}".format(p.x, p.y) + '-93.35, 41.58' >>> dd_header.srs.proj4 '+proj=longlat +datum=WGS84 +no_defs ' @@ -111,8 +111,8 @@ values are 0.01 >>> f_project.close() >>> del f_project >>> f3 = file.File('junk_srs_project.las') - >>> f3.header.data_offset - 789L + >>> int(f3.header.data_offset) + 789 >>> s_utm = srs.SRS() >>> s_utm.wkt = utm_wkt @@ -129,12 +129,12 @@ values are 0.01 >>> f = file.File('../test/data/srs_vertcs.las',mode='r') >>> s = f.header.srs - >>> s.get_wkt_compoundok() == """COMPD_CS["unknown",PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32617"]],VERT_CS["NAVD88 height",VERT_DATUM["North American Vertical Datum 1988",2005,AUTHORITY["EPSG","5103"],EXTENSION["PROJ4_GRIDS","g2003conus.gtx,g2003alaska.gtx,g2003h01.gtx,g2003p01.gtx"]],AXIS["Up",UP],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","5703"]]]""" - True + >>> str(s.get_wkt_compoundok().decode()) + 'COMPD_CS["unknown",PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32617"]],VERT_CS["NAVD88 height",VERT_DATUM["North American Vertical Datum 1988",2005,AUTHORITY["EPSG","5103"],EXTENSION["PROJ4_GRIDS","g2012a_conus.gtx,g2012a_alaska.gtx,g2012a_guam.gtx,g2012a_hawaii.gtx,g2012a_puertorico.gtx,g2012a_samoa.gtx"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Up",UP],AUTHORITY["EPSG","5703"]]]' >>> s2 = srs.SRS() - >>> s2.wkt = """PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32617"]]""" - >>> s2.set_verticalcs( 5703, 'abc', 5103, 9001 ) + >>> s2.wkt = b"""PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 
84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32617"]]""" + >>> s2.set_verticalcs( 5703, b'abc', 5103, 9001 ) True - >>> s2.get_wkt_compoundok() - 'COMPD_CS["unknown",PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32617"]],VERT_CS["NAVD88 height",VERT_DATUM["North American Vertical Datum 1988",2005,AUTHORITY["EPSG","5103"],EXTENSION["PROJ4_GRIDS","g2003conus.gtx,g2003alaska.gtx,g2003h01.gtx,g2003p01.gtx"]],AXIS["Up",UP],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","5703"]]]' + >>> str(s2.get_wkt_compoundok().decode()) + 'COMPD_CS["unknown",PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32617"]],VERT_CS["NAVD88 height",VERT_DATUM["North American Vertical Datum 1988",2005,AUTHORITY["EPSG","5103"],EXTENSION["PROJ4_GRIDS","g2012a_conus.gtx,g2012a_alaska.gtx,g2012a_guam.gtx,g2012a_hawaii.gtx,g2012a_puertorico.gtx,g2012a_samoa.gtx"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Up",UP],AUTHORITY["EPSG","5703"]]]' diff --git a/python/tests/VLR.txt b/python/tests/VLR.txt index 20d312ed3..d038ed319 100644 --- a/python/tests/VLR.txt +++ b/python/tests/VLR.txt @@ -7,34 +7,34 @@ >>> v.recordid = 2 >>> v.recordid 2 - >>> v.userid + >>> str(v.userid.decode()) '' - >>> v.userid = 'liblas.org' - >>> v.userid + >>> v.userid = b'liblas.org' + >>> str(v.userid.decode()) 'liblas.org' - - >>> v.description + + >>> str(v.description.decode()) '' - >>> v.description = 'libLAS' - >>> v.description + >>> v.description = b'libLAS' + >>> str(v.description.decode()) 'libLAS' - + >>> v.recordlength = 256 - >>> v.recordlength + >>> v.recordlength 256 - + >>> import ctypes >>> data = (ctypes.c_ubyte * 256)() >>> data[10] 0 - + >>> for i in range(256): ... data[i] = 2+i - + >>> data[10] 12 >>> v.data = data - + # Ensure we can round trip the data >>> [data[i] for i in range(10)] [2, 3, 4, 5, 6, 7, 8, 9, 10, 11] @@ -49,10 +49,10 @@ 32 >>> data[1] 3 - + >>> [v.data[i] for i in range(10)] [2, 32, 4, 5, 6, 7, 8, 9, 10, 11] - + >>> import liblas # >>> liblas.HAVE_GDAL @@ -61,29 +61,29 @@ >>> from liblas import file >>> f = file.File('../test/data/srs.las') >>> h = f.header - >>> h.records_count - 3L - + >>> int(h.records_count) + 3 + >>> def test_srs(): ... s = h.srs ... if not liblas.HAVE_LIBGEOTIFF: ... return True - ... 
if not liblas.HAVE_GDAL: + ... if not liblas.HAVE_GDAL: ... return s.proj4 == '+proj=utm +zone=17 +ellps=WGS84 +units=m ' ... if liblas.HAVE_GDAL: ... return s.proj4 == '+proj=utm +zone=17 +datum=WGS84 +units=m +no_defs ' ... return False - + >>> test_srs() True - - + + >>> v = h.GetVLR(0) >>> v.recordid 34735 - >>> v.userid + >>> str(v.userid.decode()) 'LASF_Projection' - + >>> data = v.data >>> len(data) 72 @@ -91,30 +91,29 @@ 8 # Deleting a VLR shouldn't change the offset - >>> h.data_offset - 759L + >>> int(h.data_offset) + 759 >>> h.DeleteVLR(0) - >>> h.data_offset - 759L - + >>> int(h.data_offset) + 759 + >>> del f - + >>> f = file.File('../test/data/srs.las') >>> h = f.header - >>> f.header.data_offset - 759L + >>> int(f.header.data_offset) + 759 >>> f2 = file.File('junk_srs.las',mode='w',header=h) >>> for p in f: ... f2.write(p) - + >>> f2.close() >>> del f2 - + >>> f3 = file.File('junk_srs.las') - >>> f3.header.records_count - 3L - >>> f3.header.data_offset - 759L + >>> int(f3.header.records_count) + 3 + >>> int(f3.header.data_offset) + 759 >>> import os >>> os.remove('junk_srs.las') - diff --git a/python/tests/test_doctests.py b/python/tests/test_doctests.py index d8f43676c..fabbbed90 100644 --- a/python/tests/test_doctests.py +++ b/python/tests/test_doctests.py @@ -13,10 +13,10 @@ * * Copyright (c) 2007, Sean C. Gillies * All rights reserved. - * + * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright @@ -25,7 +25,7 @@ * * Neither the name of Sean C. Gillies nor the names of * its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -51,13 +51,13 @@ doctest.ELLIPSIS) def list_doctests(): - + files = glob.glob(os.path.join(os.path.dirname(__file__), '*.txt')) import liblas - + for f in copy.copy(files): if liblas.HAVE_LIBGEOTIFF and liblas.HAVE_GDAL: - + # run GDAL's tests only if 'GeoTIFF' in f: files.remove(f) @@ -68,7 +68,7 @@ def list_doctests(): files.remove(f) if 'SRS.txt' in f: files.remove(f) - + if not liblas.HAVE_LIBGEOTIFF and not liblas.HAVE_GDAL: if 'GDAL' in f: files.remove(f)
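
The recurring pattern across both commits is converting Python str values to bytes before they cross into the ctypes-wrapped C API (filenames, system/software identifiers, WKT and Proj.4 strings) and decoding the bytes that come back. Below is a minimal sketch of that boundary handling; the helper names _to_bytes/_to_str and the utf-8 default are illustrative assumptions and are not part of libLAS or of this patch, which instead encodes filenames as "ascii" and calls .encode()/.decode() inline at each call site.

    # Hypothetical helpers illustrating the str/bytes boundary that this
    # patch handles inline; not part of the libLAS API.

    def _to_bytes(value, encoding="utf-8"):
        """Encode a str to bytes before passing it to a ctypes C function.

        Under Python 2, str is already bytes and is returned unchanged.
        """
        if isinstance(value, bytes):
            return value
        return value.encode(encoding)

    def _to_str(value, encoding="utf-8"):
        """Decode bytes returned by a ctypes C function into text."""
        if isinstance(value, bytes):
            return value.decode(encoding)
        return value

    # Usage mirroring header.set_systemid()/get_systemid() in the patch
    # (LASHeader_SetSystemId truncates the identifier to 31 characters):
    #
    #   core.las.LASHeader_SetSystemId(self.handle, _to_bytes(value[0:31]))
    #   return _to_str(core.las.LASHeader_GetSystemId(self.handle))

Centralizing the conversion like this would keep the ctypes wrappers working under both Python 2 and Python 3 without repeating the encode/decode calls in every accessor; whether to prefer utf-8, ascii, or the filesystem encoding for filenames is a judgment call not settled by this patch.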