diff --git a/.editorconfig b/.editorconfig index f2a3aa4..e40b3f8 100644 --- a/.editorconfig +++ b/.editorconfig @@ -5,8 +5,4 @@ indent_style = space indent_size = 4 trim_trailing_whitespace = true insert_final_newline = true -dotnet_style_predefined_type_for_locals_parameters_members = true:error -dotnet_style_predefined_type_for_member_access = true:error -csharp_style_var_for_built_in_types = false:error -csharp_style_var_when_type_is_apparent = true:error -csharp_style_var_elsewhere = true:error + diff --git a/.gitignore b/.gitignore index 8fcee28..bd23669 100644 --- a/.gitignore +++ b/.gitignore @@ -190,3 +190,5 @@ FakesAssemblies/ # Others /NetTopologySuite.IO/NetTopologySuite.IO.GeoTools/NetTopologySuite.IO.GeoTools.csproj.DotSettings +/src/NetTopologySuite.IO.Esri.Core/dBASE.xlsx +/src/ShpDmsPrec diff --git a/NetTopologySuite.IO.Esri.sln b/NetTopologySuite.IO.Esri.sln new file mode 100644 index 0000000..67b5805 --- /dev/null +++ b/NetTopologySuite.IO.Esri.sln @@ -0,0 +1,171 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.30717.126 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{B0923F22-AF00-46DA-B5CE-73F4D398DE37}" + ProjectSection(SolutionItems) = preProject + .editorconfig = .editorconfig + .gitignore = .gitignore + dBASE.md = dBASE.md + README.md = README.md + EndProjectSection +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.ShapeFile", "src\NetTopologySuite.IO.ShapeFile\NetTopologySuite.IO.ShapeFile.csproj", "{04CAE5A6-0B7B-47F8-9899-46EE805478E7}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.ShapeFile.Test", "test\NetTopologySuite.IO.ShapeFile.Test\NetTopologySuite.IO.ShapeFile.Test.csproj", "{89254B6E-F130-41F3-8956-2F790E99C6F0}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.ShapefileNG", "src\NetTopologySuite.IO.ShapefileNG\NetTopologySuite.IO.ShapefileNG.csproj", "{54244379-0702-40E8-8773-0973B3485293}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.ShapefileNG.Test", "test\NetTopologySuite.IO.ShapefileNG.Test\NetTopologySuite.IO.ShapefileNG.Test.csproj", "{7B2FB807-A99D-40E9-AE5B-4931BE2086FA}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.Esri", "src\NetTopologySuite.IO.Esri\NetTopologySuite.IO.Esri.csproj", "{960D89DB-F534-4207-931E-4BB8B4DCA483}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.Esri.Test", "test\NetTopologySuite.IO.Esri.Test\NetTopologySuite.IO.Esri.Test.csproj", "{46F2F8D9-855C-40E6-A93F-DB96A3343C8E}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.Esri.Core", "src\NetTopologySuite.IO.Esri.Core\NetTopologySuite.IO.Esri.Core.csproj", "{2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.GDB", "src\NetTopologySuite.IO.GDB\NetTopologySuite.IO.GDB.csproj", "{09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|ARM = Debug|ARM + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|ARM = Release|ARM + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + 
GlobalSection(ProjectConfigurationPlatforms) = postSolution + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|ARM.ActiveCfg = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|ARM.Build.0 = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|x64.ActiveCfg = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|x64.Build.0 = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|x86.ActiveCfg = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|x86.Build.0 = Debug|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|Any CPU.Build.0 = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|ARM.ActiveCfg = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|ARM.Build.0 = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|x64.ActiveCfg = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|x64.Build.0 = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|x86.ActiveCfg = Release|Any CPU + {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|x86.Build.0 = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|ARM.ActiveCfg = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|ARM.Build.0 = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|x64.ActiveCfg = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|x64.Build.0 = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|x86.ActiveCfg = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|x86.Build.0 = Debug|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|Any CPU.Build.0 = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|ARM.ActiveCfg = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|ARM.Build.0 = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|x64.ActiveCfg = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|x64.Build.0 = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|x86.ActiveCfg = Release|Any CPU + {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|x86.Build.0 = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|Any CPU.Build.0 = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|ARM.ActiveCfg = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|ARM.Build.0 = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|x64.ActiveCfg = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|x64.Build.0 = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|x86.ActiveCfg = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Debug|x86.Build.0 = Debug|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|Any CPU.ActiveCfg = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|Any CPU.Build.0 = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|ARM.ActiveCfg = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|ARM.Build.0 = Release|Any CPU + 
{54244379-0702-40E8-8773-0973B3485293}.Release|x64.ActiveCfg = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|x64.Build.0 = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|x86.ActiveCfg = Release|Any CPU + {54244379-0702-40E8-8773-0973B3485293}.Release|x86.Build.0 = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|ARM.ActiveCfg = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|ARM.Build.0 = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|x64.ActiveCfg = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|x64.Build.0 = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|x86.ActiveCfg = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Debug|x86.Build.0 = Debug|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|Any CPU.Build.0 = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|ARM.ActiveCfg = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|ARM.Build.0 = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|x64.ActiveCfg = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|x64.Build.0 = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|x86.ActiveCfg = Release|Any CPU + {7B2FB807-A99D-40E9-AE5B-4931BE2086FA}.Release|x86.Build.0 = Release|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Debug|Any CPU.Build.0 = Debug|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Debug|ARM.ActiveCfg = Debug|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Debug|x64.ActiveCfg = Debug|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Debug|x86.ActiveCfg = Debug|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Release|Any CPU.ActiveCfg = Release|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Release|Any CPU.Build.0 = Release|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Release|ARM.ActiveCfg = Release|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Release|x64.ActiveCfg = Release|Any CPU + {960D89DB-F534-4207-931E-4BB8B4DCA483}.Release|x86.ActiveCfg = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|ARM.ActiveCfg = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|ARM.Build.0 = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|x64.ActiveCfg = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|x64.Build.0 = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|x86.ActiveCfg = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Debug|x86.Build.0 = Debug|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|Any CPU.Build.0 = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|ARM.ActiveCfg = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|ARM.Build.0 = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|x64.ActiveCfg = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|x64.Build.0 = Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|x86.ActiveCfg = 
Release|Any CPU + {46F2F8D9-855C-40E6-A93F-DB96A3343C8E}.Release|x86.Build.0 = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|ARM.ActiveCfg = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|ARM.Build.0 = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|x64.ActiveCfg = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|x64.Build.0 = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|x86.ActiveCfg = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Debug|x86.Build.0 = Debug|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|Any CPU.Build.0 = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|ARM.ActiveCfg = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|ARM.Build.0 = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|x64.ActiveCfg = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|x64.Build.0 = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|x86.ActiveCfg = Release|Any CPU + {2BE3CBAE-CBF5-4B67-B1CB-CA64365A6157}.Release|x86.Build.0 = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|ARM.ActiveCfg = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|ARM.Build.0 = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|x64.ActiveCfg = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|x64.Build.0 = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|x86.ActiveCfg = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Debug|x86.Build.0 = Debug|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|Any CPU.Build.0 = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|ARM.ActiveCfg = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|ARM.Build.0 = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|x64.ActiveCfg = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|x64.Build.0 = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|x86.ActiveCfg = Release|Any CPU + {09E5D241-0DBA-42FF-9B7C-A4E4B43DB2DA}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {06DEE0C5-480C-4A70-BC80-3791D98E6A89} + EndGlobalSection +EndGlobal diff --git a/NetTopologySuite.IO.ShapeFile.sln b/NetTopologySuite.IO.ShapeFile.sln deleted file mode 100644 index 3b72122..0000000 --- a/NetTopologySuite.IO.ShapeFile.sln +++ /dev/null @@ -1,44 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 15 -VisualStudioVersion = 15.0.27004.2009 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{B0923F22-AF00-46DA-B5CE-73F4D398DE37}" - ProjectSection(SolutionItems) = preProject - .editorconfig = .editorconfig - .gitignore = .gitignore - README.md = README.md - EndProjectSection -EndProject 
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.ShapeFile", "src\NetTopologySuite.IO.ShapeFile\NetTopologySuite.IO.ShapeFile.csproj", "{04CAE5A6-0B7B-47F8-9899-46EE805478E7}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.GDB", "src\NetTopologySuite.IO.GDB\NetTopologySuite.IO.GDB.csproj", "{6A1DF663-838E-42C0-811A-8114C3547B01}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NetTopologySuite.IO.ShapeFile.Test", "test\NetTopologySuite.IO.ShapeFile.Test\NetTopologySuite.IO.ShapeFile.Test.csproj", "{89254B6E-F130-41F3-8956-2F790E99C6F0}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|Any CPU.ActiveCfg = Release|Any CPU - {04CAE5A6-0B7B-47F8-9899-46EE805478E7}.Release|Any CPU.Build.0 = Release|Any CPU - {6A1DF663-838E-42C0-811A-8114C3547B01}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6A1DF663-838E-42C0-811A-8114C3547B01}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6A1DF663-838E-42C0-811A-8114C3547B01}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6A1DF663-838E-42C0-811A-8114C3547B01}.Release|Any CPU.Build.0 = Release|Any CPU - {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {89254B6E-F130-41F3-8956-2F790E99C6F0}.Debug|Any CPU.Build.0 = Debug|Any CPU - {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|Any CPU.ActiveCfg = Release|Any CPU - {89254B6E-F130-41F3-8956-2F790E99C6F0}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {06DEE0C5-480C-4A70-BC80-3791D98E6A89} - EndGlobalSection -EndGlobal diff --git a/src/NetTopologySuite.IO.Esri.Core/Buffers/ArrayBuffer.cs b/src/NetTopologySuite.IO.Esri.Core/Buffers/ArrayBuffer.cs new file mode 100644 index 0000000..6fde4ad --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Buffers/ArrayBuffer.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO +{ + internal class ArrayBuffer + { + private static readonly int ExpandLimit = 4 * 1024 * 1024; // 4MB * sizeof(T) + + private static int GetExpandedLength(T[] array) + { + if (array.Length < 4) + return array.Length + 4; + + if (array.Length > ExpandLimit) + return array.Length + ExpandLimit; + + return array.Length * 2; + } + + public static void Expand(ref T[] array, int minLength) + { + if (array.Length < minLength) + { + var expandedLength = Math.Max(minLength, GetExpandedLength(array)); + Array.Resize(ref array, expandedLength); + } + } + + public static void Expand(ref T[] array) + { + Array.Resize(ref array, GetExpandedLength(array)); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBuffer.cs b/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBuffer.cs new file mode 100644 index 0000000..9db3eef --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBuffer.cs @@ -0,0 +1,132 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; + +namespace 
NetTopologySuite.IO +{ + + /// + /// Represents a buffer of binary data. + /// + internal abstract class BinaryBuffer + { + protected BinaryBuffer(int initialCapacity = 256) + { + if (initialCapacity < 0) + throw new ArgumentException(nameof(initialCapacity)); + + Buffer = new byte[initialCapacity]; + UsedBufferSize = 0; + } + + + /// + /// Underlying buffer data. + /// + protected byte[] Buffer; + + protected int UsedBufferSize { get; private set; } + + /// + /// Used for debugging purposes. + /// + internal byte[] UsedBuffer + { + get + { + var bytes = new byte[UsedBufferSize]; + Array.Copy(Buffer, bytes, bytes.Length); + return bytes; + } + } + + + protected void SetUsedBufferSize(int newSize) + { + if (newSize < 0) + throw new ArgumentException(nameof(newSize)); + + if (newSize == UsedBufferSize) + return; + + if (newSize < UsedBufferSize) + { + Array.Clear(Buffer, newSize, UsedBufferSize - newSize); + } + else if (newSize > Buffer.Length) + { + ArrayBuffer.Expand(ref Buffer, newSize); + } + + UsedBufferSize = newSize; + } + + internal virtual void Reset() + { + Buffer = new byte[0]; + UsedBufferSize = 0; + } + + public override string ToString() + { + var count = Math.Min(64, UsedBufferSize); + var sb = new StringBuilder(); + for (int i = 0; i < count; i++) + { + sb.Append(i + ":" + Buffer[i]); + sb.Append(" | "); + } + if (count < UsedBufferSize) + { + sb.Append("... "); + sb.Append(UsedBufferSize - 1); + sb.Append(":"); + sb.Append(Buffer[UsedBufferSize - 1]); + } + return sb.ToString(); + } + + + [Conditional("DEBUG_BINARY")] + internal void GetBinaryDiff(string name, BinaryBufferReader other, List differences) + { + if (UsedBufferSize != other.UsedBufferSize) + { + differences.Add(name + "." + nameof(UsedBufferSize) + ": " + UsedBufferSize + " | " + other.UsedBufferSize); + } + + var end = Math.Min(UsedBufferSize, other.UsedBufferSize); + + for (int i = 0; i < end; i++) + { + if (this.Buffer[i] != other.Buffer[i]) + differences.Add(name + "[" + i + "]: '" + Buffer[i].ToChar() + "' | '" + other.Buffer[i].ToChar() + "' " + Buffer[i].ToString().PadLeft(3) + " | " + other.Buffer[i].ToString().PadLeft(3)); + } + } + + [Conditional("DEBUG_BINARY")] + internal void TraceToConsole(string header, int startIndex, int count) + { + if (!string.IsNullOrEmpty(header)) + Console.WriteLine(header); + + var end = startIndex + count; + for (int i = startIndex; i < end; i++) + { + Console.WriteLine(Buffer[i].ToText()); + } + Console.WriteLine(); + } + + [Conditional("DEBUG_BINARY")] + internal static void TraceToConsole(string msg) + { + Console.WriteLine(msg.Replace(char.MinValue, '▬')); // ⌂ ↔ ¤ + } + + + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBufferReader.cs b/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBufferReader.cs new file mode 100644 index 0000000..24c6986 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBufferReader.cs @@ -0,0 +1,323 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO +{ + internal class BinaryBufferReader : BinaryBuffer + { + + public BinaryBufferReader(int initialCapacity = 256) : base(initialCapacity) + { + } + + /// + /// Gets the value at the specified index. + /// + /// The zero-based index of the element to get or set. + /// The value at the specified index. 
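+        /// An index outside the range 0..Size-1 (the bytes loaded for the current record, not the capacity of the underlying array) throws ArgumentOutOfRangeException.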
+ public byte this[int index] + { + get + { + if (index < 0 || index >= UsedBufferSize) + throw new ArgumentOutOfRangeException(nameof(index)); + return Buffer [index]; + } + } + + /// + /// Gets the current position of processed data. + /// + public int Position { get; private set; } + + + /// + /// Gets the count of bytes reserved by the reader. + /// + public int Size => UsedBufferSize; + + + /// + /// Gets the current position in the buffer. + /// + public bool End => Position >= UsedBufferSize; + + internal override void Reset() + { + base.Reset(); + SetPosition(0, 0); + } + + + /// + /// Loads binary data from the stream and resets reader position. + /// + /// Binary data source stream. + /// The maximum number of bytes to be read from the stream. + public void LoadFrom(Stream source, int bytesCount) + { + if (source == null) + throw new ArgumentNullException(this.GetType().Name + " cannot read from an uninitialized source stream.", nameof(source)); + + SetUsedBufferSize(bytesCount); + source.Read(Buffer, 0, bytesCount); + SetPosition(0, 0); + } + + /// + /// Sets current position of processed data. + /// + /// Index of the first element in BinarySegment which was processed. + /// The number of processed elements. + protected void SetPosition(int startIndex, int bytesCount) + { + if (startIndex + bytesCount > UsedBufferSize) + { + throw new ArgumentException("Current " + GetType().Name + ".Position could not be greater than reserved buffer size.", nameof(Position)); + } + Position = startIndex + bytesCount; + } + + public void MoveTo(int position) + { + SetPosition(position, 0); + } + + /// + /// Moves data processing ahead a specified number of items. + /// + /// + public void Advance(int count) + { + SetPosition(Position, count); + } + + /// + /// Advances past consecutive instances of the given value. + /// + /// The value past which the reader is to advance. + /// The number of positions the reader has advanced or -1 if value was not found. + public bool TryAdvanceTo(byte value) + { + var newPosition = Array.IndexOf(Buffer, value, Position, UsedBufferSize); + if (newPosition < 1) + { + Position = UsedBufferSize; + return false; + } + else + { + Position = newPosition; + return true; + } + } + + /// + /// Advances past consecutive instances of the given value. + /// + /// The value past which the reader is to advance. + /// The number of positions the reader has advanced. + public int AdvancePast(byte value) + { + int startPosition = Position; + while (!End && Buffer[Position] == value) + { + Position++; + } + return Position - startPosition; + } + + + + /// + /// Reads a 8-bit unsigned integer and advances the current Position by one. + /// + /// The position within underlying binary data. + /// Value read from underlying data. 
+ public byte ReadByte(int index) + { + SetPosition(index, 1); + return this[index]; + } + public byte ReadByte() + { + return ReadByte(Position); + } + + + public char ReadByteChar(int index) + { + return (char)ReadByte(index); + } + public char ReadByteChar() + { + return ReadByteChar(Position); + } + + protected byte[] ReadBytes(int startIndex, int count) + { + SetPosition(startIndex, count); + var res = new byte[count]; + for (int i = 0; i < count; i++) + { + res[i] = this[startIndex + i]; + } + return res; + } + + protected byte[] ReadReversedBytes(int startIndex, int count) + { + SetPosition(startIndex, count); + var res = new byte[count]; + for (int i = 0; i < count; i++) + { + res[count - i - 1] = this[startIndex + i]; + } + return res; + } + + + /// + /// Reads a 16-bit unsigned integer converted from two bytes at a specified position in the underlying data + /// and advances the current Position by the number of bytes read. + /// + /// The starting position within the underlying data. + /// Value read from underlying data. + public ushort ReadUInt16LittleEndian(int startIndex) + { + SetPosition(startIndex, sizeof(ushort)); + return (ushort)( + this[startIndex + 0] + | this[startIndex + 1] << 8 + ); + } + public ushort ReadUInt16LittleEndian() + { + return ReadUInt16LittleEndian(Position); + } + + + /// + /// Returns a 32-bit unsigned integer converted from two bytes at a specified position in the underlying data + /// and advances the current Position by the number of bytes read. + /// + /// The starting position within the underlying data. + /// Value read from underlying data. + public uint ReadUInt32LittleEndian(int startIndex) + { + SetPosition(startIndex, sizeof(uint)); + return (uint)( + this[startIndex + 0] + | this[startIndex + 1] << 8 + | this[startIndex + 2] << 16 + | this[startIndex + 3] << 24 + ); + } + public uint ReadUInt32LittleEndian() + { + return ReadUInt32LittleEndian(Position); + } + + + public int ReadInt32LittleEndian(int startIndex) + { + SetPosition(startIndex, sizeof(int)); + return (int)( + this[startIndex + 0] + | this[startIndex + 1] << 8 + | this[startIndex + 2] << 16 + | this[startIndex + 3] << 24 + ); + } + public int ReadInt32LittleEndian() + { + return ReadInt32LittleEndian(Position); + } + + + public int ReadInt32BigEndian(int startIndex) + { + SetPosition(startIndex, sizeof(int)); + return (int)( + this[startIndex + 3] + | this[startIndex + 2] << 8 + | this[startIndex + 1] << 16 + | this[startIndex + 0] << 24 + ); + } + public int ReadInt32BigEndian() + { + return ReadInt32BigEndian(Position); + } + + public unsafe double ReadDoubleLittleEndian(int startIndex) + { + /* + var loVal = (ulong)( + this[startIndex + 0] + | this[startIndex + 1] << 8 + | this[startIndex + 2] << 16 + | this[startIndex + 3] << 24 + ); + + var hiVal = (ulong)( + this[startIndex + 4] + | this[startIndex + 5] << 8 + | this[startIndex + 6] << 16 + | this[startIndex + 7] << 24 + ); + */ + + var loVal = ReadUInt32LittleEndian(startIndex); + var hiVal = ReadUInt32LittleEndian(); + + ulong resRef = ((ulong)hiVal) << 32 | loVal; + return *((double*)&resRef); + } + + public double ReadDoubleLittleEndian() + { + return ReadDoubleLittleEndian(Position); + } + + + public string ReadString(int startIndex, int bytesCount, Encoding encoding) + { + // var bytes = ReadBytes(length); //Do not allocate new array if it is not needed. 
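+            // A value made up entirely of '\0' bytes (e.g. a null attribute) is reported as null;
+            // any other value is returned with its '\0' padding trimmed off.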
+ SetPosition(startIndex, bytesCount); + var s = encoding.GetString(this.Buffer, startIndex, bytesCount); + + TraceToConsole("ReadString(" + Position.ToString() + "): '" + s + "'"); + + if (IsNullString(s)) + { + return null; + } + return s.Trim(char.MinValue); + } + + private bool IsNullString(string s) + { + for (int i = 0; i < s.Length; i++) + { + if (s[i] != char.MinValue) + return false; + } + return true; + } + + /// + /// Rreds string from the underlying data and advances the current Position by the number of bytes read. + /// + /// + /// + /// + public string ReadString(int bytesCount, Encoding encoding) + { + return ReadString(Position, bytesCount, encoding); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBufferWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBufferWriter.cs new file mode 100644 index 0000000..6e8564c --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Buffers/BinaryBufferWriter.cs @@ -0,0 +1,214 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO +{ + internal class BinaryBufferWriter : BinaryBuffer + { + + public BinaryBufferWriter(int initialCapacity = 256) : base(initialCapacity) + { + } + + + /// + /// Gets or sets the value at the specified index. + /// + /// The zero-based index of the element to get or set. + /// The value at the specified index. + public byte this[int index] + { + get + { + if (index < 0 || index >= UsedBufferSize) + throw new ArgumentOutOfRangeException(nameof(index)); + return Buffer[index]; + } + private set + { + if (index < 0 || index >= UsedBufferSize) + throw new ArgumentOutOfRangeException(nameof(index)); + Buffer[index] = value; + } + } + + + /// + /// Gets the count of bytes written to the underlying buffer. + /// + public int Size => UsedBufferSize; + + + /// + /// Copies written bytes to the stream. + /// + /// + public void CopyTo(Stream dest) + { + if (dest == null) + throw new ArgumentNullException(this.GetType().Name + " cannot write to an uninitialized stream.", nameof(dest)); + + dest.Write(Buffer, 0, UsedBufferSize); + } + + /// + /// Sets the reserved bytes count to zero and clears the unused bytes in the underlying buffer (sets thier values to zeros). It does not resize the underlying buffer. + /// + public void Clear() + { + SetUsedBufferSize(0); + } + + /// + /// Writes a 8-bit unsigned integer. + /// + /// A 8-bit unsigned integer value. 
+ public void WriteByte(byte value) + { + var index = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + 1); + this[index] = value; + } + + public void WriteByteChar(char c) + { + if (c > byte.MaxValue) + throw new ArgumentException("Character exceeds ASCII code table: " + c, nameof(c)); + + WriteByte((byte)c); + } + + + public void WriteBytes(byte[] source) + { + if (source == null || source.Length < 1) + throw new ArgumentNullException(nameof(source)); + + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + source.Length); + + for (int i = 0; i < source.Length; i++) + { + this[startIndex + i] = source[i]; + } + } + public void WriteBytes(byte value, int count) + { + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + count); + + for (int i = 0; i < count; i++) + { + this[startIndex + i] = value; + } + } + public void WriteNullBytes(int count) + { + WriteBytes(byte.MinValue, count); + } + + + public void WriteUInt16LittleEndian(UInt16 value) + { + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + sizeof(UInt16)); + + this[startIndex + 0] = (byte)value; + this[startIndex + 1] = (byte)(value >> 8); + } + + + public void WriteUInt32LittleEndian(UInt32 value) + { + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + sizeof(UInt32)); + + this[startIndex + 0] = (byte)value; + this[startIndex + 1] = (byte)(value >> 8); + this[startIndex + 2] = (byte)(value >> 16); + this[startIndex + 3] = (byte)(value >> 24); + + } + + + public void WriteInt32LittleEndian(Int32 value) + { + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + sizeof(Int32)); + + this[startIndex + 0] = (byte)value; + this[startIndex + 1] = (byte)(value >> 8); + this[startIndex + 2] = (byte)(value >> 16); + this[startIndex + 3] = (byte)(value >> 24); + + } + + + public void WriteInt32BigEndian(Int32 value) + { + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + sizeof(Int32)); + + this[startIndex + 3] = (byte)value; + this[startIndex + 2] = (byte)(value >> 8); + this[startIndex + 1] = (byte)(value >> 16); + this[startIndex + 0] = (byte)(value >> 24); + + } + + + + public unsafe void WriteDoubleLittleEndian(double value) + { + var startIndex = UsedBufferSize; + SetUsedBufferSize(UsedBufferSize + sizeof(double)); + + ulong valueRef = *(ulong*)&value; + + this[startIndex + 0] = (byte)valueRef; + this[startIndex + 1] = (byte)(valueRef >> 8); + this[startIndex + 2] = (byte)(valueRef >> 16); + this[startIndex + 3] = (byte)(valueRef >> 24); + this[startIndex + 4] = (byte)(valueRef >> 32); + this[startIndex + 5] = (byte)(valueRef >> 40); + this[startIndex + 6] = (byte)(valueRef >> 48); + this[startIndex + 7] = (byte)(valueRef >> 56); + } + + + /// + /// Writes string to BinaryDataWriter. + /// + /// String value to write. + /// Bytes count to be written in BinaryDataWriter. + /// Encoding used to translate string to bytes. + public void WriteString(string s, int bytesCount, Encoding encoding) + { + s = s ?? string.Empty; + + if (s.Length > bytesCount) + { + s = s.Substring(0, bytesCount); + } + + // Specific encoding can add some extra bytes for national characters. Check it. 
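+            // Worked example (assuming UTF-8): "Łódź" is 4 characters but 7 encoded bytes, so for a
+            // 6-byte field the loop below drops trailing characters until the encoded value fits
+            // ("Łód" = 5 bytes); the remaining byte is then filled with trailing '\0' padding.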
+ var bytes = encoding.GetBytes(s); + while (bytes.Length > bytesCount) + { + s = s.Substring(0, s.Length - 1); + bytes = encoding.GetBytes(s); + } + + if (bytes.Length < bytesCount) + { + var fixedBytes = new byte[bytesCount]; // Filled with '\0' by default + Array.Copy(bytes, fixedBytes, bytes.Length); + bytes = fixedBytes; // NULL terminated fixed length string + } + + WriteBytes(bytes); // This will SetUsedBufferSize() + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Dbf.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Dbf.cs new file mode 100644 index 0000000..0033723 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Dbf.cs @@ -0,0 +1,22 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + internal static class Dbf + { + public readonly static int TableDescriptorSize = 32; // Number of bytes in the table header + + internal readonly static int FieldDescriptorSize = 32; // Number of bytes in the field descriptor + internal readonly static int MaxFieldCount = 255; + public readonly static byte Dbase3Version = 0x03; // dBASE III + public readonly static byte HeaderTerminatorMark = 0x0D; + + public readonly static byte DeletedRecordMark = 0x2A; // '*' + public readonly static byte ValidRecordMark = 0x20; // ' ' + public readonly static byte EndOfFileMark = 0x1A; + + public static readonly int MaxFieldNameLength = 10; + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfBinaryExtensions.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfBinaryExtensions.cs new file mode 100644 index 0000000..cf43b22 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfBinaryExtensions.cs @@ -0,0 +1,165 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + + + internal static class DbfBinaryExtensions + { + public static void WriteDbfVersion(this BinaryBufferWriter tableDescriptor, byte version) + { + tableDescriptor.WriteByte(version); + } + public static byte ReadDbfVersion(this BinaryBufferReader tableDescriptor) + { + return tableDescriptor.ReadByte(); + } + + + public static void WriteDbfLastUpdateDate(this BinaryBufferWriter tableDescriptor, DateTime date) + { + tableDescriptor.WriteByte((byte)(date.Year - 1900)); + tableDescriptor.WriteByte((byte)date.Month); + tableDescriptor.WriteByte((byte)date.Day); + } + public static DateTime ReadDbfLastUpdateDate(this BinaryBufferReader tableDescriptor) + { + var y = tableDescriptor.ReadByte(); + var m = tableDescriptor.ReadByte(); + var d = tableDescriptor.ReadByte(); + + if (m < 1) + m = 1; + + if (m > 12) + m = 12; + + if (d < 1) + d = 1; + + if (d > 31) + d = 31; + + return new DateTime(1900 + y, m, d); + } + + + public static void WriteDbfRecordCount(this BinaryBufferWriter tableDescriptor, int recordCount) + { + tableDescriptor.WriteUInt32LittleEndian((uint)recordCount); + } + public static int ReadDbfRecordCount(this BinaryBufferReader tableDescriptor) + { + return (int)tableDescriptor.ReadUInt32LittleEndian(); + } + + + public static void WriteDbfHeaderSize(this BinaryBufferWriter tableDescriptor, int headerSize) + { + tableDescriptor.WriteUInt16LittleEndian((ushort)headerSize); + } + public static ushort ReadDbfHeaderSize(this BinaryBufferReader tableDescriptor) + { + return tableDescriptor.ReadUInt16LittleEndian(); + } + + + public static void WriteDbfRecordSize(this BinaryBufferWriter tableDescriptor, int headerSize) + { + 
tableDescriptor.WriteUInt16LittleEndian((ushort)headerSize); + } + public static ushort ReadDbfRecordSize(this BinaryBufferReader tableDescriptor) + { + return tableDescriptor.ReadUInt16LittleEndian(); + } + + + public static void WriteDbfEncoding(this BinaryBufferWriter tableDescriptor, Encoding encoding) + { + var ldid = DbfEncoding.GetLanguageDriverId(encoding); + tableDescriptor.WriteByte(ldid); + } + public static Encoding ReadDbfEncoding(this BinaryBufferReader tableDescriptor) + { + var ldid = tableDescriptor.ReadByte(); + return DbfEncoding.GetEncodingForLanguageDriverId(ldid); + } + + + public static void WriteDbaseFieldDescriptor(this BinaryBufferWriter fieldDescriptor, DbfField field, Encoding encoding) + { + encoding = encoding ?? Encoding.UTF8; + var name = field.Name.PadRight(Dbf.MaxFieldNameLength, char.MinValue); // Field name must have empty space zero-filled + + + fieldDescriptor.WriteString(name, Dbf.MaxFieldNameLength, encoding); + fieldDescriptor.WriteNullBytes(1); + fieldDescriptor.WriteDbaseType(field.FieldType); + fieldDescriptor.WriteNullBytes(4); + fieldDescriptor.WriteByte((byte)field.Length); + fieldDescriptor.WriteByte((byte)field.Precision); + fieldDescriptor.WriteNullBytes(14); + } + public static DbfField ReadDbaseFieldDescriptor(this BinaryBufferReader fieldDescriptor, Encoding encoding) + { + encoding = encoding ?? Encoding.UTF8; + + var name = fieldDescriptor.ReadString(Dbf.MaxFieldNameLength, encoding)?.Trim(); + fieldDescriptor.Advance(1); // Reserved (field name terminator) + var type = fieldDescriptor.ReadDbaseType(); + fieldDescriptor.Advance(4); // Reserved + var length = fieldDescriptor.ReadByte(); + var precision = fieldDescriptor.ReadByte(); + fieldDescriptor.Advance(14); // Reserved + + if (type == DbfType.Character) + { + var textField = new DbfCharacterField(name, length, encoding); + textField.Encoding = encoding; + return textField; + } + else if (type == DbfType.Date) + { + return new DbfDateField(name); + } + else if (type == DbfType.Numeric) + { + return new DbfNumericField(name, length, precision); + } + else if (type == DbfType.Float) + { + return new DbfFloatField(name, length, precision); + } + else if (type == DbfType.Logical) + { + return new DbfLogicalField(name); + } + else + { + throw new InvalidDataException("Invalid dBASE III field type: " + type); + } + } + + + private static DbfType ReadDbaseType(this BinaryBufferReader fieldDescriptor) + { + var type = fieldDescriptor.ReadByteChar(); + type = char.ToUpper(type); + + if (type == 'S') + type = 'C'; + + return (DbfType)type; + } + private static void WriteDbaseType(this BinaryBufferWriter fieldDescriptor, DbfType type) + { + fieldDescriptor.WriteByte((byte)type); + } + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfEncoding.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfEncoding.cs new file mode 100644 index 0000000..e49161a --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfEncoding.cs @@ -0,0 +1,149 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + internal static class DbfEncoding + { + /// + /// The Latin1 Encoding + /// + internal static readonly Encoding Latin1 = Encoding.GetEncoding(28591); // ISO-8859-1 + + private static readonly IDictionary LanguageDriverIdToEncoding = new Dictionary(); + private static readonly IDictionary EncodingToLanguageDriverId = new Dictionary(); + + internal static void 
WriteMissingEncodingMessage(string encodingName) + { + Debug.WriteLine("DBF encoding not found: " + encodingName + ". To fix it do the following:"); + Debug.WriteLine("- Add reference to to the System.Text.Encoding.CodePages.dll to your project."); + Debug.WriteLine("- Put that line somewhere in your code:"); + Debug.WriteLine(" Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); "); + } + + static DbfEncoding() + { + // Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); // Kepp this library clear. Do not include unnecessary dependices. + + // https://support.esri.com/en/technical-article/000013192 + + AddLanguageDriverId(0, Encoding.UTF8); // For unknown LDID + AddLanguageDriverId(0x03, Encoding.Default); // OS Default + AddLanguageDriverId(0x57, Encoding.Default); // OS Default + + AddLanguageDriverId(0x01 , 437); // United States MS–DOS + AddLanguageDriverId(0x02 , 850); // International MS–DOS + AddLanguageDriverId(0x08 , 865); // Danish OEM + AddLanguageDriverId(0x09 , 437); // Dutch OEM + AddLanguageDriverId(0x0A , 850); // Dutch OEM* + AddLanguageDriverId(0x0B , 437); // Finnish OEM + AddLanguageDriverId(0x0D , 437); // French OEM + AddLanguageDriverId(0x0E , 850); // French OEM* + AddLanguageDriverId(0x0F , 437); // German OEM + AddLanguageDriverId(0x10 , 850); // German OEM* + AddLanguageDriverId(0x11 , 437); // Italian OEM + AddLanguageDriverId(0x12 , 850); // Italian OEM* + AddLanguageDriverId(0x13 , 932); // Japanese Shift-JIS + AddLanguageDriverId(0x14 , 850); // Spanish OEM* + AddLanguageDriverId(0x15 , 437); // Swedish OEM + AddLanguageDriverId(0x16 , 850); // Swedish OEM* + AddLanguageDriverId(0x17 , 865); // Norwegian OEM + AddLanguageDriverId(0x18 , 437); // Spanish OEM + AddLanguageDriverId(0x19 , 437); // English OEM (Britain) + AddLanguageDriverId(0x1A , 850); // English OEM (Britain)* + AddLanguageDriverId(0x1B , 437); // English OEM (U.S.) 
+ AddLanguageDriverId(0x1C , 863); // French OEM (Canada) + AddLanguageDriverId(0x1D , 850); // French OEM* + AddLanguageDriverId(0x1F , 852); // Czech OEM + AddLanguageDriverId(0x22 , 852); // Hungarian OEM + AddLanguageDriverId(0x23 , 852); // Polish OEM + AddLanguageDriverId(0x24 , 860); // Portuguese OEM + AddLanguageDriverId(0x25 , 850); // Portuguese OEM* + AddLanguageDriverId(0x26 , 866); // Russian OEM + AddLanguageDriverId(0x37 , 850); // English OEM (U.S.)* + AddLanguageDriverId(0x40 , 852); // Romanian OEM + AddLanguageDriverId(0x4D , 936); // Chinese GBK (PRC) + AddLanguageDriverId(0x4E , 949); // Korean (ANSI/OEM) + AddLanguageDriverId(0x4F , 950); // Chinese Big5 (Taiwan) + AddLanguageDriverId(0x50 , 874); // Thai (ANSI/OEM) + AddLanguageDriverId(0x58 , 1252); // Western European ANSI + AddLanguageDriverId(0x59 , 1252); // Spanish ANSI + AddLanguageDriverId(0x64 , 852); // Eastern European MS–DOS + AddLanguageDriverId(0x65 , 866); // Russian MS–DOS + AddLanguageDriverId(0x66 , 865); // Nordic MS–DOS + AddLanguageDriverId(0x67 , 861); // Icelandic MS–DOS + AddLanguageDriverId(0x6A , 737); // Greek MS–DOS (437G) + AddLanguageDriverId(0x6B , 857); // Turkish MS–DOS + AddLanguageDriverId(0x6C , 863); // French–Canadian MS–DOS + AddLanguageDriverId(0x78 , 950); // Taiwan Big 5 + AddLanguageDriverId(0x79 , 949); // Hangul (Wansung) + AddLanguageDriverId(0x7A , 936); // PRC GBK + AddLanguageDriverId(0x7B , 932); // Japanese Shift-JIS + AddLanguageDriverId(0x7C , 874); // Thai Windows/MS–DOS + AddLanguageDriverId(0x86 , 737); // Greek OEM + AddLanguageDriverId(0x87 , 852); // Slovenian OEM + AddLanguageDriverId(0x88 , 857); // Turkish OEM + AddLanguageDriverId(0xC8 , 1250); // Eastern European Windows + AddLanguageDriverId(0xC9 , 1251); // Russian Windows + AddLanguageDriverId(0xCA , 1254); // Turkish Windows + AddLanguageDriverId(0xCB , 1253); // Greek Windows + AddLanguageDriverId(0xCC, 1257); // Baltic Windows + } + + private static void AddLanguageDriverId(byte ldid, Encoding encoding) + { + if (!LanguageDriverIdToEncoding.ContainsKey(ldid)) + LanguageDriverIdToEncoding.Add(ldid, encoding); + + if (!EncodingToLanguageDriverId.ContainsKey(encoding)) + EncodingToLanguageDriverId.Add(encoding, ldid); + } + + private static void AddLanguageDriverId(byte ldid, int codePage) + { + try + { + var encoding = Encoding.GetEncoding(codePage); + AddLanguageDriverId(ldid, encoding); + } + catch (NotSupportedException ex) + { + Debug.WriteLine($"Failed to get codepage for language driver:{ldid}."); + WriteMissingEncodingMessage("CP:" + codePage.ToString()); + Debug.WriteLine(ex); + } + + } + + /// + /// Get the language driver id for an encoding + /// + /// The encoding + /// A language driver id + public static byte GetLanguageDriverId(Encoding encoding) + { + if (EncodingToLanguageDriverId.TryGetValue(encoding, out var ldid)) + return ldid; + + return 0; // 0x03; + } + + + /// + /// Get the Encoding for an language driver id + /// + /// Language Driver ID + /// Encoding + public static Encoding GetEncodingForLanguageDriverId(byte ldid) + { + if (LanguageDriverIdToEncoding.TryGetValue(ldid, out var encoding)) + return encoding; + + return null; + } + + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfReader.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfReader.cs new file mode 100644 index 0000000..8674b88 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfReader.cs @@ -0,0 +1,296 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using 
System.Diagnostics; +using System.Globalization; +using System.IO; +using System.IO.MemoryMappedFiles; +using System.Linq; +using System.Runtime.ExceptionServices; +using System.Text; + + + +namespace NetTopologySuite.IO.Dbf +{ + + + /// + /// Class that allows records in a dbase file to be enumerated. + /// + public class DbfReader : ManagedDisposable, IEnumerable> + { + private int HeaderSize; + private int CurrentIndex = 0; + private readonly BinaryBufferReader Binary = new BinaryBufferReader(Dbf.TableDescriptorSize); + private Stream DbfStream; + + /// + /// Sets or returns the date this file was last updated. + /// + /// + public DateTime LastUpdateDate { get; private set; } + + + /// + /// Return the number of records in the file. + /// + /// + /// RecordCount is saved in DBF file as 32-bit unisigned integer. Record count is limited to 1 Billion (http://www.dbase.com/Knowledgebase/faq/dBASE_Limits_FAQ.html), so it is les than int.MaxValue + public int RecordCount { get; private set; } + + /// + /// Return the length of the records in bytes. + /// + /// + public int RecordSize { get; private set; } + + /// + /// Returns the fields in the dbase file. + /// + public DbfFieldCollection Fields { get; private set; } + + /// + /// Encoding used by dBASE file. + /// + public Encoding Encoding { get; private set; } + + + /// + /// Initializes new instance of the reader. + /// + /// Stream of source DBF file. + /// DBF file encoding or null if encoding should be resolved from DBF reserved bytes. + public DbfReader(Stream stream, Encoding encoding = null) + { + Initialize(stream, encoding); + } + + /// + /// Initializes new instance of the reader. + /// + /// Path to DBF file. + /// DBF file encoding or null if encoding should be resolved from DBF reserved bytes. + public DbfReader(string dbfPath, Encoding encoding = null) + { + encoding = encoding ?? GetEncoding(dbfPath); + try + { + var dbfStream = OpenManagedFileStream(dbfPath, ".dbf", FileMode.Open); + Initialize(dbfStream, encoding); + } + catch + { + DisposeManagedResources(); + throw; + } + } + + private void Initialize(Stream stream, Encoding encoding = null) + { + DbfStream = stream ?? throw new ArgumentNullException("Uninitialized dBASE stream.", nameof(stream)); + + if (DbfStream.Position != 0) + DbfStream.Seek(0, SeekOrigin.Begin); + + Binary.LoadFrom(DbfStream, Dbf.TableDescriptorSize); + + var version = Binary.ReadDbfVersion(); + if (version != Dbf.Dbase3Version) + throw new NotSupportedException("Unsupported dBASE version: " + version); + + LastUpdateDate = Binary.ReadDbfLastUpdateDate(); + RecordCount = Binary.ReadDbfRecordCount(); + HeaderSize = Binary.ReadDbfHeaderSize(); // 2 bytes (table descriptor header + field descriptor headers + header terminator char 0x0d) + RecordSize = Binary.ReadDbfRecordSize(); + Binary.Advance(17); + + Encoding = encoding ?? Binary.ReadDbfEncoding() ?? 
Encoding.UTF8; // null => Try to read encoding from DBF's reserved bytes + Binary.Advance(2); + + // --- File header is done, read field descriptor header now --- + + var fieldsHeaderSize = HeaderSize - Dbf.TableDescriptorSize - 1; // Header ends with header terminator char 0x0d + var fieldCount = fieldsHeaderSize / Dbf.FieldDescriptorSize; + + if (fieldsHeaderSize % Dbf.FieldDescriptorSize != 0) + throw new NotSupportedException("Invalid dBASE III file format."); + + //Binary.TraceToConsole("File Header", 0, Dbf.TableDescriptorSize); + + Binary.LoadFrom(DbfStream, fieldsHeaderSize); + Fields = new DbfFieldCollection(fieldCount); + for (int i = 0; i < fieldCount; i++) + { + Fields.Add(Binary.ReadDbaseFieldDescriptor(Encoding)); + //Binary.TraceToConsole("Field Header: " + Fields[Fields.Count -1].Name, i * Dbf.FieldDescriptorSize, Dbf.FieldDescriptorSize); + } + + // Last byte is a marker for the end of the DBF file header (including field definitions). + // Trond Benum: This fails for some presumeably valid test shapefiles, so don't raise error. + var headerTerminator = DbfStream.ReadByte(); + if (headerTerminator != Dbf.HeaderTerminatorMark) + { + Debug.WriteLine("Invalid dBASE III header terminator mark: " + headerTerminator); + } + + if (DbfStream.Position != HeaderSize) + { + //reader.BaseStream.Seek(headerSize, SeekOrigin.Begin); + throw new FileLoadException("Reading dBASE III file header failed."); + } + } + + + internal void Restart() + { + DbfStream.Seek(HeaderSize, SeekOrigin.Begin); + } + + + private Encoding GetEncoding(string dbfPath) + { + var cpgText = ReadFileText(dbfPath, ".cpg"); // Esri + + if (string.IsNullOrEmpty(cpgText)) + cpgText = ReadFileText(dbfPath, ".cst"); // GeoServer.org + + if (string.IsNullOrEmpty(cpgText)) + return null; + + try + { + return Encoding.GetEncoding(cpgText); + } + catch (Exception) + { + DbfEncoding.WriteMissingEncodingMessage(cpgText); + return null; + } + } + + private string ReadFileText(string filePath, string fileExtension) + { + filePath = Path.ChangeExtension(filePath, fileExtension); + if (!File.Exists(filePath)) + return null; + + return File.ReadAllText(filePath).Trim(); + } + + /// + /// Reads field values from underlying stream and advances the enumerator to the next record. + /// + /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. + /// + public bool Read(out bool deleted) + { + BinaryBuffer.TraceToConsole("=== record " + CurrentIndex + " Stream.Position: " + DbfStream.Position + " " + new string('=', 50)); + + if (CurrentIndex >= RecordCount) + { + deleted = false; + return false; + } + + Binary.LoadFrom(DbfStream, RecordSize); + + var deletedFlag = Binary.ReadByte(); + deleted = deletedFlag == Dbf.DeletedRecordMark; + + for (int i = 0; i < Fields.Count; i++) + { + BinaryBuffer.TraceToConsole(Fields[i].Name + " BinaryBuffer.Position: " + Binary.Position); + Fields[i].ReadValue(Binary); + } + + CurrentIndex++; + return true; + } + + + /// + /// Reads values from underlying stream and advances the enumerator to the next record. + /// + /// Array of field values. If null specified new array will be created. + /// Indicates if the reacord was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next element; + /// false if the enumerator has passed the end of the collection. 
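+        /// A minimal read-loop sketch (the file path and the "NAME" field are illustrative only, not part of this API):
+        ///     using (var dbf = new DbfReader(@"C:\data\points.dbf"))
+        ///     {
+        ///         while (dbf.Read(out var values, out bool deleted))
+        ///         {
+        ///             if (deleted)
+        ///                 continue;
+        ///             Console.WriteLine(values["NAME"]);
+        ///         }
+        ///     }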
+ /// + public bool Read(out IReadOnlyDictionary values, out bool deleted) + { + if (!Read(out deleted)) + { + //values = null; // This would cause recreating array in next iteration + values = DbfField.EmptyFieldValues; + return false; + } + + values = Fields.GetValues(); + return true; + } + + [Conditional("DEBUG_BINARY")] + internal void GetBinaryDiff(DbfReader other, List differences) + { + Binary.GetBinaryDiff("DBF", other.Binary, differences); + } + + /// + protected override void DisposeManagedResources() + { + Binary?.Reset(); + base.DisposeManagedResources(); + } + + IEnumerator> IEnumerable>.GetEnumerator() + { + return new DbfEnumerator(this); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return new DbfEnumerator(this); + } + + private class DbfEnumerator : IEnumerator> + { + private readonly DbfReader Owner; + public IReadOnlyDictionary Current { get; private set; } + object IEnumerator.Current => Current; + + public DbfEnumerator(DbfReader owner) + { + Owner = owner; + } + + public void Reset() + { + Owner.Restart(); + } + + public bool MoveNext() + { + var succeed = Owner.Read(out var fieldValues, out var deleted); + if (deleted) + { + return MoveNext(); + } + + Current = fieldValues; + return succeed; + } + + public void Dispose() + { + // Nothing to dispose + } + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfType.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfType.cs new file mode 100644 index 0000000..61288a4 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfType.cs @@ -0,0 +1,46 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + + + /// + /// dBASE field type + /// + public enum DbfType : byte + { + /// + /// Character + /// + Character = (byte)'C', + + /// + /// Date - 8 bytes (stored as a string in the format YYYYMMDD). + /// + Date = (byte)'D', + + /// + /// Numeric - positive or negative numbers. Number stored as a string. Can store integer and decimal numbers but Esri software uses it only for integer values. + /// + Numeric = (byte)'N', + + /// + /// Positive or negative numbers. Identical to Numeric; maintained for compatibility. Used by Esri software. + /// + Float = (byte)'F', + + /// + /// Logical - one character ('T','F') + /// + Logical = (byte)'L', + + // + // Binary - not dBASE III. This will hold the WKB for a geometry object. + // + // Binary = (byte)'B', + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfWriter.cs new file mode 100644 index 0000000..21b9408 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/DbfWriter.cs @@ -0,0 +1,207 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.IO.MemoryMappedFiles; +using System.Linq; +using System.Runtime.ExceptionServices; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + + + /// + /// Class that allows records in a dbase file to be enumerated. + /// + public class DbfWriter : ManagedDisposable + { + private readonly BinaryBufferWriter Binary = new BinaryBufferWriter(Dbf.TableDescriptorSize); + private Stream DbfStream; + + /// + /// Returns the fields in the dbase file. + /// + public DbfFieldCollection Fields { get; private set; } + + /// + /// Encoding of dBASE file + /// + public Encoding Encoding { get; private set; } + + /// + /// Return the length of the records in bytes. 
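+        /// (Computed as the sum of all field lengths plus one byte for the record deletion flag.)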
+ /// + /// + public int RecordSize { get; private set; } + + + /// + /// Return the number of records in the file. + /// + /// + /// RecordCount is saved in DBF file as 32-bit unisigned integer. Record count is limited to 1 Billion (http://www.dbase.com/Knowledgebase/faq/dBASE_Limits_FAQ.html), so it is les than int.MaxValue + public int RecordCount { get; private set; } = 0; + + + /// + /// Initializes new DbaseStreamReader + /// + /// Stream of source DBF file. + /// dBASE field definitions. + /// DBF file encoding or null if encoding should be resolved from DBF reserved bytes. + public DbfWriter(Stream stream, IReadOnlyList fields, Encoding encoding) + { + Encoding = encoding ?? Encoding.UTF8; + IntializeFields(fields); + WriteHeader(stream); + } + + + /// + /// Initializes new DbaseStreamReader + /// + /// Path to DBF file. + /// dBASE field definitions. + /// DBF file encoding or null if encoding should be resolved from DBF reserved bytes. + public DbfWriter(string dbfPath, IReadOnlyList fields, Encoding encoding) + { + Encoding = encoding ?? Encoding.UTF8; + IntializeFields(fields); + WriteCpgEncoding(dbfPath, encoding); + try + { + WriteHeader(OpenManagedFileStream(dbfPath, ".dbf", FileMode.Create)); + } + catch + { + DisposeManagedResources(); + throw; + } + } + + private void IntializeFields(IReadOnlyList fields) + { + if (fields == null || fields.Count < 1) + throw new ArgumentException("dBASE file must contain at least one field.", nameof(fields)); + + if (fields.Count > Dbf.MaxFieldCount) + throw new ArgumentException($"dBASE file must contain no more than {Dbf.MaxFieldCount} fields.", nameof(fields)); + + Fields = new DbfFieldCollection(fields.Count); + for (int i = 0; i < fields.Count; i++) + { + if (fields[i] is DbfCharacterField textField) + textField.Encoding = Encoding; + Fields.Add(fields[i]); + } + } + + private void WriteHeader(Stream stream) + { + DbfStream = stream ?? throw new ArgumentNullException("Uninitialized dBASE stream.", nameof(stream)); + if (DbfStream.Position != 0) + DbfStream.Seek(0, SeekOrigin.Begin); + + var headerSize = Dbf.TableDescriptorSize + Dbf.FieldDescriptorSize * Fields.Count + 1; // 2 bytes (table descriptor header + field descriptor headers + header terminator char 0x0d) + if (headerSize > UInt16.MaxValue) + throw new InvalidDataException("dBASE III header size exceeded " + UInt16.MaxValue.ToString() + " bytes."); + + RecordSize = Fields.Sum(f => f.Length) + 1; // Sum of lengths of all fields + 1 (deletion flag) + + // Table descriptor + Binary.Clear(); + Binary.WriteDbfVersion(Dbf.Dbase3Version); + Binary.WriteDbfLastUpdateDate(DateTime.Now); //.WriteBytes(GetLastUpdateDate()); + + Binary.WriteDbfRecordCount(1); // Write dummy recordCount. This will be replaced at the end of writing. 
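+            // The real count is patched in FinalizeWriting(): after the last record the writer seeks
+            // back to byte offset 4 (1 version byte + 3 last-update-date bytes) and rewrites this
+            // 32-bit little-endian value with the final RecordCount.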
+ Binary.WriteDbfHeaderSize(headerSize); + Binary.WriteDbfRecordSize(RecordSize); + Binary.WriteNullBytes(17); + Binary.WriteDbfEncoding(Encoding); + Binary.WriteNullBytes(2); + + // write field description array + foreach (var field in Fields) + { + Binary.WriteDbaseFieldDescriptor(field, Encoding); + } + + // Now header BinaryDataWriter should be at last byte position + Binary.WriteByte(Dbf.HeaderTerminatorMark); + + Binary.CopyTo(DbfStream); + } + + + private void WriteCpgEncoding(string dbfPath, Encoding encoding) + { + if (encoding == null) + return; + + string cpgPath = Path.ChangeExtension(dbfPath, ".cpg"); + var cpgText = encoding.WebName.ToUpperInvariant(); + File.WriteAllText(cpgPath, cpgText); + } + + + /// + /// Writes field values to the underlying stream and advances the position to the next record. + /// + /// + /// true if the enumerator was successfully advanced to the next element; + /// false if the enumerator has passed the end of the collection. + /// + public void Write() + { + Binary.Clear(); + Binary.WriteByte(Dbf.ValidRecordMark); + + for (int i = 0; i < Fields.Count; i++) + { + Fields[i].WriteValue(Binary); + } + RecordCount++; + Binary.CopyTo(DbfStream); + } + + + /// + /// Writes record values to the underlying stream and advances the position to the next record. + /// + /// + /// true if the enumerator was successfully advanced to the next element; + /// false if the enumerator has passed the end of the collection. + /// + public void Write(IReadOnlyDictionary values) + { + Fields.SetValues(values); + Write(); + } + + private void FinalizeWriting() + { + if (DbfStream == null || DbfStream.Position < Dbf.TableDescriptorSize + Dbf.FieldDescriptorSize) // DBF must have at least one field + return; + + DbfStream.WriteByte(Dbf.EndOfFileMark); + + DbfStream.Seek(4, SeekOrigin.Begin); + Binary.Clear(); + Binary.WriteDbfRecordCount(RecordCount); + Binary.CopyTo(DbfStream); + } + + /// + protected override void DisposeManagedResources() + { + FinalizeWriting(); + Binary?.Reset(); + base.DisposeManagedResources(); // FinalizeWriting() is using underlying file streams. Dispose them at the end. + } + } + + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfCharacterField.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfCharacterField.cs new file mode 100644 index 0000000..540e963 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfCharacterField.cs @@ -0,0 +1,83 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + + /// + /// Character field definition. + /// + public class DbfCharacterField : DbfField + { + private static readonly int MaxFieldLength = 254; + + /// + /// Initializes a new instance of the field class. + /// + /// Field name. + /// Field length. + public DbfCharacterField(string name, int length = 254) : this(name, length, null) + { + } + + internal DbfCharacterField(string name, int length, Encoding encoding) + : base(name, DbfType.Character, Math.Min(length, MaxFieldLength), 0) + { + _encoding = encoding; + } + + /// + /// String representation of current field value. 
+ /// + public string StringValue { get; set; } + + /// + public override object Value + { + get { return StringValue; } + set { StringValue = value?.ToString(); } + } + + internal override void ReadValue(BinaryBufferReader recordData) + { + StringValue = recordData.ReadString(Length, Encoding)?.TrimEnd(); + } + + internal override void WriteValue(BinaryBufferWriter recordData) + { + if (StringValue == null) + { + recordData.WriteNullBytes(Length); + } + else if (StringValue.Length < this.Length) + { + // Length: 4 + // 1.2 => "1.2 "; + recordData.WriteString(StringValue.PadRight(this.Length, ' '), Length, Encoding); // PadRigth for text + } + else + { + recordData.WriteString(StringValue, Length, Encoding); + } + } + + private Encoding _encoding = null; + internal Encoding Encoding + { + get { return _encoding ?? Encoding.UTF8; } + set + { + if (value == null) + throw new ArgumentException($"Field {GetType().Name} cannot have unassigned encoding."); + + if (_encoding != null && _encoding.CodePage != value.CodePage) + throw new ArgumentException($"Cannot change field {Name} ecnoding. It is already assinged to other {nameof(DbfWriter)} with different encoding."); + + _encoding = value; + } + } + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfDateField.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfDateField.cs new file mode 100644 index 0000000..3aa8d2b --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfDateField.cs @@ -0,0 +1,81 @@ +using System; +using System.IO; +using System.Collections.Generic; +using System.Text; +using System.Globalization; + +namespace NetTopologySuite.IO.Dbf +{ + + /// + /// Date field definition. + /// + public class DbfDateField : DbfField + { + private static readonly int FieldLength = 8; // This width is fixed and cannot be changed + private static readonly string DateFormat = "yyyyMMdd"; + private static readonly byte[] DefaultValue = GetBytes(' ', FieldLength); + + /// + /// Initializes a new instance of the field class. + /// + /// Field name. + public DbfDateField(string name) + : base(name, DbfType.Date, FieldLength, 0) + { + } + + /// + /// Date representation of current field value. + /// + public DateTime? DateValue { get; set; } + + /// + public override object Value + { + get { return DateValue; } + set { DateValue = (DateTime?)value; } + } + + + internal override void ReadValue(BinaryBufferReader recordData) + { + var valueText = recordData.ReadString(Length, Encoding.ASCII)?.Trim(); + if (string.IsNullOrEmpty(valueText)) + { + DateValue = null; + } + else + { + DateValue = DateTime.ParseExact(valueText, DateFormat, CultureInfo.InvariantCulture); + } + } + + internal override void WriteValue(BinaryBufferWriter recordData) + { + if (DateValue.HasValue) + { + recordData.WriteString(DateValue.Value.ToString(DateFormat, CultureInfo.InvariantCulture), Length, Encoding.ASCII); + } + else + { + // ArcMap 10.6 can create different null date representation in one .shp file! + // My test file pt_utf8.shp have field named 'date' with such binary data: + // === record 0 Stream.Position: 673 + // date BinaryBuffer.Position: 183 + // ReadString(191): '▬▬▬▬▬▬▬▬' // '▬' == char.MinValue == (char)0 + // === record 1 Stream.Position: 1145 + // date BinaryBuffer.Position: 183 + // ReadString(191): ' ' + + // According to https://desktop.arcgis.com/en/arcmap/latest/manage-data/shapefiles/geoprocessing-considerations-for-shapefile-output.htm + // Null value substitution for Date field is 'Stored as zero'. 
Storing zero (null) values is also consistent with Numeric and Float field. + + //recordData.WriteBytes(DefaultValue); + recordData.WriteNullBytes(Length); + } + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfField.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfField.cs new file mode 100644 index 0000000..979d64f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfField.cs @@ -0,0 +1,178 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + + + /// + /// dBASE field definition. + /// + public abstract class DbfField + { + + /// + /// Field Name. + /// + public string Name { get; } + + /// + /// Field Type. + /// + public DbfType FieldType { get; } + + /// + /// Length of the data in bytes. + /// + public int Length { get; } + + /// + /// Field decimal count in Binary, indicating where the decimal is. + /// + public int Precision { get; } + + + /// + /// Initializes a new instance of the field class. + /// + /// Field name (max 10 characters). + /// Field type. + /// Field length. + /// Decimal places count. + internal DbfField(string name, DbfType type, int length, int precision) + { + if (string.IsNullOrEmpty(name)) + throw new ArgumentNullException("Empty dBASE field name.", nameof(name)); + + if (name.Length > Dbf.MaxFieldNameLength) + throw new ArgumentException($"dBASE III field name cannot be longer than {Dbf.MaxFieldNameLength} characters.", nameof(name)); + + // ArcMap does support number at the begining. + //var beginsWithLetter = IsValidFieldNameLetter(name[0]); + //if (!beginsWithLetter) + // throw new ArgumentNullException($"Invalid dBASE field name: {name}. Field name must begin with a letter.", nameof(name)); + + + /* + foreach (var c in name) + { + if (!IsValidFieldNameChar(c)) + throw new ArgumentNullException($"Invalid dBASE field name: {name}. Field name must contain only letter, number or undersocre (_) character.", nameof(name)); + } + */ + + if (length < 1) + throw new ArgumentException($"Ivalid dBASE field length: {length}.", nameof(length)); + + if (precision < 0) + precision = 0; // throw new ArgumentException($"Ivalid dBASE III field decimal places count: {precision}.", nameof(precision)); + + Name = name; + FieldType = type; + Length = length; + Precision = precision; + } + + private bool IsValidFieldNameChar(char c) + { + return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || (c >= '0' && c <= '9'); + } + + /// + /// + /// + /// + public static DbfField ShapeField() + { + return null; // new DbaseBinaryField("SHAPE" new DbaseField("Geometry", 'B', 0, 0); + } + + /// + /// + /// + /// + public static DbfField IdField() + { + return null; // new DbaseField("Row", 'I', 0, 0); + } + + /// + public override string ToString() + { + return string.Format("{0} | {1} ({2}, {3})", Name.PadLeft(10), FieldType, Length, Precision); + } + + internal Exception GetFieldValueError(object value, string additionalDescription = "") + { + return new ArgumentException($"Invalid {Name} [{FieldType}:{Length}] field value: {value}." + additionalDescription); + } + + internal static byte[] GetBytes(char c, int length) + { + return Enumerable.Repeat((byte)c, length).ToArray(); + } + + internal abstract void ReadValue(BinaryBufferReader recordData); + internal abstract void WriteValue(BinaryBufferWriter recordData); + + /// + /// Current field value. 
+ /// + /// + /// is used by shapefile readers and writers to hold current record field value. + /// + public abstract object Value { get; set; } + + + + internal static readonly IReadOnlyDictionary EmptyFieldValues = new ReadOnlyDictionary(new Dictionary()); + + /// + /// Creates dBASE field determined using specified value. + /// + /// Field name. + /// Field value type. + /// dBase field definition. + public static DbfField Create(string name, Type type) + { + if (type == null) + throw new ArgumentException("Cannot determine dBASE field type for value."); + + if (type == typeof(string)) + return new DbfCharacterField(name); + + if (type == typeof(bool) || type == typeof(bool?)) + return new DbfLogicalField(name); + + if (type == typeof(DateTime) || type == typeof(DateTime?)) + return new DbfDateField(name); + + if (type == typeof(sbyte) || type == typeof(sbyte?) || type == typeof(byte) || type == typeof(byte?)) + return new DbfNumericField(name, 4, 0); + + if (type == typeof(short) || type == typeof(short?) || type == typeof(ushort) || type == typeof(ushort?)) + return new DbfNumericField(name, 6, 0); + + if (type == typeof(int) || type == typeof(int?) || type == typeof(uint) || type == typeof(uint?)) + return new DbfNumericField(name, 11, 0); + + if (type == typeof(long) || type == typeof(long?) || type == typeof(ulong) || type == typeof(ulong?)) + return new DbfNumericField(name, DbfNumericField.MaxFieldLength, 0); + + if (type == typeof(decimal) || type == typeof(decimal?)) + return new DbfNumericField(name, DbfNumericField.MaxFieldLength, DbfNumericField.MaxFieldPrecision); + + if (type == typeof(double) || type == typeof(float) || type == typeof(double?) || type == typeof(float?)) + return new DbfFloatField(name); + + throw new ArgumentException($"Unsupported dBASE field value: {type} ({type.GetType().Name})"); + } + + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfFieldCollection.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfFieldCollection.cs new file mode 100644 index 0000000..6f201e6 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfFieldCollection.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + /// + /// Collection of dBASE field definitions. + /// + public class DbfFieldCollection : IReadOnlyList + { + + internal DbfFieldCollection(int count) + { + Fields = new List(count); + } + + private readonly List Fields; + private readonly Dictionary FieldDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + /// + /// Gets the field at the specified index. + /// + /// The zero-based index of the field to get. + /// The field at the specified index. + public DbfField this[int index] => Fields[index]; + + /// + /// Gets the field with specified name. + /// + /// The of the field. + /// The field with specified name or null if name was not found. + public DbfField this[string name] + { + get + { + if (FieldDictionary.TryGetValue(name, out var field)) + return field; + return null; + } + } + + + /// + /// Gets the number of fields contained in the collection. 
+ /// + public int Count => Fields.Count; + + IEnumerator IEnumerable.GetEnumerator() + { + return Fields.GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return Fields.GetEnumerator(); + } + + internal void Add(DbfField field) + { + Fields.Add(field); + FieldDictionary.Add(field.Name, field); + } + + /// + /// Reads current fields values. + /// + /// Array of field values. + public IReadOnlyDictionary GetValues() + { + var values = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var field in Fields) + { + values[field.Name] = field.Value; + } + return new ReadOnlyDictionary(values); + } + + /// + /// Sets current filed values. + /// + /// + public void SetValues(IReadOnlyDictionary values) + { + if (values == null) + throw new ArgumentNullException("dBASE record must contain values.", nameof(values)); + + if (values.Count < Fields.Count) + throw new ArgumentNullException("Invalid dBASE record value count: " + values.Count + ". Expected: " + Fields.Count, nameof(values)); + + foreach (var field in Fields) + { + if (values.TryGetValue(field.Name, out var value)) + { + field.Value = value; + } + else + { + throw new ArgumentException("Required dBASE attribute value is missing for '" + field.Name + "' field."); + } + } + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfFloatField.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfFloatField.cs new file mode 100644 index 0000000..9f21718 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfFloatField.cs @@ -0,0 +1,86 @@ +using System; +using System.IO; +using System.Collections.Generic; +using System.Text; +using System.Globalization; + +namespace NetTopologySuite.IO.Dbf +{ + + /// + /// Numeric field definition. + /// + public class DbfFloatField : DbfNumericField + { + // Exponential notation requires 8 additional digits: + // 1 digit for plus/minus mark "-" + // 1 digit for first number digit "1" + // 1 digit for decimal separator "." + // 5 digits for exponent component "e+004" + // -1.2346e+004 => Length: 12, DecimalDigits: 4 => 12 - 4 = 8 + private static readonly int ExponentialNotationDigitCount = 8; + + private static readonly int DefaultFieldLength = 19; // That uses ArcMap 10.6 when creates 'Double' field. + private static readonly int DefaultFieldPrecision = 11; // That uses ArcMap 10.6 when creates 'Double' field. + + private readonly string NumberFormat; + + + /// + /// Initializes a new instance of the field class. + /// + /// Field name. + public DbfFloatField(string name) : this(name, DefaultFieldLength, DefaultFieldPrecision) + { + } + + internal DbfFloatField(string name, int length, int precision) + : base(name, DbfType.Float, length, precision) + { + // Esri uses exponential notation for float fields: + // -12345.6789 (E10) => -1.2345678900E+004 + // -12345.6789 (e4) => -1.2346e+004 + + // Esri writes specific precision to field definition but then ignores it by using exponential notation. + // Exponential notation expresses numbers placing decimal separator always after first non-zero digit. 
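+            // For example, with the default Length of 19 this yields the format string "e11",
+            // so -12345.6789 is written as "-1.23456789000e+004" (19 characters in total).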
+ NumberFormat = "e" + (Length - ExponentialNotationDigitCount).ToString(); + } + + /// + public override object Value + { + get { return NumericValue; } + set + { + if (value == null) + { + NumericValue = null; + } + else + { + NumericValue = Convert.ToDouble(value); + } + } + } + + /// + protected override double StringToNumber(string number) + { + return double.Parse(number, CultureInfo.InvariantCulture); + } + + /// + protected override string NumberToString(double number) + { + var numberString = number.ToString(NumberFormat, CultureInfo.InvariantCulture); +#if DEBUG + // Esri uses two digits in exponential component "e+01" and ads one space at the begining. " -1.11111000000e+02" + // .NET always uses three digits in exponential component "e+001" (there is no space for space). "-1.11111000000e+002" + numberString = numberString.Replace("e+0", "e+").Replace("e-0", "e-"); +#endif + return numberString; + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfLogicalField.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfLogicalField.cs new file mode 100644 index 0000000..e8ef4d1 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfLogicalField.cs @@ -0,0 +1,83 @@ +using System; +using System.IO; +using System.Linq; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Dbf +{ + /// + /// Logical field definition. + /// + public class DbfLogicalField : DbfField + { + private readonly static byte DefaultValue = (byte)' '; // (byte)'?'; Initialized to 0x20 (space) otherwise T or F (http://www.dbase.com/KnowledgeBase/int/db7_file_fmt.htm) + private readonly static byte TrueValue = (byte)'T'; + private readonly static byte FalseValue = (byte)'F'; + private readonly static int FieldLength = 1; // This width is fixed and cannot be changed + + private readonly static string TrueValues = "TtYy"; + private readonly static string FalseValues = "FfNn"; + + + /// + /// Initializes a new instance of the field class. + /// + /// Field name. + public DbfLogicalField(string name) + : base(name, DbfType.Logical, FieldLength, 0) + { + } + + /// + /// Logical representation of current field value. + /// + public bool? LogicalValue { get; set; } + + /// + public override object Value + { + get { return LogicalValue; } + set { LogicalValue = (bool?)value; } + } + + internal override void ReadValue(BinaryBufferReader recordData) + { + var logicalValue = recordData.ReadByteChar(); + + if (TrueValues.Contains(logicalValue)) + { + LogicalValue = true; + } + else if (FalseValues.Contains(logicalValue)) + { + LogicalValue = false; + } + else + { + LogicalValue = null; + } + } + + + internal override void WriteValue(BinaryBufferWriter recordData) + { + if (!LogicalValue.HasValue) + { + recordData.WriteByte(DefaultValue); + } + else if (LogicalValue.Value) + { + recordData.WriteByte(TrueValue); + } + else + { + recordData.WriteByte(FalseValue); + } + } + + } + + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfNumericField.cs b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfNumericField.cs new file mode 100644 index 0000000..6c2cc81 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Dbf/Fields/DbfNumericField.cs @@ -0,0 +1,231 @@ +using System; +using System.IO; +using System.Collections.Generic; +using System.Text; +using System.Globalization; + +namespace NetTopologySuite.IO.Dbf +{ + + /// + /// Numeric field definition. 
+ /// + public abstract class DbfNumericField : DbfField where T : struct, IConvertible, IFormattable + { + internal static readonly int MaxFieldLength = 19; // Decimal point and any sign is included in field width, if present. + internal static readonly int MaxFieldPrecision = 17; // dBASE specs states it could be max 15, but Esri uses up to 17. + + internal DbfNumericField(string name, DbfType type, int length, int precision) + : base(name, type, GetProperLength(length), GetProperPrecision(length, precision)) + { + } + + /// + /// Initializes a new instance of the field class. + /// + /// Field name. + /// Field length. + /// Decmial places count. + protected DbfNumericField(string name, int length, int precision) + : this(name, DbfType.Numeric, length, precision) + { + + } + + /// + /// Numeric representation of current field value. + /// + public T? NumericValue { get; set; } + + internal static int GetProperLength(int length) + { + if (length < 1) + return 1; + return Math.Min(length, MaxFieldLength); + } + + private static int GetProperPrecision(int length, int precision) + { + if (precision < 0) + return 0; + + length = GetProperLength(length); + if (length < 2) + return 0; + + precision = Math.Min(precision, length - 2); // -0.12345 => legth: 8, decimalCount: 5 => should be (length - 3). But Esri allows decimalCount: 6 => (length - 2). + return Math.Min(precision, MaxFieldPrecision); + } + + internal override void ReadValue(BinaryBufferReader recordData) + { + var valueText = recordData.ReadString(Length, Encoding.ASCII)?.Trim(); + if (string.IsNullOrEmpty(valueText)) + { + NumericValue = null; + return; + } + + NumericValue = StringToNumber(valueText); + } + + internal override void WriteValue(BinaryBufferWriter recordData) + { + if (!NumericValue.HasValue) + { + recordData.WriteNullBytes(Length); + return; + } + + var valueText = NumberToString(NumericValue.Value); + + // Length: 4 + // 1.2 => " 1.2"; + if (valueText.Length < this.Length) + { + recordData.WriteString(valueText.PadLeft(this.Length, ' '), Length, Encoding.ASCII); // PadLeft for Values + } + else + { + if (valueText.Length > this.Length) + { + throw GetFieldValueError(valueText, "Field value out of range."); + } + recordData.WriteString(valueText, Length, Encoding.ASCII); + } + } + + + /// + /// Converts the number to its string representation. + /// + /// Number to convert. + /// A string that is equivalent to the specified numer. + protected abstract string NumberToString(T number); + + /// + /// Converts the string representation of a number to its number equivalent. + /// + /// A string that contains a number to convert. + /// A number that is equivalent to the specified string representation of numeric value. + protected abstract T StringToNumber(string number); + } + + + + /// + public class DbfNumericField : DbfNumericField + { + internal static readonly int MaxInt32FieldLength = int.MaxValue.ToString().Length; // 10 (int.MinValue => -2147483648 => 11, but Esri uses 10) + private readonly string NumberFormat = "0"; // For integers + + /// + public DbfNumericField(string name, int length, int precision) + : base(name, DbfType.Numeric, length, precision) + { + if (Precision > 0) + NumberFormat = "0." + new string('0', Precision); + } + + /// + /// Initializes a new instance of the integer field class. + /// + /// Field name. 
+ public DbfNumericField(string name) + : this(name, MaxInt32FieldLength, 0) + { + } + + /// + protected override decimal StringToNumber(string number) + { + // https://desktop.arcgis.com/en/arcmap/latest/manage-data/shapefiles/geoprocessing-considerations-for-shapefile-output.htm + // Null values are not supported in shapefiles. If a feature class containing nulls is converted to a shapefile, or a database table is converted to a dBASE file, the null values will be changed: + // Number: -1.7976931348623158e+308 (IEEE standard for the maximum negative value) + + return decimal.Parse(number, CultureInfo.InvariantCulture); + } + + /// + protected override string NumberToString(decimal number) + { + var valueString = number.ToString(NumberFormat, CultureInfo.InvariantCulture); + + var decSepPos = valueString.IndexOf('.'); + + // Length: 4 + // 12345.6 + if (decSepPos >= Length) + { + throw GetFieldValueError(valueString, "Field value out of range."); + } + + // Length: 4 + // 123456 + if (decSepPos < 0 && valueString.Length > Length) + { + throw GetFieldValueError(valueString, "Field value out of range."); + } + + if (valueString.Length > Length) + { + valueString = valueString.Substring(0, Length); + } + + return valueString; + } + + /// + public override object Value + { + get { return NumericValue; } + set + { + if (value == null) + { + NumericValue = null; + } + else + { + NumericValue = Convert.ToDecimal(value); + } + } + } + + /// + /// representation of field value. + /// + public int? Int32Value + { + get => ConvertValue(Convert.ToInt32); + set => NumericValue = ConvertToDecimal(value); + } + + /// + /// representation of field value. + /// + public double? DoubleValue + { + get => ConvertValue(Convert.ToDouble); + set => NumericValue = ConvertToDecimal(value); + } + + private T? ConvertValue(Func converTo) where T : struct + { + if (NumericValue.HasValue) + return converTo(NumericValue.Value); + else + return null; + } + + private decimal? 
ConvertToDecimal(IConvertible value) + { + if (value == null) + return null; + else + return value.ToDecimal(CultureInfo.InvariantCulture); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/NetTopologySuite.IO.Esri.Core.csproj b/src/NetTopologySuite.IO.Esri.Core/NetTopologySuite.IO.Esri.Core.csproj new file mode 100644 index 0000000..8243411 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/NetTopologySuite.IO.Esri.Core.csproj @@ -0,0 +1,19 @@ + + + + netstandard2.0 + NetTopologySuite.IO.Esri + true + + + + true + + + + true + TRACE;DEBUG + + + + diff --git a/src/NetTopologySuite.IO.Esri.Core/Primitives/ByteExtensions.cs b/src/NetTopologySuite.IO.Esri.Core/Primitives/ByteExtensions.cs new file mode 100644 index 0000000..4c325bb --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Primitives/ByteExtensions.cs @@ -0,0 +1,40 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO +{ + internal static class ByteExtensions + { + public static string ToText(this byte b) + { + return b.ToChar() + " " + b.ToString().PadLeft(3); + } + + public static char ToChar(this byte b) + { + if (b == 0) + return '▬'; + + if (b < 32) + return '¤'; // ⌂ ↔ ¤ + + return (char)b; + } + + public static string CompareToText(this byte b1, byte b2) + { + var sb = new StringBuilder(20); + + sb.Append(b1.ToString().PadLeft(3)); + sb.Append(" | "); + sb.Append(b2.ToString().PadLeft(3)); + + sb.Append(" '" + b2.ToText()); + sb.Append("' | '"); + sb.Append(b2.ToText() + "'"); + + return sb.ToString(); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Primitives/ManagedDisposable.cs b/src/NetTopologySuite.IO.Esri.Core/Primitives/ManagedDisposable.cs new file mode 100644 index 0000000..676ea62 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Primitives/ManagedDisposable.cs @@ -0,0 +1,90 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.IO.MemoryMappedFiles; +using System.Text; + +namespace NetTopologySuite.IO +{ + /// + /// Base class witha a mechanism for releasing managed resources. + /// + public abstract class ManagedDisposable : IDisposable + { + private bool IsDisposed = false; + private readonly List ManagedResources = new List(); + + + internal Stream OpenManagedFileStream(string path, string ext, FileMode mode) + { + path = Path.ChangeExtension(path, ext); + + if (mode != FileMode.Open && mode != FileMode.Create) + throw new ArgumentException(nameof(OpenManagedFileStream) + "() suports only " + nameof(FileMode.Open) + " and " + nameof(FileMode.Create) + " file modes.", nameof(mode)); + + if (mode == FileMode.Open && !File.Exists(path)) + throw new FileNotFoundException("File not found.", path); + + FileAccess access = (mode == FileMode.Open) ? FileAccess.Read : FileAccess.Write; + FileShare share = (mode == FileMode.Open) ? FileShare.Read : FileShare.None; + FileOptions options = (mode == FileMode.Open) ? FileOptions.SequentialScan : FileOptions.None; + + + var stream = new FileStream(path, mode, access, share, 4096, options); + AddManagedResource(stream); + return stream; + } + + private void Dispose(bool disposing) + { + if (IsDisposed) + return; + + if (disposing) + { + // Dispose managed state (managed objects) + DisposeManagedResources(); + } + + IsDisposed = true; + } + + /// + /// Adds a resource which will be disposed at the end of life of this instance. 
+ /// + /// + protected void AddManagedResource(IDisposable resource) + { + ManagedResources.Add(resource); + } + + + /// + /// Disposes managed objects assosiated with this instance. + /// + protected virtual void DisposeManagedResources() + { + for (int i = ManagedResources.Count - 1; i >= 0; i--) + { + try + { + ManagedResources[i]?.Dispose(); + } + catch (Exception ex) + { + Debug.WriteLine(GetType().Name + " failed to dispose managed resources."); + Debug.WriteLine(ex.Message); + } + } + } + + /// + public void Dispose() + { + // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method + Dispose(disposing: true); + GC.SuppressFinalize(this); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/README.md b/src/NetTopologySuite.IO.Esri.Core/README.md new file mode 100644 index 0000000..179170a --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/README.md @@ -0,0 +1,267 @@ +# NetTopologySuite.IO.Esri.Core + +This library provides forward-only readers and writers for [Esri shapefiles](https://support.esri.com/en/white-paper/279). +It is vanilla .NET Standard 2.0 library without any dependencies. + + +## Dbf + +Shapefile feature attributes are held in a dBASE format file (.dbf extension). Each attribute record +has a one-to-one relationship with the associated shape record. Classes whose name starts +with `Dbf` (eg. `DbfReader`) provide direct access to dBASE files. + +```c# +using (var dbf = new DbfReader(dbfPath)) +{ + foreach (var field in dbf.Fields) + { + Console.WriteLine(field); + } + + foreach (var record in dbf) + { + Console.WriteLine("Record ID: " + record["Id"]); + foreach (var attr in record) + { + Console.WriteLine($" {attr.Key}: {attr.Value}"); + } + } +} +``` + + +## Shp + +The main file (.shp extension) is a variable-record-length file in which each record describes +a shape with a list of its vertices. Classes whose name starts with `Shp` (eg. `ShpPointReader`) +provide direct access to main file. + +```c# +using (var shpStream = File.OpenRead(shpPath)) +using (var shp = new ShpPointReader(shpStream)) +{ + Console.WriteLine(shp.ShapeType); + while (shp.Read()) + { + Console.WriteLine(shp.Shape); + } +} +``` + + +## Shx + +The index file (.shx extension) stores the offset and content length for each record in SHP file. +As there is no additional value, this file is ignored during reading shapefiles. +Writing SHX data is handled directly by `ShpWriter` classes. + + +## Shapefile + +All three files described above form a shapefile. Unified access to shapefile triplet +is provided through classes whose name starts with `Shapefile` (eg. `ShapefilePointReader`). +Under the hood they are decorators wrapping `Dbf` and `Shp` classes. 
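+
+For illustration, this is roughly what such a decorator does under the hood - a simplified
+sketch that reads the .shp and .dbf files in lockstep using only the `ShpPointReader` and
+`DbfReader` APIs shown above (matching `shpPath`/`dbfPath` file paths are assumed):
+
+```c#
+using (var shpStream = File.OpenRead(shpPath))
+using (var shp = new ShpPointReader(shpStream))
+using (var dbf = new DbfReader(dbfPath))
+{
+    // Advance both readers together; there is one DBF record per SHP record.
+    while (shp.Read() && dbf.Read(out var attributes, out var deleted))
+    {
+        if (deleted)
+            continue; // Skip records marked as deleted in the DBF file.
+
+        Console.WriteLine(shp.Shape);
+        Console.WriteLine(attributes["Id"]);
+    }
+}
+```
+
+The `Shapefile*` classes do the same thing, but additionally verify the one-to-one
+relationship between geometry and attribute records.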
+ +### Reading shapefiles using c# code + +```c# +// Intuitive but GC heavy method +using (var shp = new ShapefilePointReader(shpPath)) +{ + Console.WriteLine(shp.ShapeType); + foreach (var field in shp.Fields) + { + Console.WriteLine(field); + } + + foreach (var feature in shp) + { + Console.WriteLine("Record ID: " + feature.Attributes["Id"]); + foreach (var attr in feature.Attributes) + { + Console.WriteLine($" {attr.Key}: {attr.Value}"); + } + Console.WriteLine($" SHAPE: {feature.Shape}"); + } +} +``` + +```c# +// Fast and GC friendly method +using (var shp = ShapefileReader.Open(shpPath)) +{ + Console.WriteLine(shp.ShapeType); + foreach (var field in shp.Fields) + { + Console.WriteLine(field); + } + + while (shp.Read(out var deleted)) + { + if (deleted) + continue; + + Console.WriteLine("Record ID: " + shp.Fields["Id"].Value); + foreach (var field in shp.Fields) + { + Console.WriteLine($" {field.Name}: {field.Value}"); + } + Console.WriteLine($" SHAPE: {shp.Shape}"); + } +} +``` + + +### Writing shapefiles using c# code + + +```c# +// Intuitive but GC heavy method +var features = new List(); +for (int i = 1; i < 5; i++) +{ + var attributes = new Dictionary(); + attributes["date"] = new DateTime(2000, 1, i + 1); + attributes["float"] = i * 0.1; + attributes["int"] = i; + attributes["logical"] = i % 2 == 0; + attributes["text"] = i.ToString("0.00"); + + var line = new List(); + line.Add(new ShpCoordinates(i, i + 1, i)); + line.Add(new ShpCoordinates(i, i, i)); + line.Add(new ShpCoordinates(i + 1, i, i)); + + var shapeParts = new List>(); + shapeParts.Add(line); + + var feature = new ShapefileFeature(shapeParts, attributes); + features.Add(feature); +} + +var dateField = DbfField.Create("date", typeof(DateTime)); +var floatField = DbfField.Create("float", typeof(double)); +var intField = DbfField.Create("int", typeof(int)); +var LogicalField = DbfField.Create("logical", typeof(bool)); +var textField = DbfField.Create("text", typeof(string)); + +using (var shp = new ShapefileMultiPartWriter(shpPath, ShapeType.PolyLine, dateField, floatField, intField, LogicalField, textField)) +{ + shp.Write(features); +} + +foreach (var feature in ShapefileReader.ReadAll(shpPath)) +{ + Console.WriteLine(feature.Attributes); + Console.WriteLine(feature.Shape); +} +``` + +```c# +// Fast and GC friendly method +var dateField = new DbfDateField("date"); +var floatField = new DbfFloatField("float"); +var intField = new DbfNumericField("int"); +var LogicalField = new DbfLogicalField("logical"); +var textField = new DbfCharacterField("text"); + +using (var shp = new ShapefileMultiPartWriter(shpPath, ShapeType.PolyLine, dateField, floatField, intField, LogicalField, textField)) +{ + for (int i = 1; i < 5; i++) + { + // Avoid expensive boxing and unboxing value types + dateField.DateValue = new DateTime(2000, 1, i + 1); + floatField.NumericValue = i * 0.1; + intField.NumericValue = i; + LogicalField.LogicalValue = i % 2 == 0; + textField.StringValue = i.ToString("0.00"); + + // Avoid realocating new ShpCoordinates[] array over and over. + shp.Shape.Clear(); + shp.Shape.StartNewPart(); + shp.Shape.AddPoint(i, i + 1, i); + shp.Shape.AddPoint(i, i, i); + shp.Shape.AddPoint(i + 1, i, i); + + shp.Write(); + Console.WriteLine("Feature number " + i + " was written."); + } +} +``` + + +## Performance +This library was designed with performance in mind. Thats why preferred implementation +forces reading/writing whole shapefile using once initialized ShpShapeBuilder +and once initialized attributes array buffer. 
That way the .NET garbage collector
+doesn't have to dispose of every single Point instance to reclaim the memory,
+and then allocate new memory for the next Point instance.
+It's like using [ArrayPool.Shared](https://docs.microsoft.com/en-us/dotnet/api/system.buffers.arraypool-1.shared)
+but without the cost of renting and returning.
+
+There are a lot of other optimizations, to name a few of them:
+- Using [structs over classes](https://adamsitnik.com/Value-Types-vs-Reference-Types/)
+  for storing `ShpCoordinates`.
+- Using the `ShpShapeBuilder`, which under the hood is a buffer with smart resizing capabilities.
+- Using a dedicated `BinaryBuffer` class which avoids file I/O operations
+  by reading/writing whole shape record data at once instead of reading/writing
+  every single coordinate one by one. Again - without costly memory reallocation.
+- The `BinaryBuffer` also has custom bit-converter functions with support
+  for big-endian and little-endian byte order. Its implementation avoids reallocating
+  a new byte[8] array again and again every time a single coordinate is written.
+  This avoids a GC bottleneck and is much faster than
+  [BitConverter](https://docs.microsoft.com/en-us/dotnet/api/system.bitconverter.getbytes#System_BitConverter_GetBytes_System_Double_).
+
+See also https://github.com/dotnet/runtime/issues/7291
+
+## Encoding
+
+The .NET Framework supports a large number of character encodings and code pages.
+On the other hand, .NET Core only supports a
+[limited list](https://docs.microsoft.com/en-us/dotnet/api/system.text.codepagesencodingprovider.instance#remarks) of encodings.
+To retrieve an encoding that is present in the .NET Framework on the Windows
+desktop but not in .NET Core, you need to do the following:
+1. Add to your project a reference to the [System.Text.Encoding.CodePages.dll](https://www.nuget.org/packages/System.Text.Encoding.CodePages/) package.
+2. Put the following line somewhere in your code:
+   `Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);`
+
+
+## Validation
+
+For performance reasons this library does not provide any kind of validation
+during reading or writing shapefiles. If you write a new shapefile it is your
+responsibility to write properly formatted data. E.g. if you try to write
+a polygon with only two points it will not throw any error.
+
+- If you read a shapefile from a trustworthy source (e.g. authored using ArcMap)
+  you will proceed very fast. If not, it is up to you to validate every
+  single shape (`ShpPointCollection`) you read.
+- If you write a shapefile based on trustworthy geometries (e.g. from a PostGIS database)
+  you will proceed very fast. If not, it is up to you to validate
+  every single shape (`ShpPointCollection`) you write.
+
+However, there are some quirks. Where possible, the library is forgiving.
+If you read a corrupted shapefile containing a polygon with only two points,
+it will return `NullShape` (an empty `ShpPointCollection`) instead of throwing an error.
+
+
+## Test
+
+This library was tested with shapefiles created by ArcMap 10.6.
+The library read those files into memory and then wrote them back to files.
+The output files were then checked byte by byte for differences.
+At the moment the only inconsistency spotted is related to Date fields.
+ArcMap 10.6 can create different null date representations within one .shp file!
+The test file pt_utf8.shp has a field named 'date' with the following binary data:
+```
+=== record 0 Stream.Position: 673
+...
+date BinaryBuffer.Position: 183 +ReadString(191): '▬▬▬▬▬▬▬▬' // '▬' == char.MinValue == (char)0 +=== record 1 Stream.Position: 1145 +... +date BinaryBuffer.Position: 183 +ReadString(191): ' ' +``` +According to [Esri documentation](https://desktop.arcgis.com/en/arcmap/latest/manage-data/shapefiles/geoprocessing-considerations-for-shapefile-output.htm) +Null value substitution for Date field is *'Stored as zero'*. So this library saves null dates as zero (null) bytes which is also consistent with Numeric and Float fields. diff --git a/src/NetTopologySuite.IO.Esri.Core/ShapeType.cs b/src/NetTopologySuite.IO.Esri.Core/ShapeType.cs new file mode 100644 index 0000000..72f2285 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/ShapeType.cs @@ -0,0 +1,247 @@ +using System; + +namespace NetTopologySuite.IO.Shapefile +{ + + + /// + /// Shapefile geometry types. + /// + public enum ShapeType : int + { + + /// + /// Null Shape + /// + /// + /// Indicates a null shape, with no geometric data for the shape. Each feature type (point, line, polygon, etc.) + /// supports nulls - it is valid to have points and null points in the same shapefile. Often null shapes are place + /// holders - they are used during shapefile creation and are populated with geometric data soon after they are created. + /// + NullShape = 0, + + /// + /// Point in the X, Y space. + /// + Point = 1, + + /// + /// PolyLine in the X, Y space. + /// + /// + /// A PolyLine is an ordered set of vertices that consists of one or more parts. + /// A part is a connected sequence of two or more points. + /// Parts may or may not be connected to one another. Parts may or may not intersect one another. + /// + PolyLine = 3, + + /// + /// Polygon in the X, Y space. + /// + /// + /// A polygon consists of one or more rings. A ring is a connected sequence of four or more points that form a closed, non-self-intersecting loop. + /// A polygon may contain multiple outer rings. The order of vertices or orientation for a ring indicates which side of the ring + /// is the interior of the polygon. The neighborhood to the right of an observer walking along + /// the ring in vertex order is the neighborhood inside the polygon. Vertices of rings defining + /// holes in polygons are in a counterclockwise direction. Vertices for a single, ringed + /// polygon are, therefore, always in clockwise order. The rings of a polygon are referred to + /// as its parts. + /// + Polygon = 5, + + /// + /// MultiPoint in the X, Y space. + /// + /// + /// A MultiPoint represents a set of points. + /// + MultiPoint = 8, + + + + + /// + /// Measured Point in the X, Y, Z space. + /// + /// Named as PointZ in ESRI Shapefile Technical Description. + PointZM = 11, + + /// + /// Measured PolyLine in the X, Y, Z space. + /// + /// Named as PolyLineZ in ESRI Shapefile Technical Description. + PolyLineZM = 13, + + /// + /// Measured Polygon in the X, Y, Z space. + /// + /// Named as PolygonZ in ESRI Shapefile Technical Description. + PolygonZM = 15, + + /// + /// Measured MultiPoint in the X, Y, Z space. + /// + /// Named as MultiPointZ in ESRI Shapefile Technical Description. + MultiPointZM = 18, + + + + + /// + /// Measured Point in the X, Y space. + /// + PointM = 21, + + /// + /// Measured LineString in the X, Y space. + /// + PolyLineM = 23, + + /// + /// Measured Polygon in the X, Y space. + /// + PolygonM = 25, + + /// + /// Measured MultiPoint in the X, Y space. 
+ /// + MultiPointM = 28, + + /// + /// MultiPatch + /// + [Obsolete("This shape type is not supported.")] + MultiPatch = 31, + + + + // GeoTools + + /// + /// Point in the X, Y, Z space. + /// + /// + /// This shape type is not conformant with ESRI Shapefile Technical Description. Use PointZM instead. + /// + [Obsolete("This shape type is not conformant with ESRI Shapefile Technical Description. Use PointZM instead.")] + PointZ = 9, + + /// + /// PolyLine in the X, Y, Z space. + /// + /// + /// This shape type is not conformant with ESRI Shapefile Technical Description. Use PointZM instead. + /// + [Obsolete("This shape type is not conformant with ESRI Shapefile Technical Description. Use PolyLineZM instead.")] + PolyLineZ = 10, + + /// + /// Polygon in the X, Y, Z space. + /// + /// + /// This shape type is not conformant with ESRI Shapefile Technical Description. Use PolygonZM instead. + /// + [Obsolete("This shape type is not conformant with ESRI Shapefile Technical Description. Use PointZM instead.")] + PolygonZ = 19, + + /// + /// MultiPoint in the X, Y, Z space. + /// + /// + /// This shape type is not conformant with ESRI Shapefile Technical Description. Use MultiPointZM instead. + /// + [Obsolete("This shape type is not conformant with ESRI Shapefile Technical Description. Use PointZM instead.")] + MultiPointZ = 20, + + } + + + +#pragma warning disable CS0618 // Type or member is obsolete + + /// + /// Helper methods for ShapeType enumeration. + /// + public static class ShapeTypeExtensions + { + /// + /// Indicates if the geometric data for the shape associated with this type contains Z coordinate. + /// + public static bool HasZ(this ShapeType type) + { + return type == ShapeType.PointZ + || type == ShapeType.MultiPointZ + || type == ShapeType.PolyLineZ + || type == ShapeType.PolygonZ + + || type == ShapeType.PointZM + || type == ShapeType.MultiPointZM + || type == ShapeType.PolyLineZM + || type == ShapeType.PolygonZM; + } + + /// + /// Indicates if the geometric data for the shape associated with this type contains Measure value. + /// + public static bool HasM(this ShapeType type) + { + return type == ShapeType.PointM + || type == ShapeType.MultiPointM + || type == ShapeType.PolyLineM + || type == ShapeType.PolygonM + + || type == ShapeType.PointZM + || type == ShapeType.MultiPointZM + || type == ShapeType.PolyLineZM + || type == ShapeType.PolygonZM; + } + + + /// + /// Indicates if shape associated with this type is a Point. + /// + public static bool IsPoint(this ShapeType type) + { + return type == ShapeType.Point + || type == ShapeType.PointM + || type == ShapeType.PointZM + || type == ShapeType.PointZ; + } + + /// + /// Indicates if shape associated with this type is a MultiPoint. + /// + public static bool IsMultiPoint(this ShapeType type) + { + return type == ShapeType.MultiPoint + || type == ShapeType.MultiPointM + || type == ShapeType.MultiPointZM + || type == ShapeType.MultiPointZ; + } + + /// + /// Indicates if shape associated with this type is a PolyLine. + /// + public static bool IsPolyLine(this ShapeType type) + { + return type == ShapeType.PolyLine + || type == ShapeType.PolyLineM + || type == ShapeType.PolyLineZM + || type == ShapeType.PolyLineZ; + } + + /// + /// Indicates if shape associated with this type is a Polygon. 
+ /// + public static bool IsPolygon(this ShapeType type) + { + return type == ShapeType.Polygon + || type == ShapeType.PolygonM + || type == ShapeType.PolygonZM + || type == ShapeType.PolygonZ; + } + } +#pragma warning restore CS0618 // Type or member is obsolete + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile.cs new file mode 100644 index 0000000..0762282 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile.cs @@ -0,0 +1,47 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.MemoryMappedFiles; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + + + /// + /// Base shapefile class. + /// + public abstract class Shapefile : ManagedDisposable + { + internal const int FileCode = 9994; // 0x0000270A; + internal const int Version = 1000; + + internal static readonly int FileHeaderSize = 100; + internal static readonly int RecordHeaderSize = 2 * sizeof(int); + + + /// + /// Minimal Measure value considered as not "no-data". + /// + /// + /// Any floating point number smaller than –10E38 is considered by a shapefile reader + /// to represent a "no data" value. This rule is used only for measures (M values). + ///
+ /// http://www.esri.com/library/whitepapers/pdfs/shapefile.pdf (page 2, bottom) + ///
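+        // E.g. a stored M value of -1.5e39 (smaller than MeasureMinValue) is treated as "no data",
+        // while -123.45 is kept as a real measure.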
+ internal static readonly double MeasureMinValue = -10e38; + + /// + /// Shape type. + /// + public abstract ShapeType ShapeType { get; } + + /// + /// Shapefile attribute definitions with current attribute values. + /// + public abstract DbfFieldCollection Fields { get; } + + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileMultiPartReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileMultiPartReader.cs new file mode 100644 index 0000000..82feb7d --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileMultiPartReader.cs @@ -0,0 +1,31 @@ +using NetTopologySuite.IO.Dbf; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Polygon or PolyLine shapefile reader. + /// + public class ShapefileMultiPartReader : ShapefileReader + { + /// + public ShapefileMultiPartReader(Stream shpStream, Stream dbfStream, Encoding encoding = null) + : base(shpStream, dbfStream, encoding) + { } + + /// + public ShapefileMultiPartReader(string shpPath, Encoding encoding = null) + : base(shpPath, encoding) + { } + + + internal override ShpReader CreateShpReader(Stream shpStream) + { + return new ShpMultiPartReader(shpStream); + } + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileMultiPointReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileMultiPointReader.cs new file mode 100644 index 0000000..dba2a30 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileMultiPointReader.cs @@ -0,0 +1,35 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// MultiPoint shapefile reader. + /// + public class ShapefileMultiPointReader : ShapefileReader + { + + /// + public ShapefileMultiPointReader(Stream shpStream, Stream dbfStream, Encoding encoding = null) + : base(shpStream, dbfStream, encoding) + { } + + + /// + public ShapefileMultiPointReader(string shpPath, Encoding encoding = null) + : base(shpPath, encoding) + { } + + + internal override ShpReader CreateShpReader(Stream shpStream) + { + return new ShpMultiPointReader(shpStream); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefilePointReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefilePointReader.cs new file mode 100644 index 0000000..ec04095 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefilePointReader.cs @@ -0,0 +1,95 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + + /// + /// Point shapefile reader. + /// + public class ShapefilePointReader : ShapefileReader, IEnumerable + { + /// + public ShapefilePointReader(Stream shpStream, Stream dbfStream, Encoding encoding = null) + : base(shpStream, dbfStream, encoding) + { } + + /// + public ShapefilePointReader(string shpPath, Encoding encoding = null) + : base(shpPath, encoding) + { } + + + internal override ShpReader CreateShpReader(Stream shpStream) + { + return new ShpPointReader(shpStream); + } + + /// + /// Reads next feature record containing shape geometry and its attributes. + /// + /// Shapefile point feature. + /// Indicates if this reacord was marked as deleted. 
+ /// Value indicating if reading next record was successful. + public bool Read(out ShapefilePointFeature feature, out bool deleted) + { + if (!Read(out deleted)) + { + feature = new ShapefilePointFeature(ShpCoordinates.NullPoint, DbfField.EmptyFieldValues); + return false; + } + + feature = new ShapefilePointFeature(Shape[0], Fields.GetValues()); + return true; + } + + /// + public IEnumerator GetEnumerator() + { + return new ShapefilePointEnumerator(this); + } + + private class ShapefilePointEnumerator : IEnumerator + { + private readonly ShapefilePointReader Owner; + public ShapefilePointFeature Current { get; private set; } + object IEnumerator.Current => Current; + + public ShapefilePointEnumerator(ShapefilePointReader owner) + { + Owner = owner; + } + + public void Reset() + { + Owner.Restart(); + } + + public bool MoveNext() + { + ShapefilePointFeature feature; + var succeed = Owner.Read(out feature, out var deleted); + + if (deleted) + { + return MoveNext(); + } + Current = feature; + return succeed; + } + + public void Dispose() + { + // Nothing to dispose + } + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileReader.cs new file mode 100644 index 0000000..edcf00d --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Readers/ShapefileReader.cs @@ -0,0 +1,257 @@ +using NetTopologySuite.IO.Dbf; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Base class for reading a shapefile. + /// + public abstract class ShapefileReader : Shapefile, IEnumerable + { + private readonly ShpReader ShpReader; + private readonly DbfReader DbfReader; + + + /// + /// Initializes a new instance of the reader class. + /// + /// SHP file stream. + /// DBF file stream. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + public ShapefileReader(Stream shpStream, Stream dbfStream, Encoding encoding) + { + DbfReader = new DbfReader(dbfStream, encoding); + + ShpReader = CreateShpReader(shpStream); + ShapeType = ShpReader.ShapeType; + + } + + /// + /// Initializes a new instance of the reader class. + /// + /// Path to SHP file. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + public ShapefileReader(string shpPath, Encoding encoding = null) + { + try + { + DbfReader = new DbfReader(Path.ChangeExtension(shpPath, ".dbf"), encoding); + + var shpStream = OpenManagedFileStream(shpPath, ".shp", FileMode.Open); + ShpReader = CreateShpReader(shpStream); + ShapeType = ShpReader.ShapeType; + + var prjFile = Path.ChangeExtension(shpPath, ".prj"); + if (File.Exists(prjFile)) + Projection = File.ReadAllText(prjFile); + } + catch + { + DisposeManagedResources(); + throw; + } + } + + internal abstract ShpReader CreateShpReader(Stream shpStream); + + /// + public override DbfFieldCollection Fields => DbfReader.Fields; + + /// + public override ShapeType ShapeType { get; } = ShapeType.NullShape; + + /// + /// Current shape read from the underlying SHP file. + /// + public ShpShapeBuilder Shape => ShpReader.Shape; + + + /// + /// Encoding used by the shapefile. + /// + public Encoding Encoding => DbfReader.Encoding; + + + /// + /// The minimum bounding rectangle orthogonal to the X and Y (and potentially the M and Z). 
+ /// + public ShpBoundingBox BoundingBox => ShpReader.BoundingBox; + + + /// + /// Well-known text representation of coordinate reference system metadata from .prj file. + /// + /// + /// https://support.esri.com/en/technical-article/000001897 + /// /> + public string Projection { get; } = null; + + /// + /// Reads feature shape and attributes from underlying SHP and DBF files and writes them to and properties. + /// + /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. + /// + public bool Read(out bool deleted) + { + var readShpSucceed = ShpReader.Read(); + var readDbfSucceed = DbfReader.Read(out deleted); + + if (readDbfSucceed != readShpSucceed) + throw new FileLoadException("Corrupted shapefile data. " + + "The dBASE table must contain feature attributes with one record per feature. " + + "There must be one-to-one relationship between geometry and attributes."); + + return readDbfSucceed; + } + + + /// + protected override void DisposeManagedResources() + { + ShpReader?.Dispose(); + DbfReader?.Dispose(); + + base.DisposeManagedResources(); // This will dispose streams used by ShpReader and DbfReader. Do it at the end. + } + + /// + /// Moves reader back to its initial position. + /// + public void Restart() + { + DbfReader.Restart(); + ShpReader.Restart(); + } + + #region *** Enumerator *** + + IEnumerator IEnumerable.GetEnumerator() + { + return new ShapefileEnumerator(this); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return new ShapefileEnumerator(this); + } + + private class ShapefileEnumerator : IEnumerator + { + private readonly ShapefileReader Owner; + public ShapefileFeature Current { get; private set; } + object IEnumerator.Current => Current; + + public ShapefileEnumerator(ShapefileReader owner) + { + Owner = owner; + } + + public void Reset() + { + Owner.Restart(); + } + + public bool MoveNext() + { + if (!Owner.Read(out var deleted)) + { + return false; + } + + if (deleted) + { + return MoveNext(); + } + + Current = new ShapefileFeature(Owner.Shape.GetParts(), Owner.Fields.GetValues()); + return true; + } + + public void Dispose() + { + // Nothing to dispose + } + } + + #endregion + + + private readonly static BinaryBufferReader ShpHeader = new BinaryBufferReader(Shapefile.FileHeaderSize); + + /// + /// Opens shapefile reader. + /// + /// Path to shapefile. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Shapefile reader. + public static ShapefileReader Open(string shpPath, Encoding encoding = null) + { + var shapeType = Core.ShpReader.GetShapeType(shpPath); + + if (shapeType.IsPoint()) + { + return new ShapefilePointReader(shpPath, encoding); + } + else if (shapeType.IsMultiPoint()) + { + return new ShapefileMultiPointReader(shpPath, encoding); + } + else if (shapeType.IsPolyLine() || shapeType.IsPolygon()) + { + return new ShapefileMultiPartReader(shpPath, encoding); + } + else + { + throw new FileLoadException("Unsupported shapefile type: " + shapeType, shpPath); + } + } + + + /// + /// Reads all features from shapefile. + /// + /// Path to shapefile. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Shapefile features. 
+ public static ShapefileFeature[] ReadAll(string shpPath, Encoding encoding = null) + { + using (var shp = Open(shpPath, encoding)) + { + return shp.ToArray(); + } + } + + [Conditional("DEBUG_BINARY")] + internal void GetBinaryDiff(ShapefileReader other, List differences) + { + ShpReader.GetBinaryDiff(other.ShpReader, differences); + DbfReader.GetBinaryDiff(other.DbfReader, differences); + + differences.Add("SHP Records"); + int recNo = 1; + while (Read(out var del) && other.Read(out var delOther)) + { + differences.Add("Record number: " + recNo++); + if (del != delOther) + { + differences.Add("Deleted: " + del + " | " + delOther); + } + ShpReader.GetBinaryDiff(other.ShpReader, differences); + } + + } + } + + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/ShapefileFeature.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/ShapefileFeature.cs new file mode 100644 index 0000000..2711a38 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/ShapefileFeature.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Geometry and attribute information for the spatial feature in shapefile. + /// + public class ShapefileFeature + { + /// + /// Feature shape points. + /// + public IReadOnlyList> Shape { get; } + + /// + /// Feature attributes. + /// + public IReadOnlyDictionary Attributes { get; } + + + /// + /// Initializes new ShapefileFeature struct. + /// + /// Feature shape points. + /// Feature attributes. + public ShapefileFeature(IReadOnlyList> shape, IReadOnlyDictionary attributes) + { + Shape = shape; + Attributes = attributes; + } + } + + + + + /// + /// Geometry and attribute information for the point feature in shapefile. + /// + public class ShapefilePointFeature + { + /// + /// Feature point. + /// + public ShpCoordinates Shape { get; } + + /// + /// Feature attributes. + /// + public IReadOnlyDictionary Attributes { get; } + + + /// + /// Initializes new ShapefileFeature struct. + /// + /// Feature shape points. + /// Feature attributes. + public ShapefilePointFeature(ShpCoordinates shape, IReadOnlyDictionary attributes) + { + Shape = shape; + Attributes = attributes; + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileMultiPartWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileMultiPartWriter.cs new file mode 100644 index 0000000..da60d5c --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileMultiPartWriter.cs @@ -0,0 +1,35 @@ +using NetTopologySuite.IO.Dbf; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Polygon or PolyLine shapefile writer. 
+ /// + public class ShapefileMultiPartWriter : ShapefileWriter + { + /// + public ShapefileMultiPartWriter(Stream shpStream, Stream shxStream, Stream dbfStream, ShapeType type, IReadOnlyList fields, Encoding encoding = null) + : base(shpStream, shxStream, dbfStream, type, fields, encoding) + { } + + /// + public ShapefileMultiPartWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding = null, string projection = null) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefileMultiPartWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { + } + + internal override ShpWriter CreateShpWriter(Stream shpStream, Stream shxStream) + { + return new ShpMultiPartWriter(shpStream, shxStream, ShapeType); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileMultiPointWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileMultiPointWriter.cs new file mode 100644 index 0000000..525a4f0 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileMultiPointWriter.cs @@ -0,0 +1,37 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// MultiPoint shapefile writer. + /// + public class ShapefileMultiPointWriter : ShapefileWriter + { + /// + public ShapefileMultiPointWriter(Stream shpStream, Stream shxStream, Stream dbfStream, ShapeType type, IReadOnlyList fields, Encoding encoding = null) + : base(shpStream, shxStream, dbfStream, type, fields, encoding) + { } + + /// + public ShapefileMultiPointWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding = null, string projection = null) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefileMultiPointWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { + } + + + internal override ShpWriter CreateShpWriter(Stream shpStream, Stream shxStream) + { + return new ShpMultiPointWriter(shpStream, shxStream, ShapeType); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefilePointWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefilePointWriter.cs new file mode 100644 index 0000000..e277b7d --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefilePointWriter.cs @@ -0,0 +1,75 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Point shapefile writer. 
+ /// + public class ShapefilePointWriter : ShapefileWriter + { + /// + public ShapefilePointWriter(Stream shpStream, Stream shxStream, Stream dbfStream, ShapeType type, IReadOnlyList fields, Encoding encoding = null) + : base(shpStream, shxStream, dbfStream, type, fields, encoding) + { } + + /// + public ShapefilePointWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding = null, string projection = null) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefilePointWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { + } + + private ShpPointWriter PointWriter; + + internal override ShpWriter CreateShpWriter(Stream shpStream, Stream shxStream) + { + PointWriter = new ShpPointWriter(shpStream, shxStream, ShapeType); + return PointWriter; + } + + /// + /// Writes feature record. + /// + /// Feature record. + public void Write(ShapefilePointFeature feature) + { + Point = feature.Shape; + Fields.SetValues(feature.Attributes); + + Write(); + } + + /// + /// Writes feature record collection. + /// + /// Feature record collection + public void Write(IEnumerable features) + { + foreach (var feature in features) + { + Write(feature); + } + } + + /// + /// Point shape. + /// + public ShpCoordinates Point + { + get => PointWriter.Point; + set => PointWriter.Point = value; + } + + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileWriter.cs new file mode 100644 index 0000000..75a0438 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shapefile/Writers/ShapefileWriter.cs @@ -0,0 +1,183 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Base class for writing a shapefile. + /// + public abstract class ShapefileWriter : Shapefile + { + private readonly ShpWriter ShpWriter; + private readonly DbfWriter DbfWriter; + + /// + /// Initializes a new instance of the writer class. + /// + /// SHP file stream. + /// SHX file stream. + /// DBF file stream. + /// Shape type. + /// Shapefile fields definitions. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + internal ShapefileWriter(Stream shpStream, Stream shxStream, Stream dbfStream, ShapeType type, IReadOnlyList fields, Encoding encoding) + { + DbfWriter = new DbfWriter(dbfStream, fields, encoding); + + + ShpWriter = CreateShpWriter(shpStream, shxStream); + ShapeType = type; + } + + + /// + /// Initializes a new instance of the writer class. + /// + /// Path to SHP file. + /// Shape type. + /// Shapefile attribute definitions. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Projection metadata for the shapefile (.prj file). 
+ internal ShapefileWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + { + try + { + DbfWriter = new DbfWriter(Path.ChangeExtension(shpPath, ".dbf"), fields, encoding); + + ShapeType = type; + var shpStream = OpenManagedFileStream(shpPath, ".shp", FileMode.Create); + var shxStream = OpenManagedFileStream(shpPath, ".shx", FileMode.Create); + ShpWriter = CreateShpWriter(shpStream, shxStream); + + if (!string.IsNullOrWhiteSpace(projection)) + File.WriteAllText(Path.ChangeExtension(shpPath, ".prj"), projection); + } + catch + { + DisposeManagedResources(); + throw; + } + + } + + + /// + /// Initializes a new instance of the writer class. + /// + /// Path to SHP file. + /// Shape type. + /// Shapefile attribute definitions. + internal ShapefileWriter(string shpPath, ShapeType type, params DbfField[] fields) : this(shpPath, type, fields, null, null) + { + } + + internal abstract ShpWriter CreateShpWriter(Stream shpStream, Stream shxStream); + + /// + public override ShapeType ShapeType { get; } = ShapeType.NullShape; + + /// + public override DbfFieldCollection Fields => DbfWriter.Fields; + + /// + /// Current shape to be written to underlying SHP file. + /// + public ShpShapeBuilder Shape => ShpWriter.Shape; + + + /// + /// Wrties and values to underlying SHP and DBF files. + /// + public void Write() + { + ShpWriter.Write(); + DbfWriter.Write(); + } + + /// + /// Writes next feature record containing shape geometry and its attributes. + /// + /// Shape parts. + /// Attributes associated with the feature. + public void Write(IEnumerable> shapeParts, IReadOnlyDictionary attributes) + { + Shape.Clear(); + foreach (var part in shapeParts) + { + Shape.StartNewPart(); + foreach (var pt in part) + { + Shape.AddPoint(pt); + } + } + + Fields.SetValues(attributes); + + Write(); + } + + /// + /// Writes feature to the shapefile. + /// + public void Write(ShapefileFeature feature) + { + Write(feature.Shape, feature.Attributes); + } + + /// + /// Writes features to the shapefile. + /// + public void Write(IEnumerable features) + { + foreach (var feature in features) + { + Write(feature); + } + } + + /// + protected override void DisposeManagedResources() + { + ShpWriter?.Dispose(); + DbfWriter?.Dispose(); + + base.DisposeManagedResources(); // This will dispose streams used by ShpWriter and DbfWriter. Do it at the end. + } + + + /// + /// Opens shapefile writer. + /// + /// Path to shapefile. + /// Shape type. + /// Shapefile fields definitions. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Projection metadata for the shapefile (.prj file). + /// Shapefile writer. 
+ public static ShapefileWriter Open(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding = null, string projection = null) + { + if (type.IsPoint()) + { + return new ShapefilePointWriter(shpPath, type, fields, encoding, projection); + } + else if (type.IsMultiPoint()) + { + return new ShapefileMultiPointWriter(shpPath, type, fields, encoding, projection); + } + else if (type.IsPolyLine() || type.IsPolygon()) + { + return new ShapefileMultiPartWriter(shpPath, type, fields, encoding, projection); + } + else + { + throw new FileLoadException("Unsupported shapefile type: " + type, shpPath); + } + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/ShapefileException.cs b/src/NetTopologySuite.IO.Esri.Core/ShapefileException.cs new file mode 100644 index 0000000..4c33fdd --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/ShapefileException.cs @@ -0,0 +1,36 @@ +using System; +using System.Runtime.Serialization; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// The exception that is thrown when a non-fatal application error occurs related to Topology functionality. + /// + public class ShapefileException : ApplicationException + { + /// + /// Initializes a new instance of the ShapefileException class. + /// + public ShapefileException() { } + + /// + /// Initializes a new instance of the ShapefileException class with a specified error message. + /// + /// A message that describes the error. + public ShapefileException(string message) : base(message) { } + + /// + /// Initializes a new instance of the ShapefileException class with serialized data. + /// + /// The object that holds the serialized object data. + /// The contextual information about the source or destination. + public ShapefileException(SerializationInfo info,StreamingContext context) : base(info, context) { } + + /// + /// Initializes a new instance of the ShapefileException class with a specified error message and a reference to the inner exception that is the cause of this exception. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception. If the innerException parameter is not a null reference, the current exception is raised in a catch block that handles the inner exception + public ShapefileException(string message, Exception innerException) : base(message, innerException) { } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpMultiPartReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpMultiPartReader.cs new file mode 100644 index 0000000..03e1230 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpMultiPartReader.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Polygon or MultiPart SHP file reader. 
+ /// + public class ShpMultiPartReader : ShpReader + { + /// + public ShpMultiPartReader(Stream shpStream) : base(shpStream) + { + if (!ShapeType.IsPolygon() && !ShapeType.IsPolyLine()) + throw GetUnsupportedShapeTypeException(); + } + + internal override void ReadShape(BinaryBufferReader shapeBinary) + { + shapeBinary.AdvancePastXYBoundingBox(); + var partCount = shapeBinary.ReadPartCount(); + var pointCount = shapeBinary.ReadPointCount(); + + shapeBinary.ReadPartOfsets(partCount, Shape); + shapeBinary.ReadPoints(pointCount, HasZ, HasM, Shape); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpMultiPointReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpMultiPointReader.cs new file mode 100644 index 0000000..199353f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpMultiPointReader.cs @@ -0,0 +1,31 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// MultiPoint SHP file reader. + /// + public class ShpMultiPointReader : ShpReader + { + /// + public ShpMultiPointReader(Stream shpStream) : base(shpStream) + { + if (!ShapeType.IsMultiPoint()) + throw GetUnsupportedShapeTypeException(); + } + + internal override void ReadShape(BinaryBufferReader shapeBinary) + { + shapeBinary.AdvancePastXYBoundingBox(); + var pointCount = shapeBinary.ReadPointCount(); + + shapeBinary.ReadPoints(pointCount, HasZ, HasM, Shape); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpPointReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpPointReader.cs new file mode 100644 index 0000000..e777b18 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpPointReader.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Point SHP file reader. + /// + public class ShpPointReader : ShpReader + { + private ShpCoordinates PointBuffer = new ShpCoordinates(0.0, 0.0); // This sets Z=NaN, M=NaN (default constructor sets it to 0.0) + + /// + public ShpPointReader(Stream shpStream) : base(shpStream) + { + if (!ShapeType.IsPoint()) + throw GetUnsupportedShapeTypeException(); + } + + internal override void ReadShape(BinaryBufferReader shapeBinary) + { + shapeBinary.ReadPoint(HasZ, HasM, ref PointBuffer); + base.Shape.AddPoint(PointBuffer.X, PointBuffer.Y, PointBuffer.Z, PointBuffer.M); + } + + /// + /// Point shape. + /// + public ShpCoordinates Point => base.Shape.Points[0]; + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpReader.cs new file mode 100644 index 0000000..6540df7 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Readers/ShpReader.cs @@ -0,0 +1,180 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Base class class for reading a fixed-length file header and variable-length records from a *.SHP file. 
+ /// + public abstract class ShpReader : ManagedDisposable + { + private readonly Stream ShpStream; + private readonly int ShpEndPosition; + private readonly BinaryBufferReader Header = new BinaryBufferReader(Shapefile.FileHeaderSize); + private readonly BinaryBufferReader RecordContent = new BinaryBufferReader(); + internal readonly bool HasM; + internal readonly bool HasZ; + + /// + /// Shapefile Spec:
+ /// The one-to-one relationship between geometry and attributes is based on record number. + /// Attribute records in the dBASE file must be in the same order as records in the main file. + ///
+ /// + /// DBF does not have recor number attribute. + /// + private int RecordNumber = 1; + + + /// + /// Initializes a new instance of the reader class. + /// + /// SHP file stream. + public ShpReader(Stream shpStream) + { + ShpStream = shpStream ?? throw new ArgumentNullException("Uninitialized SHP stream.", nameof(shpStream)); + + if (ShpStream.Position != 0) + ShpStream.Seek(0, SeekOrigin.Begin); + + Header.LoadFrom(ShpStream, Shapefile.FileHeaderSize); + Header.ReadShpFileHeader(out var type, out var fileLength, BoundingBox); + ShpEndPosition = fileLength - 1; + + ShapeType = type; + HasM = type.HasM(); + HasZ = type.HasZ(); + + Debug.Assert(RecordContent.End, "Shapefile header", "Unexpected SHP binary reader position."); + } + + + internal void Restart() + { + ShpStream.Seek(Shapefile.FileHeaderSize, SeekOrigin.Begin); + } + + /// + /// Shape type. + /// + public ShapeType ShapeType { get; } = ShapeType.NullShape; + + /// + /// Current shape read from the underlying stream. + /// + public ShpShapeBuilder Shape { get; } = new ShpShapeBuilder(); + + /// + /// The minimum bounding rectangle orthogonal to the X and Y (and potentially the M and Z). + /// + public ShpBoundingBox BoundingBox { get; } = new ShpBoundingBox(); + + /// + /// Reads content of the from the underlying stream. + /// + /// Value indicating if reading next record was successful. + public bool Read() + { + Shape.Clear(); + + if (!ReadRecordContent()) + return false; + + var type = RecordContent.ReadGeometryType(); + if (type == ShapeType.NullShape) + { + return true; // Empty points collection + } + else if (type != ShapeType) + { + throw GetInvalidRecordTypeException(type); + } + + ReadShape(RecordContent); + return true; + } + + internal abstract void ReadShape(BinaryBufferReader shapeBinary); + + + internal bool ReadRecordContent() + { + if (ShpStream.Position >= ShpEndPosition) + return false; + + Header.LoadFrom(ShpStream, Shapefile.RecordHeaderSize); + Header.ReadShpRecordHeader(out var recordNumber, out var contentLength); + + RecordContent.LoadFrom(ShpStream, contentLength); + + Debug.Assert(recordNumber == RecordNumber++, "Shapefile record", $"Unexpected SHP record number: {recordNumber} (expected {RecordNumber})."); + return true; + } + + /// + protected override void DisposeManagedResources() + { + Header?.Reset(); + RecordContent?.Reset(); + + //Debug.Assert(ShpStream.Position == ShpEndPosition + 1, "Shapefile reader", "Unexpected SHP file length. This may happen when reading SHP file was not finished."); + base.DisposeManagedResources(); + } + + + internal Exception GetInvalidRecordTypeException(ShapeType type) + { + return new FileLoadException($"Ivalid shapefile record type. {GetType().Name} does not support { type } shapes."); + } + + internal Exception GetUnsupportedShapeTypeException() + { + throw new FileLoadException(GetType().Name + $" does not support {ShapeType} shapes."); + } + + internal static ShapeType GetShapeType(Stream shpStream) + { + var binary = new BinaryBufferReader(Shapefile.FileHeaderSize); + binary.LoadFrom(shpStream, Shapefile.FileHeaderSize); + + var fileCode = binary.ReadInt32BigEndian(); + if (fileCode != Shapefile.FileCode) + throw new FileLoadException("Invalid shapefile format."); + + binary.Advance(28); + return binary.ReadGeometryType(); + } + + /// + /// Reads shape type information from SHP file. + /// + /// Path to SHP file. + /// Shape type. 
+ public static ShapeType GetShapeType(string shpPath) + { + if (Path.GetExtension(shpPath).ToLowerInvariant() != ".shp") + throw new FileLoadException("Specified file must have .shp extension."); + + using (var shpStream = new FileStream(shpPath, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + return GetShapeType(shpStream); + } + } + + + [Conditional("DEBUG_BINARY")] + internal void GetBinaryDiff(ShpReader other, List differences) + { + Header.GetBinaryDiff("SHP Header", other.Header, differences); + RecordContent.GetBinaryDiff("SHP RecordContent", other.RecordContent, differences); + } + + + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/ShpBinaryExtensions.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpBinaryExtensions.cs new file mode 100644 index 0000000..e6c29c2 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpBinaryExtensions.cs @@ -0,0 +1,345 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + + internal static class ShpBinaryExtensions + { + public static void WriteShpFileHeader(this BinaryBufferWriter binary, ShapeType type, int fileLength, ShpBoundingBox extent, bool hasZ, bool hasM) + { + binary.WriteInt32BigEndian(Shapefile.FileCode); + binary.WriteBytes(byte.MinValue, 20); + + binary.Write16BitWords(fileLength); // in 16-bit words, including the header + binary.WriteInt32LittleEndian(Shapefile.Version); + binary.WriteGeometryType(type); + + binary.WriteXYBoundingBox(extent); + binary.WriteZRange(extent, hasZ); + binary.WriteMRange(extent, hasM); + } + + public static void ReadShpFileHeader(this BinaryBufferReader binary, out ShapeType type, out int fileLength, ShpBoundingBox boundingBox) + { + var fileCode = binary.ReadInt32BigEndian(); + binary.Advance(20); + + fileLength = binary.Read16BitWords(); // in 16-bit words, including the header + var version = binary.ReadInt32LittleEndian(); + type = binary.ReadGeometryType(); + + binary.ReadXYBoundingBox(boundingBox); + binary.ReadZRange(boundingBox, type.HasZ()); + binary.ReadMRange(boundingBox, type.HasM()); + + if (fileCode != Shapefile.FileCode) + throw new FileLoadException("Invalid shapefile format."); + + + Debug.Assert(version == Shapefile.Version, "Shapefile version", $"Ivalid SHP version: {version} (expected: 1000)."); + } + + + public static void WriteShpRecordHeader(this BinaryBufferWriter shpRecordHeader, int recrodNumber, int contentLength) + { + shpRecordHeader.WriteInt32BigEndian(recrodNumber); + shpRecordHeader.Write16BitWords(contentLength); + } + public static void ReadShpRecordHeader(this BinaryBufferReader shpRecordHeader, out int recrodNumber, out int contentLength) + { + recrodNumber = shpRecordHeader.ReadInt32BigEndian(); + contentLength = shpRecordHeader.Read16BitWords(); + } + + + public static void WriteGeometryType(this BinaryBufferWriter binary, ShapeType type) + { + binary.WriteInt32LittleEndian((int)type); + } + public static ShapeType ReadGeometryType(this BinaryBufferReader binary) + { + return (ShapeType)binary.ReadInt32LittleEndian(); + } + + + public static void ReadXYCoordinate(this BinaryBufferReader shpRecordData, out double x, out double y) + { + x = shpRecordData.ReadDoubleLittleEndian(); + y = shpRecordData.ReadDoubleLittleEndian(); + } + private static void WriteXYCoordinate(this BinaryBufferWriter shpRecordData, double x, double y) + { + // Avoid performance costs (if you trying to pas NaN 
as X,Y then you're wrong). + // x = x.ToValidShpOrdinate(0.0); + // x = x.ToValidShpOrdinate(0.0); + + shpRecordData.WriteDoubleLittleEndian(x); + shpRecordData.WriteDoubleLittleEndian(y); + } + public static void WriteXYOrdinate(this BinaryBufferWriter shpRecordData, double x, double y, ShpBoundingBox shpExtent) + { + shpRecordData.WriteXYCoordinate(x, y); + shpExtent.X.Expand(x); + shpExtent.Y.Expand(y); + } + + + + private static void ReadXYCoordinates(this BinaryBufferReader shpRecordData, int count, ShpShapeBuilder points) + { + for (int i = 0; i < count; i++) + { + shpRecordData.ReadXYCoordinate(out var x, out var y); + points.AddPoint(x, y); + } + } + public static void WriteXYCoordinates(this BinaryBufferWriter shpRecordData, ShpShapeBuilder points) + { + for (int i = 0; i < points.PointCount; i++) + { + shpRecordData.WriteXYCoordinate(points[i].X, points[i].Y); + } + } + + + public static double ReadZCoordinate(this BinaryBufferReader shpRecordData) + { + return shpRecordData.ReadDoubleLittleEndian(); + } + private static void WriteZCoordinate(this BinaryBufferWriter shpRecordData, double z) + { + shpRecordData.WriteDoubleLittleEndian(z.ToValidShpCoordinate()); + } + + + private static void AdvancePastZRange(this BinaryBufferReader binary) + { + binary.Advance(2 * sizeof(double)); + } + private static void ReadZRange(this BinaryBufferReader binary, ShpBoundingBox shpExtent, bool hasZ) + { + if (hasZ) + { + shpExtent.Z.Expand(binary.ReadZCoordinate()); + shpExtent.Z.Expand(binary.ReadZCoordinate()); + } + } + public static void WriteZRange(this BinaryBufferWriter binary, ShpBoundingBox shpExtent, bool hasZ) + { + if (hasZ) + { + binary.WriteZCoordinate(shpExtent.Z.Min); + binary.WriteZCoordinate(shpExtent.Z.Max); + } + else + { + binary.WriteZCoordinate(0.0); // ArcMap uses zero as default. + binary.WriteZCoordinate(0.0); + } + } + + + private static void ReadZCoordinates(this BinaryBufferReader shpRecordData, int count, ShpShapeBuilder points) + { + for (int i = 0; i < count; i++) + { + points.Points[i].Z = shpRecordData.ReadZCoordinate(); + } + } + public static void WriteZCoordinates(this BinaryBufferWriter shpRecordData, ShpShapeBuilder points) + { + for (int i = 0; i < points.PointCount; i++) + { + shpRecordData.WriteZCoordinate(points.Points[i].Z); + } + } + + + public static double ReadMValue(this BinaryBufferReader shpRecordData) + { + var m = shpRecordData.ReadDoubleLittleEndian(); + if (m < Shapefile.MeasureMinValue) + return double.NaN; + + return m; + } + private static void WriteMValue(this BinaryBufferWriter shpRecordData, double m) + { + shpRecordData.WriteDoubleLittleEndian(m.ToValidShpMeasure()); + } + + + private static void AdvancePastMRange(this BinaryBufferReader binary) + { + binary.Advance(2 * sizeof(double)); + } + private static void ReadMRange(this BinaryBufferReader binary, ShpBoundingBox shpExtent, bool hasM) + { + if (hasM) + { + shpExtent.M.Expand(binary.ReadMValue()); + shpExtent.M.Expand(binary.ReadMValue()); + } + } + public static void WriteMRange(this BinaryBufferWriter binary, ShpBoundingBox shpExtent, bool hasM) + { + if (hasM) + { + binary.WriteMValue(shpExtent.M.Min); + binary.WriteMValue(shpExtent.M.Max); + } + else + { + binary.WriteMValue(0.0); // ArcMap uses zero as default. 
+ binary.WriteMValue(0.0); + } + } + + + private static void ReadMValues(this BinaryBufferReader shpRecordData, int count, ShpShapeBuilder points) + { + for (int i = 0; i < count; i++) + { + points.Points[i].M = shpRecordData.ReadMValue(); + + } + } + public static void WriteMValues(this BinaryBufferWriter shpRecordData, ShpShapeBuilder points) + { + for (int i = 0; i < points.PointCount; i++) + { + shpRecordData.WriteMValue(points.Points[i].M); + } + } + + public static void AdvancePastXYBoundingBox(this BinaryBufferReader boundingBoxBinary) + { + boundingBoxBinary.Advance(4 * sizeof(double)); + } + public static void ReadXYBoundingBox(this BinaryBufferReader boundingBoxBinary, ShpBoundingBox boundingBox) + { + boundingBoxBinary.ReadXYCoordinate(out var xMin, out var yMin); + boundingBox.X.Expand(xMin); + boundingBox.Y.Expand(yMin); + + boundingBoxBinary.ReadXYCoordinate(out var xMax, out var yMax); + boundingBox.X.Expand(xMax); + boundingBox.Y.Expand(yMax); + } + public static void WriteXYBoundingBox(this BinaryBufferWriter boundingBoxBinary, ShpBoundingBox shpExtent) + { + boundingBoxBinary.WriteXYCoordinate(shpExtent.X.Min.ToValidShpCoordinate(), shpExtent.Y.Min.ToValidShpCoordinate()); + boundingBoxBinary.WriteXYCoordinate(shpExtent.X.Max.ToValidShpCoordinate(), shpExtent.Y.Max.ToValidShpCoordinate()); + } + + + public static int ReadPartCount(this BinaryBufferReader binary) + { + return binary.ReadInt32LittleEndian(); + } + public static void WritePartCount(this BinaryBufferWriter binary, int count) + { + binary.WriteInt32LittleEndian(count); + } + + + public static int ReadPointCount(this BinaryBufferReader binary) + { + return binary.ReadInt32LittleEndian(); + } + public static void WritePointCount(this BinaryBufferWriter binary, int count) + { + binary.WriteInt32LittleEndian(count); + } + + + public static void ReadPartOfsets(this BinaryBufferReader binary, int partCount, ShpShapeBuilder points) + { + for (int partIndex = 0; partIndex < partCount; partIndex++) + { + points.AddPartOffset(binary.ReadInt32LittleEndian()); + } + } + public static void WritePartOffsets(this BinaryBufferWriter binary, ShpShapeBuilder points) + { + for (int partIndex = 0; partIndex < points.PartCount; partIndex++) + { + binary.WriteInt32LittleEndian(points.GetPartOffset(partIndex)); + } + } + + + public static void ReadPoint(this BinaryBufferReader shpRecordData, bool hasZ, bool hasM, ref ShpCoordinates point) + { + shpRecordData.ReadXYCoordinate(out point.X, out point.Y); + + if (hasZ) + { + point.Z = shpRecordData.ReadZCoordinate(); + } + if (hasM) + { + point.M = shpRecordData.ReadMValue(); + } + } + public static void WritePoint(this BinaryBufferWriter shpRecordData, bool hasZ, bool hasM, ShpCoordinates point) + { + shpRecordData.WriteXYCoordinate(point.X, point.Y); + + if (hasZ) + { + shpRecordData.WriteZCoordinate(point.Z); + } + + if (hasM) + { + shpRecordData.WriteMValue(point.M); + } + } + + + public static void ReadPoints(this BinaryBufferReader shpRecordData, int pointCount, bool hasZ, bool hasM, ShpShapeBuilder points) + { + shpRecordData.ReadXYCoordinates(pointCount, points); + + if (hasZ) + { + shpRecordData.AdvancePastZRange(); + shpRecordData.ReadZCoordinates(pointCount, points); + } + + if (hasM) + { + shpRecordData.AdvancePastMRange(); + shpRecordData.ReadMValues(pointCount, points); + } + } + public static void WritePoints(this BinaryBufferWriter shpRecordData, bool hasZ, bool hasM, ShpShapeBuilder points) + { + shpRecordData.WriteXYCoordinates(points); + + if (hasZ) + { + 
shpRecordData.WriteZRange(points.Extent, hasZ); + shpRecordData.WriteZCoordinates(points); + } + + if (hasM) + { + shpRecordData.WriteMRange(points.Extent, hasM); + shpRecordData.WriteMValues(points); + } + } + + } + + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/ShpBoundingBox.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpBoundingBox.cs new file mode 100644 index 0000000..66775a7 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpBoundingBox.cs @@ -0,0 +1,156 @@ +using System; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// The Bounding Box in the main file header storing the actual extent of the shapes in the file. + /// + public class ShpBoundingBox + { + /// + /// Minimum and maximum X coordinate values. + /// + public ShpRange X { get; } + + /// + /// Minimum and maximum Y coordinate values. + /// + public ShpRange Y { get; } + + /// + /// Minimum and maximum Z coordinate values. + /// + public ShpRange Z { get; } + + /// + /// Minimum and maximum Measure values. + /// + public ShpRange M { get; } + + /// + /// Initializes new class instance. + /// + public ShpBoundingBox() + { + X = new ShpRange(); + Y = new ShpRange(); + Z = new ShpRange(); + M = new ShpRange(); + } + + /// + /// Clears the bounding box. + /// + public void Clear() + { + X.Clear(); + Y.Clear(); + Z.Clear(); + M.Clear(); + } + + + /// + /// Expands the boundign box by other bounding box. + /// + /// Bounding box used to expand this instance. + public void Expand(ShpBoundingBox other) + { + X.Expand(other.X); + Y.Expand(other.Y); + Z.Expand(other.Z); + M.Expand(other.M); + } + + /// + /// Expands the boundign box by point coordinates. + /// + /// Point coordinates. + public void Expand(ShpCoordinates point) + { + X.Expand(point.X); + Y.Expand(point.Y); + Z.Expand(point.Z); + M.Expand(point.M); + } + } + + + /// + /// Minimum and maximum values of coordinate. + /// + public class ShpRange + { + /// + /// Initializes empty class instance. + /// + public ShpRange() + { + Clear(); + } + + /// + /// Mimimum value. + /// + public double Min { get; private set; } + + /// + /// Maximum value. + /// + public double Max { get; private set; } + + /// + /// Specifies if this is emty. + /// + public bool IsEmpty => double.IsNaN(Min) || double.IsNaN(Max); + + /// + /// Expands the range by specified value. + /// + /// + public void Expand(double value) + { + if (double.IsNaN(value) || value == double.MinValue) // ArcMap 10.6 saves empty point coordinates as doubule.MinValue + return; + + if (double.IsNaN(Min)) // NaN > value => false; + { + Min = value; + } + else if (Min > value) // Min > NaN => false; + { + Min = value; + } + + if (double.IsNaN(Max)) // NaN < value => false; + { + Max = value; + } + else if (Max < value) // Max < NaN => false; + { + Max = value; + } + } + + /// + /// Expands the range by other range. + /// + /// Range used to expand this instance. + public void Expand(ShpRange other) + { + Expand(other.Min); + Expand(other.Max); + } + + /// + /// Clears the range. 
+ /// + public void Clear() + { + this.Min = double.NaN; + this.Max = double.NaN; + } + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/ShpCoordinateExtensions.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpCoordinateExtensions.cs new file mode 100644 index 0000000..88f867c --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpCoordinateExtensions.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Helper methods for ordinate converting as specified in the ESRI Shapefile Technical Description. + /// + public static class ShpCoordinateExtensions + { + + /// + /// Converts Z ordinate to value as specified in the ESRI Shapefile Technical Description. + /// + /// Z value. + /// + /// • 0.0 if specified value is
double.NaN or double.MinValue, + /// • double.MaxValue if specified value is double.PositiveInfinity,
+ /// • double.MinValue + double.Epsilon if specified value is double.NegativeInfinity,
+ /// otherwise it returns the specified value. + ///
+ /// + /// SHP Spec: Floating point numbers must be numeric values. Positive infinity, negative infinity, + /// and Not-a-Number(NaN) values are not allowed in shapefiles. + /// + public static double ToValidShpCoordinate(this double coordinate) + { + if (double.IsNaN(coordinate) || coordinate == double.MinValue) // ArcMap 10.6 saves empty point as doubule.MinValue coordinates + return 0.0; + + if (double.IsPositiveInfinity(coordinate)) + return double.MaxValue; + + if (double.IsNegativeInfinity(coordinate)) + return double.MinValue + double.Epsilon; // ArcMap 10.6 saves empty point as doubule.MinValue coordinates - add Epsilon. + + return coordinate; + } + + + /// + /// Converts Measure to value as specified in the ESRI Shapefile Technical Description. + /// + /// Measure value. + /// + /// • value less than –10E38 if specified value is
double.NaN, + /// • double.MaxValue if specified value is double.PositiveInfinity,
+ /// • -10E38 if specified value is double.NegativeInfinity,
+ /// otherwise it returns the specified value. + ///
+ /// + /// SHP Spec: Floating point numbers must be numeric values. Positive infinity, negative infinity, + /// and Not-a-Number(NaN) values are not allowed in shapefiles. Nevertheless, shapefiles + /// support the concept of "no data" values, but they are currently used only for measures. + /// Any floating point number smaller than –10E38 is considered by a shapefile reader to represent a "no data" value. + /// + public static double ToValidShpMeasure(this double m) + { + if (double.IsNaN(m)) + return double.MinValue; // Shapefile.MeasureMinValue - 1; (Esri uses double.MinValue) + + if (double.IsPositiveInfinity(m)) + return double.MaxValue; + + if (double.IsNegativeInfinity(m)) + return Shapefile.MeasureMinValue; + + return m; + } + } + + + + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/ShpCoordinates.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpCoordinates.cs new file mode 100644 index 0000000..10f406e --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpCoordinates.cs @@ -0,0 +1,211 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + + + /// + /// Point represented as X, Y, Z, M coordinates. + /// + public struct ShpCoordinates + { + /// + /// Represents the smallest positive coordinate value that is greater than zero. + /// + /// + /// This field is equal to 0.000000002777...7 in linear units and 0°00'00.00001" in decimal degrees (equivalent of ~0.3mm in metric units). + /// + public static double Epsilon = 0.00001 / 60.0 / 60.0; + + /// + /// Point representation of . + /// + public static ShpCoordinates NullPoint = new ShpCoordinates(double.NaN, double.NaN); + + /// + /// X coordinate + /// + public double X; + + + /// + /// Y coordinate + /// + public double Y; + + + /// + /// Z coordinate + /// + public double Z; + + + /// + /// Measure + /// + public double M; + + + /// + /// Initializes a new instance of the class. + /// + /// X coordinate + /// Y coordinate + /// Z coordinate + /// Measure + public ShpCoordinates(double x, double y, double z, double m) + { + X = x; + Y = y; + Z = z; + + if (m < Shapefile.MeasureMinValue) + M = double.NaN; + else + M = m; + } + + + /// + /// Initializes a new instance of the class. + /// + /// X coordinate + /// Y coordinate + /// Measure + public ShpCoordinates(double x, double y, double m) + { + X = x; + Y = y; + Z = double.NaN; + M = m; + } + + /// + /// Initializes a new instance of the class. + /// + /// X coordinate + /// Y coordinate + public ShpCoordinates(double x, double y) + { + X = x; + Y = y; + Z = double.NaN; + M = double.NaN; + } + + + + /// + /// Indicates whether this and a specified object are equal. + /// + /// The object to compare with the current . + /// true if obj is and have the same coordinates; otherwise, false. + public override bool Equals(object other) + { + return other is ShpCoordinates point && Equals(point); + } + + /// + /// Indicates whether this and a specified point are equal. + /// + /// The point to compare with the current . + /// true if both points have the same coordinates; otherwise, false. + public bool Equals(ShpCoordinates other) + { + return Equals(other.X, other.Y, other.Z, other.M); + } + + /// + /// Indicates whether this and a specified coordinates are equal. + /// + /// X coordinate. + /// Y coordinate. + /// Z coordinate. + /// Measure value. + /// true if specified coordinates are equal to this point coordinates; otherwise, false. 
+ public bool Equals(double x, double y, double z, double m) + { + return Equals(X, x) && Equals(Y, y) && Equals(Z, z) && Equals(M, m); + } + + private bool Equals(double v1, double v2) + { + if (v1 == v2) + { + return true; + } + + // ESRI Shapefile Technical Description, page 9: + // The rings are closed (the first and last vertex of a ring MUST be the same). + + //if (Math.Abs(v1- v2) < Epsilon) + //{ + // return true; + //} + + return double.IsNaN(v1) && double.IsNaN(v2); + } + + /// + public override int GetHashCode() + { + return X.GetHashCode(); + } + + /// + public override string ToString() + { + if (IsNull) + return ShapeType.NullShape.ToString(); + + var fmt = (X < 1000 & Y < 1000) ? "" : "0.000"; // for latitudes and longitudes use full precision + var sb = new StringBuilder(); + sb.Append("X:" + X.ToString(fmt)); + sb.Append(", Y:"); + sb.Append(Y.ToString(fmt)); + if (HasZ) + { + sb.Append(", Z:"); + sb.Append(Z.ToString(fmt)); + } + if (HasM) + { + sb.Append(", M:"); + sb.Append(M.ToString(fmt)); + } + + return sb.ToString(); + } + + + /// + /// Indicates the point with no geometric data. + /// + /// + /// According to ESRI Shapefile Technical Description point feature + /// supports null - it is valid to have points and null points in the same shapefile. + /// + public bool IsNull => double.IsNaN(X) || double.IsNaN(Y) || X == double.MinValue || Y == double.MinValue; // ArcMap writes empty geometry as double.MinValue coordinates. + + + /// + /// Indicates wheter this inttance of contains Z coordinate. + /// + public bool HasZ => !double.IsNaN(Z); + + + /// + /// Indicates wheter this inttance of contains M coordinate. + /// + public bool HasM => !double.IsNaN(M); + } + + + + + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/ShpShapeBuilder.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpShapeBuilder.cs new file mode 100644 index 0000000..058295f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/ShpShapeBuilder.cs @@ -0,0 +1,371 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Shape coordiantes collection. + /// + public class ShpShapeBuilder : IReadOnlyList> + { + internal ShpCoordinates[] Points; + private int[] Offsets; + internal readonly ShpBoundingBox Extent = new ShpBoundingBox(); + + + /// + /// Creates ne instance of ShpPoints. + /// + public ShpShapeBuilder() + { + Reset(); + } + + + /// + /// Point count. + /// + public int PointCount { get; private set; } + + + /// + /// Part count. + /// + public int PartCount { get; private set; } + + + /// + /// Gets the point at specified index. + /// + /// The zero-based index of the point to get. + /// Editable reference to the poinat at the specified index. + public ShpCoordinates this[int index] + { + get + { + if (index < 0 || index >= PointCount) + throw new ArgumentOutOfRangeException(nameof(index)); + return Points[index]; + } + // set { } // This would involve recalculating Extent at every single point setting. + } + + internal bool FirstPointIsNull => PointCount < 1; // || Points[0].IsNull; + + + /// + /// Clear all parts and points. + /// + /// This method does not resize the internal buffer capacity. + public void Clear() + { + PointCount = 0; + PartCount = 0; + //Offsets[0] = 0; // First offset must always be present and must always be zero. 
+ Extent.Clear(); + + // Assure that for ShpPointReader (NullShape case) + Points[0].X = double.NaN; + Points[0].Y = double.NaN; + Points[0].Z = double.NaN; + Points[0].M = double.NaN; + } + + /// + /// Clear all data from underlying buffer and resize internal buffer capacity to its initial state. + /// + public void Reset() + { + Points = new ShpCoordinates[1]; + Offsets = new int[1]; + + Clear(); // Reset new instances created above + } + + + /// + /// Start new PolyLine or Polygon part. + /// + public void StartNewPart() + { + if (PointCount < 1) + { + AddPartOffset(0); + return; + } + + if (PartCount < 1) // && PointCount > 0 + { + AddPartOffset(0); // First offset must always be zero. + + } + + // At this moment we always have a least one part. + if (Offsets[PartCount - 1] == PointCount) + { + return; // Do not duplicate the same part offset + } + + + AddPartOffset(PointCount); + } + + + internal void AddPartOffset(int offset) + { + if (Offsets.Length <= PartCount) + { + ArrayBuffer.Expand(ref Offsets); + } + + Offsets[PartCount] = offset; + Debug.Assert(PartCount < 1 || Offsets[PartCount - 1] < Offsets[PartCount], + GetType().Name + "." + nameof(AddPartOffset) + "()", + "Corrupted SHP file - iInvalid part offset sequence. Preceding part offset must be less than succeeding part offset."); + + PartCount++; + } + + /// + /// Gets index to first point in part. + /// + /// Part index. + /// Index to first point in part. + public int GetPartOffset(int partIndex) + { + if (partIndex < 0 || partIndex >= PartCount) + throw new IndexOutOfRangeException(nameof(partIndex)); + + return Offsets[partIndex]; + } + + /// + /// Adds a point. + /// + /// The point to be added. + public void AddPoint(ShpCoordinates point) + { + AddPoint(point.X, point.Y, point.Z, point.M); + } + + + /// + /// Adds a point. + /// + /// X coordinate. + /// Y coordinate. + /// Z coordinate. + /// Measure value. + public void AddPoint(double x, double y, double z, double m) + { + if (Points.Length <= PointCount) + { + ArrayBuffer.Expand(ref Points); + } + + Points[PointCount].X = x; + Points[PointCount].Y = y; + Points[PointCount].Z = z; + Points[PointCount].M = m; + PointCount++; + + Extent.X.Expand(x); + Extent.Y.Expand(y); + Extent.Z.Expand(z); + if (m >= Shapefile.MeasureMinValue) // ! + { + Extent.M.Expand(m); + } + } + + + /// + /// Adds a point. + /// + /// X coordinate. + /// Y coordinate. + /// Measure value. + public void AddPoint(double x, double y, double m) + { + AddPoint(x, y, double.NaN, m); + } + + + /// + /// Adds a point. + /// + /// X coordinate. + /// Y coordinate. + public void AddPoint(double x, double y) + { + AddPoint(x, y, double.NaN, double.NaN); + } + + + /// + /// Gets point count held by specific part. + /// + /// Part index. + /// Point count held by specific part. + public int GetPointCount(int partIndex) + { + if (partIndex < 0) + throw new IndexOutOfRangeException(nameof(partIndex)); + + if (partIndex < PartCount - 1) + return Offsets[partIndex + 1] - Offsets[partIndex]; + + if (partIndex == 0 && PartCount < 1) + return PointCount; + + if (partIndex == PartCount - 1) + return PointCount - Offsets[partIndex]; + + throw new IndexOutOfRangeException(nameof(partIndex)); + } + + + /// + /// Get all points divided into one or more PolyLine or Polygon parts. + /// + /// All points divided into parts. + /// A part is a connected sequence of two or more points. 
+ public IReadOnlyList> GetParts() + { + var parts = new ShpCoordinates[Math.Max(PartCount, 1)][]; + for (int partIndex = 0; partIndex < parts.Length; partIndex++) + { + parts[partIndex] = GetPart(partIndex); + } + return parts; + } + + private ShpCoordinates[] GetPart(int partIndex) + { + var pointCount = GetPointCount(partIndex); + var partPoints = new ShpCoordinates[pointCount]; + Array.Copy(Points, Offsets[partIndex], partPoints, 0, partPoints.Length); + + return partPoints; + } + + /// + /// Determines whether the specified ShpPointCollection is equal to the current collection. + /// + /// The collection to compare with the current object. + /// true if the specified collection is equal to the current collection; otherwise, false. + public bool Equals(ShpShapeBuilder other) + { + if (PointCount != other.PointCount || PartCount != other.PartCount) + return false; + + // Start from Parts. There will be usually less to check. + for (int partIndex = 0; partIndex < PartCount; partIndex++) + { + if (Offsets[partIndex] != other.Offsets[partIndex]) + return false; + } + + for (int i = 0; i < PointCount; i++) + { + if (!Points[i].Equals(other.Points[i])) + return false; + } + + return true; + } + + /// + /// Creates copy of this point collection. + /// + /// ShpPoint collection. + public ShpShapeBuilder Copy() + { + var copy = new ShpShapeBuilder(); + + copy.Points = new ShpCoordinates[Math.Max(1, PointCount)]; + copy.PointCount = PointCount; + Array.Copy(Points, copy.Points, PointCount); + + copy.Offsets = new int[Math.Max(1, PartCount)]; + copy.PartCount = PartCount; + Array.Copy(Offsets, copy.Offsets, PartCount); + + copy.Extent.Expand(Extent); + return copy; + } + + /// + public override string ToString() + { + if (PartCount > 0) + return $"MultiPart [{PartCount}:{PointCount}]"; + + if (PointCount < 1) + return ShapeType.NullShape.ToString(); + + if (PointCount == 1) + return $"Point [{Points[0]}]"; + + return $"MultiPoint [{PointCount}]"; + + } + + + #region *** IReadOnlyCollection *** + + int IReadOnlyCollection>.Count => PartCount; + IReadOnlyList IReadOnlyList>.this[int partIndex] => GetPart(partIndex); + + IEnumerator> IEnumerable>.GetEnumerator() + { + return new ShpPartEnumerator(this); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return new ShpPartEnumerator(this); + } + + private class ShpPartEnumerator : IEnumerator> + { + private readonly ShpShapeBuilder Owner; + private int CurrentIndex; + + public IReadOnlyList Current { get; private set; } + object IEnumerator.Current => Current; + + public ShpPartEnumerator(ShpShapeBuilder owner) + { + Owner = owner ?? throw new ArgumentNullException(nameof(owner)); + Reset(); + } + + public void Reset() + { + CurrentIndex = 0; + } + + public bool MoveNext() + { + if (Owner.PartCount < 1 || CurrentIndex >= Owner.PartCount) + return false; + + Current = Owner.GetPart(CurrentIndex++); + return true; + } + + public void Dispose() + { + // Nothing to dispose + } + } + + #endregion + } + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpMultiPartWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpMultiPartWriter.cs new file mode 100644 index 0000000..755d8ee --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpMultiPartWriter.cs @@ -0,0 +1,40 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Polygon or MultiPart SHP file writer. 
+ /// + public class ShpMultiPartWriter : ShpWriter + { + /// + public ShpMultiPartWriter(Stream shpStream, Stream shxStream, ShapeType type) : base(shpStream, shxStream, type) + { + if (!ShapeType.IsPolygon() && !ShapeType.IsPolyLine()) + throw GetUnsupportedShapeTypeException(); + } + + internal override bool IsNull(ShpShapeBuilder shape) + { + return shape == null || shape.PartCount < 1 || shape.PointCount < 2; + } + + internal override void WriteShapeToBinary(BinaryBufferWriter shpRecordBinary) + { + shpRecordBinary.WriteXYBoundingBox(Shape.Extent); + shpRecordBinary.WritePartCount(Shape.PartCount); + shpRecordBinary.WritePointCount(Shape.PointCount); + + shpRecordBinary.WritePartOffsets(Shape); + shpRecordBinary.WritePoints(HasZ, HasM, Shape); + + Extent.Expand(Shape.Extent); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpMultiPointWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpMultiPointWriter.cs new file mode 100644 index 0000000..524d6a3 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpMultiPointWriter.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// MultiPoint SHP file writer. + /// + public class ShpMultiPointWriter : ShpWriter + { + /// + public ShpMultiPointWriter(Stream shpStream, Stream shxStream, ShapeType type) : base(shpStream, shxStream, type) + { + if (!ShapeType.IsMultiPoint()) + throw GetUnsupportedShapeTypeException(); + } + + internal override bool IsNull(ShpShapeBuilder shape) + { + return shape == null || shape.PointCount < 1; + } + + internal override void WriteShapeToBinary(BinaryBufferWriter shpRecordBinary) + { + shpRecordBinary.WriteXYBoundingBox(Shape.Extent); + shpRecordBinary.WritePointCount(Shape.PointCount); + shpRecordBinary.WritePoints(HasZ, HasM, Shape); + + Extent.Expand(Shape.Extent); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpPointWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpPointWriter.cs new file mode 100644 index 0000000..bc86595 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpPointWriter.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Point SHP file writer. + /// + public class ShpPointWriter : ShpWriter + { + /// + public ShpPointWriter(Stream shpStream, Stream shxStream, ShapeType type) : base(shpStream, shxStream, type) + { + if (!ShapeType.IsPoint()) + throw GetUnsupportedShapeTypeException(); + } + + internal override bool IsNull(ShpShapeBuilder shape) + { + return shape == null || shape.PointCount < 1 || shape.FirstPointIsNull; + } + + internal override void WriteShapeToBinary(BinaryBufferWriter shpRecordBinary) + { + shpRecordBinary.WritePoint(HasZ, HasM, Point); + Extent.Expand(Shape.Extent); + } + + /// + /// Point shape. + /// + public ShpCoordinates Point + { + get { return Shape.Points[0]; } + set + { + Shape.Clear(); + Shape.AddPoint(value); // This updates extent. 
+ } + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpWriter.cs new file mode 100644 index 0000000..a04f14f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shp/Writers/ShpWriter.cs @@ -0,0 +1,129 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + + /// + /// Base class class for writing a fixed-length file header and variable-length records to a *.SHP file. + /// + public abstract class ShpWriter : ManagedDisposable + { + private readonly Stream ShpStream; + private readonly Stream ShxStream; + private readonly BinaryBufferWriter Header = new BinaryBufferWriter(Shapefile.FileHeaderSize); + private readonly BinaryBufferWriter RecordContent = new BinaryBufferWriter(Shapefile.FileHeaderSize); + private int RecordNumber = 1; // Shapefile specs: Record numbers begin at 1. + internal readonly bool HasM; + internal readonly bool HasZ; + + + /// + /// Initializes a new instance of the writer class. + /// + /// SHP file stream. + /// SHX file stream. + /// Shape type. + public ShpWriter(Stream shpStream, Stream shxStream, ShapeType type) + { + ShpStream = shpStream ?? throw new ArgumentNullException("Uninitialized SHP stream.", nameof(shpStream)); + ShxStream = shxStream ?? throw new ArgumentNullException("Uninitialized SHX stream.", nameof(shxStream)); + + ShapeType = type; + HasM = type.HasM(); + HasZ = type.HasZ(); + + // This advances streams position past header to to records start position + WriteFileHeader(ShpStream); + WriteFileHeader(ShxStream); + } + + internal abstract bool IsNull(ShpShapeBuilder shape); + + internal ShpBoundingBox Extent { get; private set; } = new ShpBoundingBox(); + + + /// + /// Shape type. + /// + public ShapeType ShapeType { get; } = ShapeType.NullShape; + + /// + /// Current shape to be written to the underlying stream. + /// + public ShpShapeBuilder Shape { get; } = new ShpShapeBuilder(); + + /// + /// Writes the to the underlying stream and then clears Shape's content. + /// + public void Write() + { + RecordContent.Clear(); + if (IsNull(Shape)) + { + RecordContent.WriteGeometryType(ShapeType.NullShape); + } + else + { + RecordContent.WriteGeometryType(ShapeType); + WriteShapeToBinary(RecordContent); + Extent.Expand(Shape.Extent); + } + + WriteRecordContent(); + Shape.Clear(); + } + + internal abstract void WriteShapeToBinary(BinaryBufferWriter shpRecordBinary); + + internal void WriteRecordContent() + { + Header.Clear(); + Header.WriteShxRecord((int)ShpStream.Position, RecordContent.Size); + Header.CopyTo(ShxStream); // SHX Record + + Header.Clear(); + Header.WriteShpRecordHeader(RecordNumber, RecordContent.Size); + Header.CopyTo(ShpStream); // SHP Record header + RecordContent.CopyTo(ShpStream); // SHP Record content + + RecordNumber++; + } + + private void WriteFileHeader(Stream stream) + { + if (stream == null) + return; + + Header.Clear(); + Header.WriteShpFileHeader(ShapeType, (int)stream.Length, Extent, HasZ, HasM); + + stream.Seek(0, SeekOrigin.Begin); + Header.CopyTo(stream); + } + + /// + protected override void DisposeManagedResources() + { + if (ShpStream != null && ShpStream.Position > Shapefile.FileHeaderSize) + { + WriteFileHeader(ShpStream); + WriteFileHeader(ShxStream); + } + + RecordContent?.Reset(); + Header?.Reset(); + + base.DisposeManagedResources(); // This will dispose owned ShpStream and ShxStream. 
+ } + + internal Exception GetUnsupportedShapeTypeException() + { + throw new FileLoadException(GetType().Name + $" does not support {ShapeType} shapes."); + } + } + + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shx/IO/ShxReader.cs b/src/NetTopologySuite.IO.Esri.Core/Shx/IO/ShxReader.cs new file mode 100644 index 0000000..fdbb5b0 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shx/IO/ShxReader.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + // public class ShxReader { } + + // SHX index file stores the offset and content length for each record in SHP file. + // As there is no additional value, this library ingnores SHX files during reading shapefiles. + +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shx/IO/ShxWriter.cs b/src/NetTopologySuite.IO.Esri.Core/Shx/IO/ShxWriter.cs new file mode 100644 index 0000000..c3e7f72 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shx/IO/ShxWriter.cs @@ -0,0 +1,10 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + // public class ShxWriter { } + + // Writing SHX data is handled directly by ShpWriter class. +} diff --git a/src/NetTopologySuite.IO.Esri.Core/Shx/ShxBinary.cs b/src/NetTopologySuite.IO.Esri.Core/Shx/ShxBinary.cs new file mode 100644 index 0000000..0aca9b3 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/Shx/ShxBinary.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile.Core +{ + internal static class ShxBinaryExtensions + { + // ESRI Shapefile Technical Description: The index file header is identical in organization to the SHP file header. + + public static void WriteShxFileHeader(this BinaryBufferWriter binary, ShapeType type, int fileLength, ShpBoundingBox extent, bool hasZ, bool hasM) + { + binary.WriteShpFileHeader(type, fileLength, extent, hasZ, hasM); + } + + public static void ReadShxFileHeader(this BinaryBufferReader binary, out ShapeType type, out int fileLength) + { + binary.ReadShpFileHeader(out type, out fileLength, new ShpBoundingBox()); + } + + + // SHX Record + + public static void WriteShxRecord(this BinaryBufferWriter shxRecord, int offset, int length) + { + shxRecord.Write16BitWords(offset); + shxRecord.Write16BitWords(length); + } + + public static void ReadShxRecord(this BinaryBufferReader shxRecord, out int offset, out int length) + { + offset = shxRecord.Read16BitWords(); + length = shxRecord.Read16BitWords(); + } + + + // ESRI Shapefile Technical Description: + + // The value for file length is the total length of the file in 16-bit words, + // including the fifty 16-bit words that make up the header (which is 100 bytes long). + + // The content length for a record is the length of the record contents section measured in 16-bit words.Each record + // Therefore, contributes (4 + content length) 16-bit words toward the total length of the file, as stored at Byte 24 in the file header. + + // The file length stored in the index file header is the total length of the index file in 16-bit + // words(the fifty 16-bit words of the header plus 4 times the number of records). + + // The offset of a record in the main file is the number of 16-bit words from the start of the main fileto the first byte of the record header for the record. + // Thus, the offset for the first record in the main file is 50, given the 100-byte header. 
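        // Worked example (illustrative numbers, following the arithmetic in the comments above):
        // a record whose content section is 128 bytes long is stored in its SHX record as
        // length = 64 (16-bit words); together with the 8-byte (4-word) record header it
        // contributes 4 + 64 = 68 words to the SHP file length. If it is the first record,
        // its SHX offset is 50, i.e. the 100-byte file header.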
+ + public static void Write16BitWords(this BinaryBufferWriter binary, int value) + { + binary.WriteInt32BigEndian(value / 2); + } + public static int Read16BitWords(this BinaryBufferReader binary) + { + return binary.ReadInt32BigEndian() * 2; + } + } +} diff --git a/src/NetTopologySuite.IO.Esri.Core/dBASE.md b/src/NetTopologySuite.IO.Esri.Core/dBASE.md new file mode 100644 index 0000000..731df29 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri.Core/dBASE.md @@ -0,0 +1,93 @@ +# dBASE III format specification + + + +## File Header + +| Byte | Data type | Description | +| :------------------: | :----------------: | :----------------------------------------- | +| 1 ‑ 32 | byte[32] | Table descriptor (see below). | +| 32 ‑ 64 | byte[32] | Field Descriptor (see below). | +| 65 ‑ 96 | byte[32] | Field Descriptor. | +| ... | ... | Field Descriptors. | +| (n‑32) - n | byte[32] | Field Descriptor. Maximum [255 fields][limits]. | +| n+1 | byte | 0x0D stored as the file header terminator. | + +
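As a quick cross-check of the layout above, the header size follows from the field count alone: a 32-byte table descriptor, one 32-byte descriptor per field, and the single 0x0D terminator. A minimal sketch (the helper name is illustrative, not part of this library):

```c#
static class DbfHeaderMath
{
    // dBASE III header size = table descriptor + one descriptor per field + 0x0D terminator.
    public static int GetHeaderSize(int fieldCount) => 32 + fieldCount * 32 + 1;
}
// Example: 4 fields -> 32 + 4 * 32 + 1 = 161 bytes,
// which matches the sample header size shown in the table descriptor below.
```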
+ +## Table descriptor + +| Byte | Data type | Sample content | Description | +| :---: | :----------------: | :---------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 1 | byte | 0x03 | dBASE file version. | +| 2-4 | byte[3] | 121|01|16 | Date of last DBF file update in YYMMDD format. Each byte contains the number as a binary. Year starts from 1900, therefore possible year values are 1900-2155. | +| 5-8 | uint | 123 | Number of records in the table. Records limit in a table: [1 Billion][limits]. Unsigned 32 bit integer (little-endian). | +| 9-10 | ushort | 161 | Number of bytes in the file header (table descriptor header + field descriptor headers + header terminator char 0x0d). Unsigned 16 bit integer (little-endian). | +| 11-12 | ushort | 123 | Number of bytes in the record. Unsigned 16 bit integer (little-endian). | +| 13-29 | byte[17] | --- | Reserved. Filled with zeros. | +| 30 | byte | 0x23 | Encoding flag based on Language Driver ID (LDID). | +| 31-32 | byte[2] | --- | Reserved. Filled with zeros. | + +
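The fixed part of the table descriptor can be decoded with plain little-endian reads at the offsets listed above. A hedged sketch (zero-based offsets, little-endian host assumed; the type and method names are illustrative only):

```c#
using System;

static class DbfTableDescriptor
{
    // Decodes the 32-byte table descriptor described above (offsets here are zero-based).
    public static (DateTime lastUpdate, uint recordCount, ushort headerSize, ushort recordSize) Read(byte[] d)
    {
        var lastUpdate = new DateTime(1900 + d[1], d[2], d[3]); // bytes 2-4: YYMMDD, year counted from 1900
        uint recordCount = BitConverter.ToUInt32(d, 4);         // bytes 5-8: little-endian record count
        ushort headerSize = BitConverter.ToUInt16(d, 8);        // bytes 9-10: header size in bytes
        ushort recordSize = BitConverter.ToUInt16(d, 10);       // bytes 11-12: record size in bytes
        return (lastUpdate, recordCount, headerSize, recordSize);
    }
}
```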
+ +## Field Descriptor + +| Byte | Data type | Sample content | Description | +| :---: | :-------: | :------------: | :---------- | +| 1-10 | byte[10] | "NAME" | Field name may contain any letter, number or the underscore (_) character. Max [10 characters][dBaseIV] long. Empty space [zero-filled][header]. | +| 11 | byte | --- | Reserved. Filled with zero. | +| 12 | byte | 'N' | Field type in ASCII (C, D, F, N, L). | +| 13-16 | byte[4] | --- | Reserved. Filled with zeros. | +| 17 | byte | 19 | Field length. | +| 18 | byte | 15 | Decimal places count for numeric fields. | +| 19-32 | byte[14] | --- | Reserved. Filled with zeros. | + +
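Each 32-byte field descriptor can be decoded the same way. A sketch under the same assumptions (illustrative names, zero-based offsets):

```c#
using System.Text;

static class DbfFieldDescriptor
{
    // Decodes one 32-byte field descriptor described above (offsets here are zero-based).
    public static (string name, char type, byte length, byte decimals) Read(byte[] d)
    {
        string name = Encoding.ASCII.GetString(d, 0, 10).TrimEnd('\0'); // bytes 1-10: name, zero-filled
        char type = (char)d[11];                                        // byte 12: 'C', 'D', 'F', 'N' or 'L'
        byte length = d[16];                                            // byte 17: field length
        byte decimals = d[17];                                          // byte 18: decimal places
        return (name, type, length, decimals);
    }
}
```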
+ +## Data Types + +| Symbol | Data type | Length | Decimal places | Sample content | Description | +| :----: | :-------: | :---------: | :------------: | :------------: | :-------------------------------------------------------------------------------------------- | +| C | Character | 1-254 | 0 | "NAME" | All OEM code page characters - padded with blanks to the width of the field. | +| D | Date | 8 | 0 | "20210116" | Date stored as a string in the format YYYYMMDD (8 bytes). Any date from AD 1 to AD 9999. | +| N | Numeric | [1-19][len] | [1-15][len] | "12.3456" | Number stored as a string, right justified, and padded with blanks to the width of the field. | +| F | Float | [1-19][len] | [1-15][len] | "12.3456" | Identical to Numeric. [Maintained for compatibility][field-types]. | +| L | Logical | 1 | 0 | 'T' | Initialized to 0x20 (space) otherwise 'T' or 'F'. | + +
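Because every type above is stored as fixed-width text, formatting a value is mostly a matter of padding. A sketch of how such values could be rendered (helper names are illustrative):

```c#
using System;
using System.Globalization;

static class DbfValueFormat
{
    public static string Numeric(double value, int length, int decimals) =>
        value.ToString("F" + decimals, CultureInfo.InvariantCulture).PadLeft(length); // right justified, blank padded

    public static string Character(string text, int length) =>
        (text ?? string.Empty).PadRight(length);                                      // blank padded to field width

    public static string Date(DateTime date) => date.ToString("yyyyMMdd");            // YYYYMMDD

    public static string Logical(bool? value) => value == null ? " " : (value.Value ? "T" : "F");
}
// DbfValueFormat.Numeric(12.3456, 10, 4) -> "   12.3456"
```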
+ +## Data records + +The records follow the header in the table file. Each data record is preceded by one byte: +a space (0x20) if the record is not deleted, or an asterisk (0x2A) if it is deleted. +Fields are packed into records without field separators or record terminators. +The end of the file is marked with a single byte, the end-of-file marker: +an OEM code page character value of 26 (0x1A). + +
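Putting the record layout together, a reader only needs the header size and record size from the table descriptor to walk the file. A minimal sketch that counts live and deleted records (illustrative name; the record size is taken to include the 1-byte deleted flag, as the DbaseReader code in this patch also assumes):

```c#
using System.IO;

static class DbfRecordScan
{
    public static (int live, int deleted) Count(Stream stream, int headerSize, int recordSize)
    {
        stream.Seek(headerSize, SeekOrigin.Begin);                 // records start right after the header
        int live = 0, deleted = 0, flag;
        while ((flag = stream.ReadByte()) >= 0 && flag != 0x1A)    // 0x1A = end-of-file marker
        {
            if (flag == 0x2A) deleted++;                           // '*' marks a deleted record
            else if (flag == 0x20) live++;                         // ' ' marks a live record
            stream.Seek(recordSize - 1, SeekOrigin.Current);       // skip the packed field bytes
        }
        return (live, deleted);
    }
}
```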
+ +## Reference + +- [Data File Header Structure for the dBASE Version 7 Table File][header] +- [dBASE Field types][field-types] +- [Understanding DBF Essentials][len] +- [Converting DBF file of dBASE II/III into micro CDS/ISIS](http://web.simmons.edu/~chen/nit/NIT%2789/89-127-han.html) +- [Data types in dBase](https://www.promotic.eu/en/pmdoc/Subsystems/Db/dBase/DataTypes.htm) +- [dBASE IV Basics][dBaseIV] +- [Null values][null] +- [Null value substitution in shapefiles and dBASE (.dbf) files][shp-null] +- [What are the limits to dBASE databases?][limits] +- [Xbase Data file (*.dbf)][xbase] +- [DBF and DBT/FPT file structure][dbf-fpt-format] + + + +[header]: http://www.dbase.com/KnowledgeBase/int/db7_file_fmt.htm +[field-types]: https://www.dbase.com/help/11_2/Design_Tables/IDH_TABLEDES_FIELD_TYPES.htm +[len]: http://www.sfu.ca/sasdoc/sashtml/accpc/z0214453.htm +[dBaseIV]: https://www.osti.gov/servlets/purl/10180088 +[null]: http://www.dbase.com/help/2_80/Plus-en.htm#Language_Definition/IDH_LDEF_NULLVALUES.htm +[shp-null]: https://desktop.arcgis.com/en/arcmap/latest/manage-data/shapefiles/geoprocessing-considerations-for-shapefile-output.htm +[limits]: http://www.dbase.com/Knowledgebase/faq/dBASE_Limits_FAQ.html +[xbase]: https://www.clicketyclick.dk/databases/xbase/format/dbf.html +[dbf-fpt-format]: http://www.independent-software.com/dbase-dbf-dbt-file-format.html \ No newline at end of file diff --git a/src/NetTopologySuite.IO.Esri.Core/dBASE.png b/src/NetTopologySuite.IO.Esri.Core/dBASE.png new file mode 100644 index 0000000..a621aae Binary files /dev/null and b/src/NetTopologySuite.IO.Esri.Core/dBASE.png differ diff --git a/src/NetTopologySuite.IO.Esri/AssemblyInfo.cs b/src/NetTopologySuite.IO.Esri/AssemblyInfo.cs new file mode 100644 index 0000000..6b392e1 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/AssemblyInfo.cs @@ -0,0 +1,12 @@ +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +[assembly: ComVisible(false)] +[assembly: Guid("b03e19e2-1a40-48bf-8b59-4b6bf11aea9d")] + +[assembly: InternalsVisibleTo("NetTopologySuite.IO.ShapeFile.Test, PublicKey=" + + "0024000004800000940000000602000000240000525341310004000001000100e5a9697e3d378d" + + "e4bdd1607b9a6ea7884823d3909f8de55b573416d9adb0ae25eebc39007d71a7228c500d6e846d" + + "54dcc2cd839056c38c0a5e86b73096d90504f753ea67c9b5e61ecfdb8edf0f1dfaf0455e9a0f9e" + + "124e16777baefcda2af9a5a9e48f0c3502891c79444dc2d75aa50b75d148e16f1401dcb18bc163" + + "8cc764a9")] diff --git a/src/NetTopologySuite.IO.Esri/Extensions/FeatureExtensions.cs b/src/NetTopologySuite.IO.Esri/Extensions/FeatureExtensions.cs new file mode 100644 index 0000000..280059e --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Extensions/FeatureExtensions.cs @@ -0,0 +1,215 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// Feature helper methods. + /// + public static class FeatureExtensions + { + /// + /// Writes features to the shapefile. + /// + /// Features to be written. + /// Path to shapefile. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Projection metadata for the shapefile (.prj file). 
+ public static void SaveToShapefile(this IEnumerable features, string shpPath, Encoding encoding = null, string projection = null) + { + if (features == null) + throw new ArgumentNullException(nameof(features)); + + var firstFeature = features.FirstOrDefault(); + if (firstFeature == null) + throw new ArgumentException(nameof(ShapefileWriter) + " requires at least one feature to be written."); + + var fields = firstFeature.Attributes.GetDbfFields(); + var shapeType = features.FindNonEmptyGeometry().GetShapeType(); + + using (var shpWriter = ShapefileWriter.Open(shpPath, shapeType, fields, encoding, projection)) + { + foreach (var feature in features) + { + shpWriter.Write(feature); + } + } + } + + + internal static Feature ToFeature(this Geometry geometry, AttributesTable attributes) + { + var feature = new Feature(geometry, attributes); + feature.BoundingBox = geometry.EnvelopeInternal; + return feature; + } + + + internal static AttributesTable GetAttributesTable(this DbfFieldCollection fields) + { + return new AttributesTable(fields.GetValues()); + } + + + internal static DbfField[] GetDbfFields(this IAttributesTable attributes) + { + var names = attributes.GetNames(); + var fields = new DbfField[names.Length]; + + for (int i = 0; i < names.Length; i++) + { + var name = names[i]; + var type = attributes.GetType(name); + fields[i] = DbfField.Create(name, type); + } + return fields; + } + + /// + /// Gets default for specified geometry. + /// + /// A Geometry object. + /// Shape type. + public static ShapeType GetShapeType(this Geometry geometry) + { + geometry = FindNonEmptyGeometry(geometry); + + if (geometry == null || geometry.IsEmpty) + return ShapeType.NullShape; + + var ordinates = geometry.GetOrdinates(); + + if (geometry is Point) + return GetPointType(ordinates); + + if (geometry is MultiPoint) + return GetMultiPointType(ordinates); + + if (geometry is LineString || geometry is MultiLineString) + return GetPolyLineType(ordinates); + + if (geometry is Polygon || geometry is MultiPolygon) + return GetPolygonType(ordinates); + + throw new ArgumentException("Unsupported shapefile geometry: " + geometry.GetType().Name); + } + + + private static Geometry FindNonEmptyGeometry(Geometry geometry) + { + if (geometry == null || geometry.IsEmpty) + return null; + + var geomColl = geometry as GeometryCollection; + + // Shapefile specification distinguish between Point and MultiPoint. + // That not the case for PolyLine and Polygon. 
+ if (geometry is MultiPoint || geomColl == null) + { + return geometry; + } + + for (int i = 0; i < geomColl.Count; i++) + { + var geom = geomColl[i]; + + // GeometryCollection -> MultiPolygon -> Polygon + if (geom is GeometryCollection) + geom = FindNonEmptyGeometry(geom); + + if (geom != null && !geom.IsEmpty) + return geom; + } + return null; + } + + internal static Geometry FindNonEmptyGeometry(this IEnumerable features) + { + if (features == null) + return null; + + foreach (var feature in features) + { + var geometry = FindNonEmptyGeometry(feature.Geometry); + if (geometry != null) + return geometry; + } + + return null; + } + + + private static Ordinates GetOrdinates(this Geometry geometry) + { + if (geometry == null) + throw new ArgumentNullException(nameof(geometry)); + + if (geometry is Point point) + return point.CoordinateSequence.Ordinates; + + if (geometry is LineString line) + return line.CoordinateSequence.Ordinates; + + if (geometry is Polygon polygon) + return polygon.Shell.CoordinateSequence.Ordinates; + + if (geometry.NumGeometries > 0) + return GetOrdinates(geometry.GetGeometryN(0)); + + throw new ArgumentException("Unsupported shapefile geometry: " + geometry.GetType().Name); + } + + + private static ShapeType GetPointType(Ordinates ordinates) + { + if (ordinates == Ordinates.XYM) + return ShapeType.PointM; + + if (ordinates == Ordinates.XYZ || ordinates == Ordinates.XYZM) + return ShapeType.PointZM; + + return ShapeType.Point; + } + + + private static ShapeType GetMultiPointType(Ordinates ordinates) + { + if (ordinates == Ordinates.XYM) + return ShapeType.MultiPointM; + + if (ordinates == Ordinates.XYZ || ordinates == Ordinates.XYZM) + return ShapeType.MultiPointZM; + + return ShapeType.MultiPoint; + } + + + private static ShapeType GetPolyLineType(Ordinates ordinates) + { + if (ordinates == Ordinates.XYM) + return ShapeType.PolyLineM; + + if (ordinates == Ordinates.XYZ || ordinates == Ordinates.XYZM) + return ShapeType.PolyLineZM; + + return ShapeType.PolyLine; + } + + + private static ShapeType GetPolygonType(Ordinates ordinates) + { + if (ordinates == Ordinates.XYM) + return ShapeType.PolygonM; + + if (ordinates == Ordinates.XYZ || ordinates == Ordinates.XYZM) + return ShapeType.PolygonZM; + + return ShapeType.Polygon; + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Extensions/ShpExtensions.cs b/src/NetTopologySuite.IO.Esri/Extensions/ShpExtensions.cs new file mode 100644 index 0000000..99e2a47 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Extensions/ShpExtensions.cs @@ -0,0 +1,120 @@ +using NetTopologySuite.Geometries; +using System; +using System.Collections.Generic; +using System.Text; +using NetTopologySuite.IO.Shapefile.Core; + +namespace NetTopologySuite.IO.Shapefile +{ + internal static class ShpExtensions + { + public static Point ToPoint(this ShpCoordinates shpPoint, bool hasZ, bool hasM) + { + if (hasZ && hasM) + { + return new Point(shpPoint.X, shpPoint.Y, shpPoint.Z) { M = shpPoint.M }; + } + else if (hasZ) + { + return new Point(shpPoint.X, shpPoint.Y, shpPoint.Z); + } + else if (hasM) + { + return new Point(shpPoint.X, shpPoint.Y) { M = shpPoint.M }; + } + else + { + return new Point(shpPoint.X, shpPoint.Y); + } + } + + + public static void SetCoordinates(this CoordinateSequence sequence, int index, ShpCoordinates coords, bool hasZ, bool hasM) + { + sequence.SetX(index, coords.X); + sequence.SetY(index, coords.Y); + + if (hasZ) + sequence.SetZ(index, coords.Z); + + if (hasM) + sequence.SetM(index, coords.M); + } + + + public static 
CoordinateSequence GetPartCoordinates(this ShpShapeBuilder shape, int partIndex, bool hasZ, bool hasM, bool closeSequence = false) + { + var partOffset = shape.GetPartOffset(partIndex); + var shpPartPointCount = shape.GetPointCount(partIndex); + + if (closeSequence) + { + var firstPoint = shape[partOffset]; + var lastPoint = shape[partOffset + shpPartPointCount - 1]; + + closeSequence = (shpPartPointCount > 2) && !firstPoint.Equals(lastPoint); + } + + var sequencePointCount = closeSequence ? shpPartPointCount + 1 : shpPartPointCount; + var points = CreateCoordinateSequence(sequencePointCount, hasZ, hasM); + + for (int i = 0; i < shpPartPointCount; i++) + { + var p = shape[partOffset + i]; + points.SetCoordinates(i, p, hasZ, hasM); + } + + if (closeSequence) + { + var firstPoint = shape[partOffset]; + var lastPointIndex = sequencePointCount - 1; + points.SetCoordinates(lastPointIndex, firstPoint, hasZ, hasM); + } + + return points; + } + + + private static CoordinateSequence CreateCoordinateSequence(int size, bool hasZ, bool hasM) + { + if (hasZ && hasM) + return GeometryFactory.Default.CoordinateSequenceFactory.Create(size, Ordinates.XYZM); + + if (hasZ) + return GeometryFactory.Default.CoordinateSequenceFactory.Create(size, Ordinates.XYZ); + + if (hasM) + return GeometryFactory.Default.CoordinateSequenceFactory.Create(size, Ordinates.XYM); + + return GeometryFactory.Default.CoordinateSequenceFactory.Create(size, Ordinates.XY); + } + + + internal static void AddPoint(this ShpShapeBuilder shape, Point point) + { + if (point == null || point == Point.Empty) + return; + + shape.AddPoint(point.X, point.Y, point.Z, point.M); + } + + + internal static void AddPart(this ShpShapeBuilder shape, CoordinateSequence coordinates) + { + shape.StartNewPart(); + + for (int i = 0; i < coordinates.Count; i++) + { + var x = coordinates.GetX(i); + var y = coordinates.GetY(i); + var z = coordinates.GetZ(i); + var m = coordinates.GetM(i); + shape.AddPoint(x, y, z, m); + } + } + + } + + + +} diff --git a/src/NetTopologySuite.IO.Esri/NetTopologySuite.IO.Esri.csproj b/src/NetTopologySuite.IO.Esri/NetTopologySuite.IO.Esri.csproj new file mode 100644 index 0000000..829bf12 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/NetTopologySuite.IO.Esri.csproj @@ -0,0 +1,38 @@ + + + + $(ProjectDir)..\ + netstandard2.0 + NetTopologySuite.IO + true + + + + NetTopologySuite.IO.ShapeFile + + + + NetTopologySuite.IO.ShapeFile + NTS - Topology Suite - ShapeFile IO Libraries + NetTopologySuite - Team + NetTopologySuite - Team + LGPL-2.1-or-later + This package contains several IO libraries to cope with spatial data from various sources. + NTS;Shapefile + + + + + + + + + + + + + + + + + diff --git a/src/NetTopologySuite.IO.Esri/README.md b/src/NetTopologySuite.IO.Esri/README.md new file mode 100644 index 0000000..1fcbb7e --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/README.md @@ -0,0 +1,66 @@ +# NetTopologySuite.IO.Esri + +This library provides forward-only readers and writers for [Esri shapefiles](https://support.esri.com/en/white-paper/279). 
+ +## Getting started + +**Writing features to a shapefile** + +```c# +var features = new List(); +for (int i = 1; i < 5; i++) +{ + var attributes = new AttributesTable(); + attributes.Add("date", new DateTime(2000, 1, i + 1)); + attributes.Add("float", i * 0.1); + attributes.Add("int", i); + attributes.Add("logical", i % 2 == 0); + attributes.Add("text", i.ToString("0.00")); + + var lineCoords = new List(); + lineCoords.Add(new CoordinateZ(i, i + 1, i)); + lineCoords.Add(new CoordinateZ(i, i, i)); + lineCoords.Add(new CoordinateZ(i + 1, i, i)); + var line = new LineString(lineCoords.ToArray()); + + var feature = new Feature(line, attributes); + features.Add(feature); +} + +features.SaveToShapefile(shpPath); +``` + +**Reading features from a shapefile** + +```c# +foreach (var feature in ShapefileReader.ReadAll(shpPath)) +{ + Console.WriteLine("Record ID: " + feature.Attributes["Id"]); + foreach (var attrName in feature.Attributes.GetNames()) + { + Console.WriteLine($" {attrName}: {feature.Attributes[attrName]}"); + } + Console.WriteLine($" SHAPE: {feature.Geometry}"); +} +``` + + +**Reading a SHP file geometries** + +```c# +foreach (var geom in ShapefileReader.ReadAllGeometries(shpPath)) +{ + Console.WriteLine(geom); +} +``` + +## Encoding + +The .NET Framework supports a large number of character encodings and code pages. +On the other hand, .NET Core only supports +[limited list](https://docs.microsoft.com/en-us/dotnet/api/system.text.codepagesencodingprovider.instance#remarks) of encodings. +To retrieve an encoding that is present in the .NET Framework on the Windows +desktop but not in .NET Core, you need to do the following: +1. Add to your project reference to to the [System.Text.Encoding.CodePages.dll](https://www.nuget.org/packages/System.Text.Encoding.CodePages/). +2. Put the following line somewhere in your code: + `Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);` \ No newline at end of file diff --git a/src/NetTopologySuite.IO.Esri/Readers/ShapefileMultiPointReader.cs b/src/NetTopologySuite.IO.Esri/Readers/ShapefileMultiPointReader.cs new file mode 100644 index 0000000..dfdbcbd --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Readers/ShapefileMultiPointReader.cs @@ -0,0 +1,77 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// MultiPoint shapefile reader. + /// + public class ShapefileMultiPointReader : ShapefileReader + { + + /// + internal ShapefileMultiPointReader(string shpPath, Encoding encoding = null) : base(shpPath, encoding) + { } + + + internal override Core.ShapefileReader CreateReader(string shpPath, Encoding encoding) + { + return new Core.ShapefileMultiPointReader(shpPath, encoding); + } + + + /// + public override bool Read(out Geometry geometry, out AttributesTable attributes, out bool deleted) + { + var readSucceed = ReadMultiPoint(out var multiPoint, out attributes, out deleted); + geometry = multiPoint; + return readSucceed; + } + + + /// + /// Reads geometry and feature attributes from underlying SHP and DBF files. + /// + /// Feature geometry. + /// Feature atrributes. + /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. 
+ /// + public bool ReadMultiPoint(out MultiPoint geometry, out AttributesTable attributes, out bool deleted) + { + if (!Reader.Read(out deleted)) + { + geometry = MultiPoint.Empty; + attributes = null; + return false; + } + + geometry = GetMultiPoint(Reader.Shape, HasZ, HasM); + attributes = Reader.Fields.GetAttributesTable(); + return true; + } + + + internal static MultiPoint GetMultiPoint(Core.ShpShapeBuilder shape, bool hasZ, bool hasM) + { + if (shape.PointCount < 1) + return MultiPoint.Empty; + + var pointCount = shape.PointCount; + var points = new Point[pointCount]; + + for (int i = 0; i < pointCount; i++) + { + points[i] = shape[i].ToPoint(hasZ, hasM); + } + + return new MultiPoint(points); + } + + } +} diff --git a/src/NetTopologySuite.IO.Esri/Readers/ShapefilePointReader.cs b/src/NetTopologySuite.IO.Esri/Readers/ShapefilePointReader.cs new file mode 100644 index 0000000..9845589 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Readers/ShapefilePointReader.cs @@ -0,0 +1,71 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// MultiLine shapefile reader. + /// + public class ShapefilePointReader : ShapefileReader + { + + /// + internal ShapefilePointReader(string shpPath, Encoding encoding = null) : base(shpPath, encoding) + { } + + + internal override Core.ShapefileReader CreateReader(string shpPath, Encoding encoding) + { + return new Core.ShapefilePointReader(shpPath, encoding); + } + + + /// + public override bool Read(out Geometry geometry, out AttributesTable attributes, out bool deleted) + { + var readSucceed = ReadPoint(out var point, out attributes, out deleted); + geometry = point; + return readSucceed; + } + + + /// + /// Reads geometry and feature attributes from underlying SHP and DBF files. + /// + /// Feature geometry. + /// Feature atrributes. + /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. + /// + public bool ReadPoint(out Point geometry, out AttributesTable attributes, out bool deleted) + { + if (!Reader.Read(out deleted)) + { + geometry = Point.Empty; + attributes = null; + return false; + } + + geometry = GetPoint(Reader.Shape, HasZ, HasM); + attributes = Reader.Fields.GetAttributesTable(); + return true; + } + + + internal static Point GetPoint(Core.ShpShapeBuilder shape, bool hasZ, bool hasM) + { + if (shape.PointCount < 1) + return Point.Empty; + + Debug.Assert(shape.PointCount == 1, "Point " + nameof(Core.ShpShapeBuilder) + " has more than one point."); + + return shape[0].ToPoint(hasZ, hasM); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Readers/ShapefilePolyLineReader.cs b/src/NetTopologySuite.IO.Esri/Readers/ShapefilePolyLineReader.cs new file mode 100644 index 0000000..cdad7c1 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Readers/ShapefilePolyLineReader.cs @@ -0,0 +1,91 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// MultiLine shapefile reader. 
+ /// + public class ShapefilePolyLineReader : ShapefileReader + { + + /// + internal ShapefilePolyLineReader(string shpPath, Encoding encoding = null) : base(shpPath, encoding) + { } + + internal override Core.ShapefileReader CreateReader(string shpPath, Encoding encoding) + { + return new Core.ShapefileMultiPartReader(shpPath, encoding); + } + + /// + public override bool Read(out Geometry geometry, out AttributesTable attributes, out bool deleted) + { + var readSucceed = ReadMultiLine(out var multiLine, out attributes, out deleted); + if (multiLine.Count == 1) + { + geometry = multiLine[0]; // LineString + } + else + { + geometry = multiLine; // MultiLineString + } + return readSucceed; + } + + + /// + /// Reads geometry and feature attributes from underlying SHP and DBF files. + /// + /// Feature geometry. + /// Feature atrributes. + /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. + /// + public bool ReadMultiLine(out MultiLineString geometry, out AttributesTable attributes, out bool deleted) + { + if (!Reader.Read(out deleted)) + { + geometry = MultiLineString.Empty; + attributes = null; + return false; + } + + geometry = GetMultiLineString(Reader.Shape, HasZ, HasM); + attributes = Reader.Fields.GetAttributesTable(); + return true; + } + + + internal static MultiLineString GetMultiLineString(Core.ShpShapeBuilder shape, bool hasZ, bool hasM) + { + if (shape.PartCount < 1 || shape.PointCount < 2) + return MultiLineString.Empty; + + var lines = new List(shape.PartCount); + + for (int partIndex = 0; partIndex < shape.PartCount; partIndex++) + { + var partCoordinates = shape.GetPartCoordinates(partIndex, hasZ, hasM); + + // Line must have at least 2 points + if (partCoordinates.Count < 2) + continue; + + var line = new LineString(partCoordinates, GeometryFactory.Default); + lines.Add(line); + + } + + if (lines.Count < 1) + return MultiLineString.Empty; + + return new MultiLineString(lines.ToArray()); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Readers/ShapefilePolygonReader.cs b/src/NetTopologySuite.IO.Esri/Readers/ShapefilePolygonReader.cs new file mode 100644 index 0000000..22c439c --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Readers/ShapefilePolygonReader.cs @@ -0,0 +1,175 @@ +using NetTopologySuite.Algorithm; +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// MultiPolygon shapefile reader. + /// + public class ShapefilePolygonReader : ShapefileReader + { + private static readonly LinearRing[] NoHoles = new LinearRing[0]; + + /// + internal ShapefilePolygonReader(string shpPath, Encoding encoding = null) : base(shpPath, encoding) + { } + + + internal override Core.ShapefileReader CreateReader(string shpPath, Encoding encoding) + { + return new Core.ShapefileMultiPartReader(shpPath, encoding); + } + + + /// + public override bool Read(out Geometry geometry, out AttributesTable attributes, out bool deleted) + { + var readSucceed = ReadMultiPolygon(out var multiPolygon, out attributes, out deleted); + if (multiPolygon.Count == 1) + { + geometry = multiPolygon[0]; // Polygon + } + else + { + geometry = multiPolygon; // MultiPolygon + } + return readSucceed; + } + + + /// + /// Reads geometry and feature attributes from underlying SHP and DBF files. + /// + /// Feature geometry. 
+ /// Feature atrributes. + /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. + /// + public bool ReadMultiPolygon(out MultiPolygon geometry, out AttributesTable attributes, out bool deleted) + { + if (!Reader.Read(out deleted)) + { + geometry = MultiPolygon.Empty; + attributes = null; + return false; + } + + geometry = GetMultiPolygon(Reader.Shape, HasZ, HasM); + attributes = Reader.Fields.GetAttributesTable(); + return true; + } + + + + internal static MultiPolygon GetMultiPolygon(Core.ShpShapeBuilder shape, bool hasZ, bool hasM) + { + if (shape.PartCount < 1 || shape.PointCount < 3) + return MultiPolygon.Empty; + + var (shells, holes) = GetShellsAndHoles(shape, hasZ, hasM); + + if (shells.Count < 1) + return MultiPolygon.Empty; + + // https://gis.stackexchange.com/a/147971/26684 + // There could be nested shells (nested outer rings). Add a hole to the smallest one. + shells.Sort(CompareLinearRingAreas); + + var polygons = new Polygon[shells.Count]; + for (int i = 0; i < shells.Count; i++) + { + var shell = shells[i]; + var shellHoles = PopHoles(shell, holes); + polygons[i] = new Polygon(shell, shellHoles); + } + + return new MultiPolygon(polygons); + } + + + private static LinearRing[] PopHoles(LinearRing shell, List holes) + { + if (holes.Count < 1) + return NoHoles; + + var shellHoles = new List(); + + for (int i = holes.Count - 1; i >= 0; i--) + { + var hole = holes[i]; + if (hole == null) + continue; + + var holePoint = hole.CoordinateSequence.GetCoordinate(0); + if (!shell.EnvelopeInternal.Contains(holePoint)) // Fast, but not precise. + continue; + + if (!PointLocation.IsInRing(holePoint, shell.CoordinateSequence)) + continue; + + shellHoles.Add(hole); + // holes.RemoveAt(i); // Avoid copying internal List's array over and over. 
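                // Instead of removing, the slot is set to null below; consumed holes are then
                // skipped by the "hole == null" check when later shells are processed.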
+ holes[i] = null; + } + + if (shellHoles.Count < 1) + return NoHoles; + + return shellHoles.ToArray(); + } + + + private static int CompareLinearRingAreas(LinearRing ring1, LinearRing ring2) + { + var area1 = Area.OfRing(ring1.CoordinateSequence); + var area2 = Area.OfRing(ring2.CoordinateSequence); + return area1.CompareTo(area2); + } + + private static (List Shells, List Holes) GetShellsAndHoles(Core.ShpShapeBuilder shape, bool hasZ, bool hasM) + { + var shells = new List(shape.PartCount); + var holes = new List(); + + for (int partIndex = 0; partIndex < shape.PartCount; partIndex++) + { + var partCoordinates = shape.GetPartCoordinates(partIndex, hasZ, hasM, true); + + // Polygon must have at least 3 points + if (partCoordinates.Count < 3) + continue; + + var ring = new LinearRing(partCoordinates, GeometryFactory.Default); + + // https://gis.stackexchange.com/a/147971/26684 + if (ring.IsCCW) + { + holes.Add(ring); + } + else + { + shells.Add(ring); + } + } + + // If there are only holes, without any shel (outer ring) + if (shells.Count < 1 && holes.Count > 0) + { + foreach (var hole in holes) + { + var reversedHole = new LinearRing(hole.CoordinateSequence.Reversed(), GeometryFactory.Default); + shells.Add(reversedHole); + } + holes.Clear(); + } + + return (shells, holes); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Readers/ShapefileReader.cs b/src/NetTopologySuite.IO.Esri/Readers/ShapefileReader.cs new file mode 100644 index 0000000..c05adf2 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Readers/ShapefileReader.cs @@ -0,0 +1,296 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// Base class for reading a shapefile. + /// + public abstract class ShapefileReader : Shapefile, IEnumerable + { + private protected readonly bool HasZ; + private protected readonly bool HasM; + private protected Core.ShapefileReader Reader { get; } + + /// + /// Initializes a new instance of the reader class. + /// + /// Path to SHP file. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + internal ShapefileReader(string shpPath, Encoding encoding) + { + try + { + Reader = CreateReader(shpPath, encoding); + HasZ = Reader.ShapeType.HasZ(); + HasM = Reader.ShapeType.HasM(); + + var bb = Reader.BoundingBox; + BoundingBox = new Envelope(bb.X.Min, bb.X.Max, bb.Y.Min, bb.Y.Max); + } + catch + { + DisposeManagedResources(); + throw; + } + } + + internal abstract Core.ShapefileReader CreateReader(string shpPath, Encoding encoding); + + + /// + public override ShapeType ShapeType => Reader.ShapeType; + + /// + public override DbfFieldCollection Fields => Reader.Fields; + + + /// + /// Encoding used by the shapefile. + /// + public Encoding Encoding => Reader.Encoding; + + + /// + /// Well-known text representation of coordinate reference system metadata from .prj file. + /// + /// + /// https://support.esri.com/en/technical-article/000001897 + /// /> + public string Projection => Reader.Projection; + + /// + /// The minimum bounding rectangle orthogonal to the X and Y. + /// + public Envelope BoundingBox { get; } = new Envelope(); + + + /// + /// Reads geometry and feature attributes from underlying SHP and DBF files. + /// + /// Feature geometry. + /// Feature atrributes. 
+ /// Indicates if the record was marked as deleted. + /// + /// true if the enumerator was successfully advanced to the next record; + /// false if the enumerator has passed the end of the table. + /// + public abstract bool Read(out Geometry geometry, out AttributesTable attributes, out bool deleted); + + + /// + protected override void DisposeManagedResources() + { + Reader?.Dispose(); + base.DisposeManagedResources(); // This will dispose streams used by ShpReader and DbfReader. Do it at the end. + } + + internal void Restart() + { + Reader.Restart(); + } + + #region *** Enumerator *** + + IEnumerator IEnumerable.GetEnumerator() + { + return new FeatureEnumerator(this); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return new FeatureEnumerator(this); + } + + private class FeatureEnumerator : IEnumerator + { + private readonly ShapefileReader Owner; + public Feature Current { get; private set; } + object IEnumerator.Current => Current; + + public FeatureEnumerator(ShapefileReader owner) + { + Owner = owner; + } + + public void Reset() + { + Owner.Restart(); + } + + public bool MoveNext() + { + if (!Owner.Read(out var geometry, out var attributes, out var deleted)) + { + return false; + } + + if (deleted) + { + return MoveNext(); + } + + Current = geometry.ToFeature(attributes); + return true; + } + + public void Dispose() + { + // Nothing to dispose + } + } + + #endregion + + + /// + /// Opens shapefile reader. + /// + /// Path to shapefile. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Shapefile reader. + public static ShapefileReader Open(string shpPath, Encoding encoding = null) + { + shpPath = Path.ChangeExtension(shpPath, ".shp"); + var shapeType = Core.ShpReader.GetShapeType(shpPath); + + if (shapeType.IsPoint()) + { + return new ShapefilePointReader(shpPath, encoding); + } + else if (shapeType.IsMultiPoint()) + { + return new ShapefileMultiPointReader(shpPath, encoding); + } + else if (shapeType.IsPolyLine()) + { + return new ShapefilePolyLineReader(shpPath, encoding); + } + else if (shapeType.IsPolygon()) + { + return new ShapefilePolygonReader(shpPath, encoding); + } + else + { + throw new FileLoadException("Unsupported shapefile type: " + shapeType, shpPath); + } + } + + /// + /// Reads all shapefile features. + /// + /// Path to shapefile. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Shapefile features collection. + public static Feature[] ReadAll(string shpPath, Encoding encoding = null) + { + using (var shpReader = Open(shpPath, encoding)) + { + return shpReader.ToArray(); + } + } + + /// + /// Reads all shapefile geometries. + /// + /// Path to SHP file. + /// Shapefile geometry collection. 
+ public static Geometry[] ReadAllGeometries(string shpPath) + { + var shapeType = Core.ShpReader.GetShapeType(shpPath); + var hasZ = shapeType.HasZ(); + var hasM = shapeType.HasM(); + + using (var shpStream = new FileStream(shpPath, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + if (shapeType.IsPoint()) + { + return ReadAllPoints(shpStream, hasZ, hasM).ToArray(); + } + else if (shapeType.IsMultiPoint()) + { + return ReadAllMultiPoints(shpStream, hasZ, hasM).ToArray(); + } + else if (shapeType.IsPolyLine()) + { + return ReadAllPolyLines(shpStream, hasZ, hasM).ToArray(); + } + else if (shapeType.IsPolygon()) + { + return ReadAllPolygons(shpStream, hasZ, hasM).ToArray(); + } + else + { + throw new FileLoadException("Unsupported shapefile type: " + shapeType, shpPath); + } + } + } + + private static IEnumerable ReadAllPoints(Stream shpStream, bool hasZ, bool hasM) + { + using (var shpReader = new Core.ShpPointReader(shpStream)) + { + while (shpReader.Read()) + { + yield return ShapefilePointReader.GetPoint(shpReader.Shape, hasZ, hasM); + } + } + } + + private static IEnumerable ReadAllMultiPoints(Stream shpStream, bool hasZ, bool hasM) + { + using (var shpReader = new Core.ShpMultiPointReader(shpStream)) + { + while (shpReader.Read()) + { + yield return ShapefileMultiPointReader.GetMultiPoint(shpReader.Shape, hasZ, hasM); + } + } + } + + private static IEnumerable ReadAllPolyLines(Stream shpStream, bool hasZ, bool hasM) + { + using (var shpReader = new Core.ShpMultiPartReader(shpStream)) + { + while (shpReader.Read()) + { + var multiLine = ShapefilePolyLineReader.GetMultiLineString(shpReader.Shape, hasZ, hasM); + if (multiLine.Count == 1) + { + yield return multiLine[0]; // Polygon + } + else + { + yield return multiLine; // MultiPolygon + } + } + } + } + + private static IEnumerable ReadAllPolygons(Stream shpStream, bool hasZ, bool hasM) + { + using (var shpReader = new Core.ShpMultiPartReader(shpStream)) + { + while (shpReader.Read()) + { + var multiPolygon = ShapefilePolygonReader.GetMultiPolygon(shpReader.Shape, hasZ, hasM); + if (multiPolygon.Count == 1) + { + yield return multiPolygon[0]; // Polygon + } + else + { + yield return multiPolygon; // MultiPolygon + } + } + } + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/DbaseEnumerator.cs b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/DbaseEnumerator.cs new file mode 100644 index 0000000..f1f1fca --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/DbaseEnumerator.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using NetTopologySuite.Features; + +namespace NetTopologySuite.IO.ShapeFile.Extended +{ + internal class DbaseEnumerator : IEnumerator, IDisposable + { + private DbaseReader m_Reader; + private int m_CurrentAttrTbleIndex; + + public DbaseEnumerator(DbaseReader reader) + { + m_Reader = reader.Clone(); + m_CurrentAttrTbleIndex = 0; + } + + public IAttributesTable Current + { + get; + private set; + } + + public void Dispose() + { + m_Reader.Dispose(); + } + + object IEnumerator.Current => ((IEnumerator)this).Current; + + public bool MoveNext() + { + if (m_CurrentAttrTbleIndex == m_Reader.NumOfRecords) + { + return false; + } + + Current = m_Reader.ReadEntry(m_CurrentAttrTbleIndex); + m_CurrentAttrTbleIndex++; + + return true; + } + + public void Reset() + { + var newReader = m_Reader.Clone(); + m_Reader.Dispose(); + m_Reader = newReader; + m_CurrentAttrTbleIndex = 0; + } + } +} diff --git 
a/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/DbaseReader.cs b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/DbaseReader.cs new file mode 100644 index 0000000..f6fa471 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/DbaseReader.cs @@ -0,0 +1,218 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using NetTopologySuite.Features; +using NetTopologySuite.IO.Streams; + +namespace NetTopologySuite.IO.ShapeFile.Extended +{ + public class DbaseReader : IEnumerable, IDisposable + { + private DbaseHeader m_Header = null; + //private readonly string m_Filename; + private readonly StreamProvider m_StreamProvider; + private BinaryReader m_FileReader; + private bool m_IsDisposed; + + /// + /// Initializes a new instance of the DbaseFileReader class. + /// + /// + public DbaseReader(string filename) + : this(new StreamProvider(StreamTypes.Data, filename, true)) + { + } + + public DbaseReader(StreamProvider streamProvider) + { + m_StreamProvider = streamProvider; + //ReadHeader(); + } + + ~DbaseReader() + { + Dispose(false); + } + + internal int NumOfRecords => m_Header.RecordCount; + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + public IAttributesTable ReadEntry(int index) + { + if (m_IsDisposed) + { + throw new InvalidOperationException("Reader was disposed, cannot read from a disposed reader"); + } + + if (index < 0) + { + throw new ArgumentException("Index must be positive", "index"); + } + + int seekLocation = 1; // m_Header.FieldHeaderSize + (index * m_Header.RecordSize); + + if (seekLocation >= m_FileReader.BaseStream.Length) + { + throw new ArgumentOutOfRangeException("index", "No DBF entry with index " + index); + } + + m_FileReader.BaseStream.Seek(seekLocation, SeekOrigin.Begin); + + var tbl = new AttributesTable(); + var data = ReadCurrentEntry(); + + for (int i = 0; i < data.Count; i++) + { + tbl.Add(m_Header.Fields[i].Name, data[i]); + } + + return tbl; + } + + public IEnumerator GetEnumerator() + { + return new DbaseEnumerator(this); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return ((IEnumerable)this).GetEnumerator(); + } + + internal DbaseReader Clone() + { + return new DbaseReader(m_StreamProvider); + } + + private ArrayList ReadCurrentEntry() + { + ArrayList attrs = null; + + // retrieve the record length + int tempNumFields = 1; // m_Header.NumFields; + + // storage for the actual values + attrs = new ArrayList(tempNumFields); + + // read the deleted flag + char tempDeleted = (char)m_FileReader.ReadChar(); + + // read the record length + int tempRecordLength = 1; // for the deleted character just read. + + // read the Fields + for (int j = 0; j < tempNumFields; j++) + { + // find the length of the field. + int tempFieldLength = m_Header.Fields[j].Length; + tempRecordLength = tempRecordLength + tempFieldLength; + + // find the field type + char tempFieldType = (char)m_Header.Fields[j].FieldType; + + // read the data. + object tempObject = null; + switch (tempFieldType) + { + case 'L': // logical data type, one character (T,t,F,f,Y,y,N,n) + char tempChar = (char)m_FileReader.ReadByte(); + if ((tempChar == 'T') || (tempChar == 't') || (tempChar == 'Y') || (tempChar == 'y')) + tempObject = true; + else tempObject = false; + break; + + case 'C': // character record. 
+ + if (m_Header.Encoding == null) + { + char[] sbuffer = m_FileReader.ReadChars(tempFieldLength); + tempObject = new string(sbuffer).Trim().Replace("\0", string.Empty); //.ToCharArray(); + } + else + { + byte[] buf = m_FileReader.ReadBytes(tempFieldLength); + tempObject = m_Header.Encoding.GetString(buf, 0, buf.Length).Trim(); + } + break; + + case 'D': // date data type. + char[] ebuffer = m_FileReader.ReadChars(8); + string tempString = new string(ebuffer, 0, 4); + + int year; + if (!int.TryParse(tempString, NumberStyles.Integer, CultureInfo.InvariantCulture, out year)) + break; + tempString = new string(ebuffer, 4, 2); + + int month; + if (!int.TryParse(tempString, NumberStyles.Integer, CultureInfo.InvariantCulture, out month)) + break; + tempString = new string(ebuffer, 6, 2); + + int day; + if (!int.TryParse(tempString, NumberStyles.Integer, CultureInfo.InvariantCulture, out day)) + break; + + try + { + if (day > 0 && year > 0 && month > 0 && month <= 12) // don't try to parse date when day is invalid - it will be useless and slow for large files + tempObject = new DateTime(year, month, day); + } + catch (Exception) + { + } + + break; + + case 'N': // number + case 'F': // floating point number + char[] fbuffer = m_FileReader.ReadChars(tempFieldLength); + tempString = new string(fbuffer); + + // if we can't format the number, just save it as a string + tempObject = tempString; + + double result; + var numberStyle = NumberStyles.AllowLeadingWhite | NumberStyles.AllowTrailingWhite | NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowThousands | NumberStyles.AllowExponent; + if (double.TryParse(tempString, numberStyle, CultureInfo.InvariantCulture, out result)) + { + tempObject = result; + } + break; + + default: + throw new NotSupportedException("Do not know how to parse Field type " + tempFieldType); + } + + attrs.Add(tempObject); + } + + // ensure that the full record has been read. 
+ if (tempRecordLength < m_Header.RecordSize) + { + m_FileReader.ReadBytes(m_Header.RecordSize - tempRecordLength); + } + return attrs; + } + + private void Dispose(bool disposing) + { + if (!m_IsDisposed) + { + if (m_FileReader != null) + { + m_FileReader.Close(); + } + + m_IsDisposed = true; + } + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/Entities/IShapefileFeature.cs b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/Entities/IShapefileFeature.cs new file mode 100644 index 0000000..5b24910 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/Entities/IShapefileFeature.cs @@ -0,0 +1,9 @@ +using NetTopologySuite.Features; + +namespace NetTopologySuite.IO.ShapeFile.Extended.Entities +{ + public interface IShapefileFeature : IFeature + { + long FeatureId { get; } + } +} diff --git a/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/Entities/ShapefileFeature.cs b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/Entities/ShapefileFeature.cs new file mode 100644 index 0000000..6c9373d --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/Entities/ShapefileFeature.cs @@ -0,0 +1,75 @@ +using System; +using System.Threading; +using System.Runtime.Serialization; + +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Handlers; + +namespace NetTopologySuite.IO.ShapeFile.Extended.Entities +{ + [Serializable] + internal class ShapefileFeature : IShapefileFeature, ISerializable + { + + public ShapefileFeature(ShapeReader shapeReader, DbaseReader dbfReader, ShapeLocationInFileInfo shapeLocation, GeometryFactory geoFactory) + { + FeatureId = shapeLocation.ShapeIndex; + + // removed "lazy" evaluation to avoid dispose issues (see #27) + var geom = shapeReader.ReadShapeAtOffset(shapeLocation.OffsetFromStartOfFile, geoFactory); + var attributes = dbfReader.ReadEntry(shapeLocation.ShapeIndex); + + Geometry = geom; + Attributes = attributes; + } + + private ShapefileFeature(SerializationInfo info, StreamingContext context) + { + var geom = (Geometry)info.GetValue("Geometry", typeof(Geometry)); + var attributes = (IAttributesTable)info.GetValue("Attributes", typeof(IAttributesTable)); + + FeatureId = info.GetInt64("FeatureId"); + Geometry = geom; + Attributes = attributes; + } + + public Geometry Geometry { get; } + + public Envelope BoundingBox => Geometry.EnvelopeInternal; + + public IAttributesTable Attributes { get; } + + public long FeatureId { get; } + + Geometry IFeature.Geometry + { + get => Geometry; + set => throw new NotSupportedException("Setting geometry on a shapefile reader is not supported!"); + } + + Envelope IFeature.BoundingBox + { + get => BoundingBox; + set => throw new InvalidOperationException("Setting BoundingBox not allowed for Shapefile feature"); + } + + IAttributesTable IFeature.Attributes + { + get => Attributes; + set => throw new NotSupportedException("Setting attributes on a shapefile reader is not supported!"); + } + + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info is null) + { + throw new ArgumentNullException(nameof(info)); + } + + info.AddValue("Geometry", Geometry); + info.AddValue("Attributes", Attributes); + info.AddValue("FeatureId", FeatureId); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/ShapeDataReader.cs b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/ShapeDataReader.cs new file mode 100644 index 0000000..6431024 --- /dev/null +++ 
b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/ShapeDataReader.cs @@ -0,0 +1,195 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using NetTopologySuite.Geometries; +using NetTopologySuite.Index; +using NetTopologySuite.Index.Strtree; +using NetTopologySuite.IO.Handlers; +using NetTopologySuite.IO.ShapeFile.Extended.Entities; +using NetTopologySuite.IO.Streams; + +namespace NetTopologySuite.IO.ShapeFile.Extended +{ + public class ShapeDataReader : IDisposable + { + private const string DBF_EXT = ".dbf"; + + private readonly ISpatialIndex m_SpatialIndex; + private readonly Task m_IndexCreationTask; + private bool m_IsIndexingComplete; + private readonly CancellationTokenSource m_CancellationTokenSrc; + private readonly DbaseReader m_DbfReader; + private readonly GeometryFactory m_GeoFactory; + private readonly ShapeReader m_ShapeReader; + + public ShapeDataReader(string shapeFilePath, ISpatialIndex index, GeometryFactory geoFactory, bool buildIndexAsync) + :this(new StreamProviderRegistry(shapeFilePath, true, true), index, geoFactory, buildIndexAsync) + { + } + + public ShapeDataReader(StreamProviderRegistry streamProviderRegistry , ISpatialIndex index, GeometryFactory geoFactory, bool buildIndexAsync) + { + + m_SpatialIndex = index; + m_GeoFactory = geoFactory; + + ValidateParameters(); + + m_ShapeReader = new ShapeReader(streamProviderRegistry); + + if (buildIndexAsync) + { + m_CancellationTokenSrc = new CancellationTokenSource(); + m_IndexCreationTask = Task.Factory.StartNew(FillSpatialIndex, m_CancellationTokenSrc.Token); + } + else + { + FillSpatialIndex(); + } + + m_DbfReader = new DbaseReader(streamProviderRegistry.DataStream); + } + + public ShapeDataReader(string shapeFilePath, ISpatialIndex index, GeometryFactory geoFactory) + : this(shapeFilePath, index, geoFactory, true) + { } + + public ShapeDataReader(string shapeFilePath, ISpatialIndex index) + : this(shapeFilePath, index, new GeometryFactory()) + { } + + public ShapeDataReader(string shapeFilePath) + : this(shapeFilePath, new STRtree()) + { } + + ~ShapeDataReader() + { + Dispose(false); + } + + public Envelope ShapefileBounds => m_ShapeReader.ShapefileHeader.Bounds; + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Query shapefile by MBR. + /// MBR coordinates MUST be in the Shapefile's coordinate system. + /// + /// NOTE: If you are using the default ISpatialIndex (which is an instance of the STRtree NTS class), it has some limitations. + /// Since it works with MBRs rather than the shapes themselves, you can get some shapes that are not actually in the MBR + /// you provided just because their MBRs are bounded by the given envelope. + /// If you wish to avoid this behaviour, send true in the second paramter, but be weary of the consequences listed below. + /// + /// The envlope to query. + /// + /// False by default, true to double-check the returned geometries against given Envelope, to avoid index error margin. + /// + /// It is advisable that you implement your own ISpatialIndex with your required precision rather than set this to True. + /// + /// ********** + /// CAUTION: If you choose to set this parameter as True, it will greatly affect performance as it + /// will cancel any lazy mechanism implemented with reading the geometries from the file. + /// Do not set this to True unless you either: + /// A. Do not have any performance restrictions. + /// Or: + /// B. 
Absolutely need that precision in the geographic query. + /// ********** + /// + /// + public IEnumerable ReadByMBRFilter(Envelope envelope, bool testGeometriesActuallyInMBR = false) + { + if (envelope == null) + { + throw new ArgumentNullException("envelope"); + } + + // If index creation task wasnt completed, wait for it to complete. + if (!m_IsIndexingComplete) + { + m_IndexCreationTask.Wait(); + } + + var shapesInRegion = m_SpatialIndex.Query(envelope); + + if (shapesInRegion.Count == 0) + { + return Enumerable.Empty(); + } + + var results = shapesInRegion.Select(ReadFeature); + + if (!testGeometriesActuallyInMBR) + { + return results; + } + else + { + var envelopeGeo = new GeometryFactory().ToGeometry(envelope); + + return results.Where(feature => envelopeGeo.Intersects(feature.Geometry)); + } + } + + private IShapefileFeature ReadFeature(ShapeLocationInFileInfo shapeLocationInfo) + { + return new ShapefileFeature(m_ShapeReader, m_DbfReader, shapeLocationInfo, m_GeoFactory); + } + + /// + /// Check validity of parameters - null values and that all file needed to read shapes exist. + /// + private void ValidateParameters() + { + if (m_SpatialIndex == null) + { + throw new ArgumentNullException("index"); + } + if (m_GeoFactory == null) + { + throw new ArgumentNullException("GeoFactory"); + } + } + + private void FillSpatialIndex() + { + bool isAsync = m_CancellationTokenSrc != null; + + foreach (var mbrInfo in m_ShapeReader.ReadMBRs()) + { + if (isAsync && m_CancellationTokenSrc.IsCancellationRequested) + { + break; + } + + m_SpatialIndex.Insert(mbrInfo.ShapeMBR, mbrInfo.ShapeFileDetails); + } + + m_IsIndexingComplete = true; + } + + private void Dispose(bool disposing) + { + if (m_CancellationTokenSrc != null) + { + m_CancellationTokenSrc.Cancel(); + } + + if (m_DbfReader != null) + { + m_DbfReader.Dispose(); + } + + if (m_ShapeReader != null) + { + m_ShapeReader.Dispose(); + } + + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/ShapeReader.cs b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/ShapeReader.cs new file mode 100644 index 0000000..70bb16f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/ShapeFile.Extended/ShapeReader.cs @@ -0,0 +1,203 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Handlers; +using NetTopologySuite.IO.Streams; + +namespace NetTopologySuite.IO.ShapeFile.Extended +{ + /// + /// A class to read from a set of files forming the ESRI-Shapefile + /// + public class ShapeReader : IDisposable + { + const long HEADER_LENGTH = 100; + + private BigEndianBinaryReader m_ShapeFileReader; + + //private readonly string m_ShapeFilePath; + private readonly StreamProviderRegistry m_StreamProviderRegistry; + private readonly ShapeHandler m_ShapeHandler; + private readonly Lazy m_ShapeOffsetCache; + private bool m_IsDisposed; + + /// + /// Creates an instance of this class to read from the Shapefile set of files defined by + /// + /// The path to the Shapefile + public ShapeReader(string shapefilePath) : this(new StreamProviderRegistry(shapefilePath, true)) + { + } + + public ShapeReader(StreamProviderRegistry streamProviderRegistry) + { + if (streamProviderRegistry == null) + throw new ArgumentNullException("streamProviderRegistry"); + + m_StreamProviderRegistry = streamProviderRegistry; + + ShapefileHeader = new ShapefileHeader(ShapeReaderStream); + m_ShapeHandler = Shapefile.GetShapeHandler(ShapefileHeader.ShapeType); + + 
m_ShapeOffsetCache = new Lazy(BuildOffsetCache, LazyThreadSafetyMode.ExecutionAndPublication); + + } + + /// + /// Finalizer + /// + ~ShapeReader() + { + Dispose(false); + } + + /// + /// Dispose method + /// + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (m_IsDisposed) + { + return; + } + + m_IsDisposed = true; + CloseShapeFileHandle(); + } + /// + /// Gets a value indicating the header of the main Shapefile (*.shp) + /// + public ShapefileHeader ShapefileHeader { get; } + + private BigEndianBinaryReader ShapeReaderStream + { + get + { + ThrowIfDisposed(); + + if (m_ShapeFileReader == null) + { + lock (m_StreamProviderRegistry) + { + if (m_ShapeFileReader == null) + { + m_ShapeFileReader = new BigEndianBinaryReader(m_StreamProviderRegistry.ShapeStream.OpenRead()); + } + } + } + + return m_ShapeFileReader; + } + } + + /// + /// Function to read the bounding boxes of all geometries in the Shapefile (*.shp) + /// + /// An enumeration of bounding boxes + public IEnumerable ReadMBRs() + { + ThrowIfDisposed(); + + lock (m_StreamProviderRegistry) + { + var newReader = new BigEndianBinaryReader(m_StreamProviderRegistry.ShapeStream.OpenRead()); + return m_ShapeHandler.ReadMBRs(newReader); + } + } + + public IEnumerable ReadAllShapes(GeometryFactory geoFactory) + { + ThrowIfDisposed(); + + if (geoFactory == null) + { + throw new ArgumentNullException("geoFactory"); + } + + return m_ShapeOffsetCache.Value.Select(offset => ReadShapeAtOffset(offset, geoFactory)); + } + + public Geometry ReadShapeAtIndex(int index, GeometryFactory geoFactory) + { + ThrowIfDisposed(); + + if (geoFactory == null) + { + throw new ArgumentNullException("geoFactory"); + } + + if (index < 0 || index >= m_ShapeOffsetCache.Value.Length) + { + throw new ArgumentOutOfRangeException("index"); + } + + return ReadShapeAtOffset(m_ShapeOffsetCache.Value[index], geoFactory); + } + + /// + /// Read shape at a given offset. + /// + /// The offset at which the requested shape metadata begins. + /// + /// + public Geometry ReadShapeAtOffset(long shapeOffset, GeometryFactory geoFactory) + { + Geometry currGeomtry = null; + ThrowIfDisposed(); + + if (shapeOffset < HEADER_LENGTH || shapeOffset >= ShapeReaderStream.BaseStream.Length) + { + throw new IndexOutOfRangeException("Shape offset cannot be lower than header length (100) or higher than shape file size"); + } + + lock (ShapeReaderStream) + { + // Skip to shape size location in file. 
+ ShapeReaderStream.BaseStream.Seek(shapeOffset + 4, SeekOrigin.Begin); + + int currShapeLengthInWords = ShapeReaderStream.ReadInt32BE(); + + currGeomtry = m_ShapeHandler.Read(ShapeReaderStream, currShapeLengthInWords, geoFactory); + } + + return currGeomtry; + } + + private long[] BuildOffsetCache() + { + using (var shapeFileReader = new BigEndianBinaryReader(m_StreamProviderRegistry.ShapeStream.OpenRead())) + { + return m_ShapeHandler.ReadMBRs(shapeFileReader) + .Select(mbrInfo => mbrInfo.ShapeFileDetails.OffsetFromStartOfFile) + .ToArray(); + } + + } + + private void ThrowIfDisposed() + { + if (m_IsDisposed) + { + throw new InvalidOperationException("Cannot use a disposed ShapeReader"); + } + } + + private void CloseShapeFileHandle() + { + if (m_ShapeFileReader != null) + { + m_ShapeFileReader.Close(); + m_ShapeFileReader = null; + } + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Writers/ShapefileMultiPointWriter.cs b/src/NetTopologySuite.IO.Esri/Writers/ShapefileMultiPointWriter.cs new file mode 100644 index 0000000..b77214f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Writers/ShapefileMultiPointWriter.cs @@ -0,0 +1,54 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + + /// + /// MultiPoint shapefile writer. + /// + public class ShapefileMultiPointWriter : ShapefileWriter + { + /// + public ShapefileMultiPointWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefileMultiPointWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { } + + + internal override Core.ShapefileWriter CreateWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + { + return new Core.ShapefileMultiPointWriter(shpPath, type, fields, encoding, projection); + } + + internal override void GetShape(Geometry geometry, Core.ShpShapeBuilder shape) + { + shape.Clear();; + if (geometry is MultiPoint multiPoint) + { + shape.StartNewPart(); + AddMultiPoint(shape, multiPoint); + } + else + { + throw GetUnsupportedGeometryTypeException(geometry); + } + } + + + private void AddMultiPoint(Core.ShpShapeBuilder shape, MultiPoint multiPoint) + { + for (int i = 0; i < multiPoint.NumGeometries; i++) + { + shape.AddPoint(multiPoint.GetGeometryN(i) as Point); + } + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Writers/ShapefilePointWriter.cs b/src/NetTopologySuite.IO.Esri/Writers/ShapefilePointWriter.cs new file mode 100644 index 0000000..c3ca4d9 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Writers/ShapefilePointWriter.cs @@ -0,0 +1,51 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + + /// + /// Point shapefile writer. 
+ /// + public class ShapefilePointWriter : ShapefileWriter + { + /// + public ShapefilePointWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding = null, string projection = null) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefilePointWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { } + + + internal override Core.ShapefileWriter CreateWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + { + return new Core.ShapefilePointWriter(shpPath, type, fields, encoding, projection); + } + + internal override void GetShape(Geometry geometry, Core.ShpShapeBuilder shape) + { + shape.Clear(); + if (geometry is Point point) + { + shape.StartNewPart(); + AddPoint(shape, point); + } + else + { + throw GetUnsupportedGeometryTypeException(geometry); + } + } + + + private void AddPoint(Core.ShpShapeBuilder shape, Point point) + { + shape.AddPoint(point); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Writers/ShapefilePolyLineWriter.cs b/src/NetTopologySuite.IO.Esri/Writers/ShapefilePolyLineWriter.cs new file mode 100644 index 0000000..ea8775f --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Writers/ShapefilePolyLineWriter.cs @@ -0,0 +1,66 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + + /// + /// MultiLine shapefile writer. + /// + public class ShapefilePolyLineWriter : ShapefileWriter + { + /// + public ShapefilePolyLineWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefilePolyLineWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { } + + + internal override Core.ShapefileWriter CreateWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + { + return new Core.ShapefileMultiPartWriter(shpPath, type, fields, encoding, projection); + } + + internal override void GetShape(Geometry geometry, Core.ShpShapeBuilder shape) + { + shape.Clear(); + if (geometry is MultiLineString multiLine) + { + AddMultiLineString(multiLine, shape); + } + else if (geometry is LineString line) + { + AddLineString(line, shape); + } + else + { + throw GetUnsupportedGeometryTypeException(geometry); + } + } + + + private void AddMultiLineString(MultiLineString multiLine, Core.ShpShapeBuilder shape) + { + for (int i = 0; i < multiLine.NumGeometries; i++) + { + AddLineString(multiLine.GetGeometryN(i) as LineString, shape); + } + } + + + private void AddLineString(LineString line, Core.ShpShapeBuilder shape) + { + if (line == null || line == LineString.Empty) + return; + + shape.AddPart(line.CoordinateSequence); + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Writers/ShapefilePolygonWriter.cs b/src/NetTopologySuite.IO.Esri/Writers/ShapefilePolygonWriter.cs new file mode 100644 index 0000000..69e7955 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Writers/ShapefilePolygonWriter.cs @@ -0,0 +1,77 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + + /// + /// MultiPolygon shapefile writer. 
+ /// + public class ShapefilePolygonWriter : ShapefileWriter + { + /// + public ShapefilePolygonWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + : base(shpPath, type, fields, encoding, projection) + { } + + /// + public ShapefilePolygonWriter(string shpPath, ShapeType type, params DbfField[] fields) + : base(shpPath, type, fields) + { } + + + internal override Core.ShapefileWriter CreateWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + { + return new Core.ShapefileMultiPartWriter(shpPath, type, fields, encoding, projection); + } + + internal override void GetShape(Geometry geometry, Core.ShpShapeBuilder shape) + { + shape.Clear(); + if (geometry is MultiPolygon multiPolygon) + { + AddMultiPolygon(multiPolygon, shape); + } + else if (geometry is Polygon polygon) + { + AddPolygon(polygon, shape); + } + else + { + throw GetUnsupportedGeometryTypeException(geometry); + } + } + + + private void AddMultiPolygon(MultiPolygon multiPolygon, Core.ShpShapeBuilder shape) + { + shape.Clear(); + + for (int i = 0; i < multiPolygon.NumGeometries; i++) + { + AddPolygon(multiPolygon.GetGeometryN(i) as Polygon, shape); + } + } + + + private void AddPolygon(Polygon polygon, Core.ShpShapeBuilder shape) + { + if (polygon == null || polygon == Polygon.Empty) + return; + + // SHP Spec: Vertices for a single polygon are always in clockwise order. + var shellCoordinates = polygon.Shell.IsCCW ? polygon.Shell.CoordinateSequence.Reversed() : polygon.Shell.CoordinateSequence; + shape.AddPart(shellCoordinates); + + foreach (var hole in polygon.Holes) + { + // SHP Spec: Vertices of rings defining holes in polygons are in a counterclockwise direction. + var holeCoordinates = hole.IsCCW ? hole.CoordinateSequence : hole.CoordinateSequence.Reversed(); + shape.AddPart(holeCoordinates); + } + } + } +} diff --git a/src/NetTopologySuite.IO.Esri/Writers/ShapefileWriter.cs b/src/NetTopologySuite.IO.Esri/Writers/ShapefileWriter.cs new file mode 100644 index 0000000..9101c99 --- /dev/null +++ b/src/NetTopologySuite.IO.Esri/Writers/ShapefileWriter.cs @@ -0,0 +1,166 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.Shapefile +{ + /// + /// Base class for writing a shapefile. + /// + public abstract class ShapefileWriter : Shapefile + { + private readonly Core.ShapefileWriter Writer; + + + /// + /// Initializes a new instance of the writer class. + /// + /// Path to SHP file. + /// Shape type. + /// Shapefile attribute definitions. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Projection metadata for the shapefile (.prj file). + internal ShapefileWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection) + { + try + { + Writer = CreateWriter(shpPath, type, fields, encoding, projection); + ShapeType = type; + } + catch + { + DisposeManagedResources(); + throw; + } + } + + + /// + /// Initializes a new instance of the writer class. + /// + /// Path to SHP file. + /// Shape type. + /// Shapefile attribute definitions. 
+ internal ShapefileWriter(string shpPath, ShapeType type, params DbfField[] fields) : this(shpPath, type, fields, null, null) + { + } + + internal abstract Core.ShapefileWriter CreateWriter(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding, string projection); + + internal abstract void GetShape(Geometry geometry, Core.ShpShapeBuilder shape); + + /// + public override ShapeType ShapeType { get; } = ShapeType.NullShape; + + /// + public override DbfFieldCollection Fields => Writer.Fields; + + + /// + /// Writes geometry and feature attributes to underlying SHP and DBF files. + /// + public void Write(Geometry geometry, IAttributesTable attributes) + { + if (geometry != null && !geometry.IsEmpty) + { + GetShape(geometry, Writer.Shape); + } + else + { + Writer.Shape.Clear(); + } + + foreach (var field in Writer.Fields) + { + field.Value = attributes[field.Name]; + } + Writer.Write(); + } + + + /// + /// Writes geometry and feature attributes to underlying SHP and DBF files. + /// + public void Write(IFeature feature) + { + Write(feature.Geometry, feature.Attributes); + } + + + /// + /// Writes geometry and feature attributes to underlying SHP and DBF files. + /// + public void Write(IEnumerable features) + { + foreach (var feature in features) + { + Write(feature); + } + } + + + internal Exception GetUnsupportedGeometryTypeException(Geometry geometry) + { + return new ArgumentException(GetType().Name + " does not support " + geometry.GeometryType + " geometry."); + } + + + /// + protected override void DisposeManagedResources() + { + Writer?.Dispose(); + base.DisposeManagedResources(); // This will dispose streams used by ShpWriter and DbfWriter. Do it at the end. + } + + + /// + /// Opens shapefile writer. + /// + /// Path to shapefile. + /// Shape type. + /// Shapefile fields definitions. + /// DBF file encoding. If null encoding will be guess from related .CPG file or from reserved DBF bytes. + /// Projection metadata for the shapefile (.prj file). + /// Shapefile writer. + public static ShapefileWriter Open(string shpPath, ShapeType type, IReadOnlyList fields, Encoding encoding = null, string projection = null) + { + if (type.IsPoint()) + { + return new ShapefilePointWriter(shpPath, type, fields, encoding, projection); + } + else if (type.IsMultiPoint()) + { + return new ShapefileMultiPointWriter(shpPath, type, fields, encoding, projection); + } + else if (type.IsPolyLine()) + { + return new ShapefilePolyLineWriter(shpPath, type, fields, encoding, projection); + } + else if (type.IsPolygon()) + { + return new ShapefilePolygonWriter(shpPath, type, fields, encoding, projection); + } + else + { + throw new FileLoadException("Unsupported shapefile type: " + type, shpPath); + } + } + + /// + /// Opens shapefile writer. + /// + /// Path to shapefile. + /// Shape type. + /// Shapefile fields definitions. + /// Shapefile writer. 
+ public static ShapefileWriter Open(string shpPath, ShapeType type, params DbfField[] fields) + { + return Open(shpPath, type, fields, null, null); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/Internal/GeneralIOHelpers.cs b/src/NetTopologySuite.IO.ShapefileNG/Internal/GeneralIOHelpers.cs new file mode 100644 index 0000000..f612fa5 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/Internal/GeneralIOHelpers.cs @@ -0,0 +1,36 @@ +using System; +using System.IO; +using System.Runtime.CompilerServices; + +namespace NetTopologySuite.IO.Internal +{ + internal static class GeneralIOHelpers + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void FillBufferOrThrow(Stream stream, byte[] buffer, int offset, int count) + { + // optimize for the overwhelmingly most likely path: one read does the trick. + int bytesRead = stream.Read(buffer, offset, count); + if (bytesRead != count) + { + FillBufferOrThrowRare(stream, buffer, offset, count, bytesRead); + } + } + + private static void FillBufferOrThrowRare(Stream stream, byte[] buffer, int offset, int count, int prevBytesRead) + { + while (prevBytesRead != 0) + { + offset += prevBytesRead; + count -= prevBytesRead; + prevBytesRead = stream.Read(buffer, offset, count); + if (prevBytesRead == count) + { + return; + } + } + + throw new EndOfStreamException(); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatForwardOnlyReader.cs b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatForwardOnlyReader.cs new file mode 100644 index 0000000..e544311 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatForwardOnlyReader.cs @@ -0,0 +1,53 @@ +using System; +using System.IO; +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO.Internal +{ + internal class ShapefileFormatForwardOnlyReader + { + private readonly Stream _forwardOnlyReadableStream; + + private readonly byte[] _oneHundredByteBuffer = new byte[100]; + + public ShapefileFormatForwardOnlyReader(Stream forwardOnlyReadableStream) + { + _forwardOnlyReadableStream = forwardOnlyReadableStream; + } + + public ShapefileHeaderNG ReadHeader() + { + byte[] scratchBuffer = _oneHundredByteBuffer; + GeneralIOHelpers.FillBufferOrThrow(_forwardOnlyReadableStream, scratchBuffer, 0, 100); + + ReadOnlySpan headerBytes = scratchBuffer; + + // TODO: This will lead to having different values on different machines. + // On mashines with BitConverter.IsLittleEndian == true this will give LittleEndian decoded values (int, ShapeTypeNG and double). + // On mashines with BitConverter.IsLittleEndian == false those values will be BigEndian decoded. + int bigEndianFileLengthInWords = MemoryMarshal.Read(headerBytes.Slice(24, 4)); // The value name is misleading. It should be littleEndianFileLengthInWords. It is converted to bigEndian in ShapefileHeaderNG() constructor. 
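            // Per the ESRI shapefile spec, the shape type (byte 32) and the eight bounding-box
            // doubles (bytes 36..99) are stored little-endian, unlike the file length at byte 24,
            // which the spec stores big-endian.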
+ var shapeType = MemoryMarshal.Read(headerBytes.Slice(32, 4)); + var boundingBox = MemoryMarshal.Cast(headerBytes.Slice(36)); + return new ShapefileHeaderNG(bigEndianFileLengthInWords, shapeType, boundingBox[0], boundingBox[1], boundingBox[2], boundingBox[3], boundingBox[4], boundingBox[5], boundingBox[6], boundingBox[7]); + } + + public ShapefileIndexFileRecordNG ReadIndexFileRecordHeader() + { + byte[] scratchBuffer = _oneHundredByteBuffer; + GeneralIOHelpers.FillBufferOrThrow(_forwardOnlyReadableStream, scratchBuffer, 0, 8); + return MemoryMarshal.Read(scratchBuffer); + } + + public ShapefileMainFileRecordHeaderNG ReadMainFileRecordHeader() + { + byte[] scratchBuffer = _oneHundredByteBuffer; + GeneralIOHelpers.FillBufferOrThrow(_forwardOnlyReadableStream, scratchBuffer, 0, 8); + return MemoryMarshal.Read(scratchBuffer); + } + + public void FillNextRecordContents(byte[] buffer, int offset, int count) + { + GeneralIOHelpers.FillBufferOrThrow(_forwardOnlyReadableStream, buffer, offset, count); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatSeekableReader.cs b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatSeekableReader.cs new file mode 100644 index 0000000..41a15da --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatSeekableReader.cs @@ -0,0 +1,71 @@ +using System.IO; + +namespace NetTopologySuite.IO.Internal +{ + internal sealed class ShapefileFormatSeekableReader + { + private readonly ShapefileFormatForwardOnlyReader _mainFileReader; + + private readonly Stream _seekableMainFileStream; + + private readonly ShapefileFormatForwardOnlyReader _indexFileReader; + + private readonly Stream _seekableIndexFileStream; + + private int _lastRecordIndex = -1; + + private ShapefileMainFileRecordHeaderNG _lastMainFileRecordHeader; + + public ShapefileFormatSeekableReader(Stream seekableMainFileStream, Stream seekableIndexFileStream) + { + _mainFileReader = new ShapefileFormatForwardOnlyReader(seekableMainFileStream); + _indexFileReader = new ShapefileFormatForwardOnlyReader(seekableIndexFileStream); + + _seekableMainFileStream = seekableMainFileStream; + _seekableIndexFileStream = seekableIndexFileStream; + + seekableMainFileStream.Position = 0; + MainFileHeader = _mainFileReader.ReadHeader(); + if (seekableMainFileStream.Length != MainFileHeader.FileLengthInBytes) + { + throw new InvalidDataException($"Main file stream is {seekableMainFileStream.Length} bytes long, but the header indicates that it should be {MainFileHeader.FileLengthInBytes} bytes long."); + } + + seekableIndexFileStream.Position = 0; + IndexFileHeader = _indexFileReader.ReadHeader(); + if (seekableIndexFileStream.Length != IndexFileHeader.FileLengthInBytes) + { + throw new InvalidDataException($"Index file stream is {seekableIndexFileStream.Length} bytes long, but the header indicates that it should be {IndexFileHeader.FileLengthInBytes} bytes long."); + } + + RecordCount = (IndexFileHeader.FileLengthInBytes - 100) / 8; + } + + public ShapefileHeaderNG MainFileHeader { get; } + + public ShapefileHeaderNG IndexFileHeader { get; } + + public int RecordCount { get; } + + public ShapefileMainFileRecordHeaderNG GetMainFileRecordHeaderNG(int index) + { + if (index == _lastRecordIndex) + { + return _lastMainFileRecordHeader; + } + + _seekableIndexFileStream.Position = 100 + (index * 8); + var indexFileRecordHeader = _indexFileReader.ReadIndexFileRecordHeader(); + + _seekableMainFileStream.Position = indexFileRecordHeader.RecordHeaderOffsetInBytes; + var result 
= _mainFileReader.ReadMainFileRecordHeader(); + if (result.RecordNumber != index + 1) + { + throw new InvalidDataException($"Index file is inconsistent with the main file: index record #{index + 1} points to main file record #{result.RecordNumber}."); + } + + _lastRecordIndex = index; + return _lastMainFileRecordHeader = result; + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatSpanReader.cs b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatSpanReader.cs new file mode 100644 index 0000000..bdbe3ef --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileFormatSpanReader.cs @@ -0,0 +1,135 @@ +using System; +using System.IO; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO.Internal +{ + internal readonly ref struct ShapefileFormatSpanReader + { + private readonly ReadOnlySpan _mainFile; + + private readonly ReadOnlySpan _indexFile; + + public ShapefileFormatSpanReader(ReadOnlySpan mainFile, ReadOnlySpan indexFile) + { + _mainFile = mainFile; + _indexFile = indexFile; + } + + public readonly void ValidateFileStructure() + { + if (_mainFile.Length < Unsafe.SizeOf()) + { + throw new InvalidDataException("Main file must be at least 100 bytes long."); + } + + ref readonly var mainFileHeader = ref MainFileHeader; + + if (_indexFile.Length < Unsafe.SizeOf()) + { + throw new InvalidDataException("Index file must be at least 100 bytes long."); + } + + ref readonly var indexFileHeader = ref IndexFileHeader; + + // it's fine if the header indicates a SMALLER length than what we were created with, + // since that just means that the caller didn't slice off the end. not a huge deal. + if (mainFileHeader.FileLengthInBytes > _mainFile.Length) + { + throw new InvalidDataException("Main file's header thinks that the file is bigger than the size we were created with."); + } + + // avoid repeated 32 --> 64 conversions by doing this once + long mainFileLength = mainFileHeader.FileLengthInBytes; + + if (indexFileHeader.FileLengthInBytes > _indexFile.Length) + { + throw new InvalidDataException("Index file's header thinks that the file is bigger than the size we were created with."); + } + + if (indexFileHeader.FileLengthInBytes != (RecordCount * 8) + Unsafe.SizeOf()) + { + throw new InvalidDataException("Index file must contain exactly 8 bytes per record after the 100-byte header."); + } + + int nextRecordNumber = 1; + var fileShapeType = mainFileHeader.ShapeType; + foreach (ref readonly var indexRecord in MemoryMarshal.Cast(_indexFile.Slice(Unsafe.SizeOf()))) + { + // work in longs here, since invalid data could otherwise cause overflow. 
+ long recordHeaderOffset = ShapefilePrimitiveHelpers.SwapByteOrderOnLittleEndianMachines(indexRecord.BigEndianRecordHeaderOffsetInWords) * 2L; + if (recordHeaderOffset < Unsafe.SizeOf()) + { + throw new InvalidDataException("All records in the main file must begin after the 100-byte header."); + } + + long recordContentLengthInBytesFromIndex = ShapefilePrimitiveHelpers.SwapByteOrderOnLittleEndianMachines(indexRecord.BigEndianRecordContentLengthInWords) * 2L; + if (recordContentLengthInBytesFromIndex < Unsafe.SizeOf()) + { + throw new InvalidDataException("All records in the main file be at least 4 bytes long."); + } + + long recordContentByteOffset = recordHeaderOffset + Unsafe.SizeOf(); + long recordEndByteOffset = recordContentByteOffset + recordContentLengthInBytesFromIndex; + if (mainFileLength < recordEndByteOffset) + { + throw new InvalidDataException("The index file identifies one or more records that would extend beyond the end of the main file."); + } + + var recordHeader = MemoryMarshal.Read(_mainFile.Slice((int)recordHeaderOffset)); + long recordContentLengthInBytesFromMainFile = ShapefilePrimitiveHelpers.SwapByteOrderOnLittleEndianMachines(recordHeader.BigEndianRecordContentLengthInWords) * 2L; + if (recordContentLengthInBytesFromIndex != recordContentLengthInBytesFromMainFile) + { + throw new InvalidDataException("The index file disagrees with main file about the size of one or more records."); + } + + if (recordHeader.RecordNumber != nextRecordNumber++) + { + throw new InvalidDataException("Records in the main file are out of order."); + } + + var recordShapeType = MemoryMarshal.Read(_mainFile.Slice((int)recordContentByteOffset)); + if (recordShapeType != fileShapeType) + { + throw new InvalidDataException("Shape type for all records in the main file must match shape type from its header."); + } + } + } + + public ref readonly ShapefileHeaderStruct MainFileHeader + { + get + { + return ref Unsafe.As(ref Unsafe.AsRef(in MemoryMarshal.GetReference(_mainFile))); + } + } + + public ref readonly ShapefileHeaderStruct IndexFileHeader + { + get + { + return ref Unsafe.As(ref Unsafe.AsRef(in MemoryMarshal.GetReference(_indexFile))); + } + } + + public int RecordCount + { + get + { + return (IndexFileHeader.FileLengthInBytes - Unsafe.SizeOf()) / Unsafe.SizeOf(); + } + } + + public ReadOnlySpan GetInnerRecordContents(int recordIndex) + { + var indexRecord = MemoryMarshal.Read(_indexFile.Slice(100 + (recordIndex * 8))); + int recordContentsStart = indexRecord.RecordHeaderOffsetInBytes + Unsafe.SizeOf(); + int recordContentLength = indexRecord.RecordContentLengthInBytes; + var recordContents = _mainFile.Slice(recordContentsStart, recordContentLength); + + // "inner" record contents = everything after the ShapeType + return recordContents.Slice(Unsafe.SizeOf()); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileHeaderStruct.cs b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileHeaderStruct.cs new file mode 100644 index 0000000..7e0b3db --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefileHeaderStruct.cs @@ -0,0 +1,70 @@ +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO.Internal +{ + [StructLayout(LayoutKind.Explicit, Size = 100, Pack = 1)] + internal struct ShapefileHeaderStruct + { + private static readonly int BigEndianFileCode = ShapefilePrimitiveHelpers.SwapByteOrderOnLittleEndianMachines(9994); + + [FieldOffset(0)] + private readonly int _fileCode; + + [FieldOffset(28)] + private readonly int _version; + + 
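        // Offset 24 holds the file length as a big-endian count of 16-bit words; the
        // FileLengthInBytes property below converts to and from native byte counts.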
[FieldOffset(24)] + public int BigEndianFileLengthInWords; + + [FieldOffset(32)] + public ShapeTypeNG ShapeType; + + [FieldOffset(36)] + public double MinX; + + [FieldOffset(44)] + public double MinY; + + [FieldOffset(52)] + public double MaxX; + + [FieldOffset(60)] + public double MaxY; + + [FieldOffset(68)] + public double MinZ; + + [FieldOffset(76)] + public double MaxZ; + + [FieldOffset(84)] + public double MinM; + + [FieldOffset(92)] + public double MaxM; + + public ShapefileHeaderStruct(int bigEndianFileLengthInWords, ShapeTypeNG shapeType, double minX, double minY, double maxX, double maxY, double minZ, double maxZ, double minM, double maxM) + { + this = default; + _fileCode = BigEndianFileCode; + _version = 1000; + + BigEndianFileLengthInWords = bigEndianFileLengthInWords; + ShapeType = shapeType; + MinX = minX; + MinY = minY; + MaxX = maxX; + MaxY = maxY; + MinZ = minZ; + MaxZ = maxZ; + MinM = minM; + MaxM = maxM; + } + + public int FileLengthInBytes + { + get => ShapefilePrimitiveHelpers.BigEndianWordCountToNativeByteCount(BigEndianFileLengthInWords); + set => BigEndianFileLengthInWords = ShapefilePrimitiveHelpers.NativeByteCountToBigEndianWordCount(value); + } + } +} \ No newline at end of file diff --git a/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefilePrimitiveHelpers.cs b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefilePrimitiveHelpers.cs new file mode 100644 index 0000000..5d5dd0b --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/Internal/ShapefilePrimitiveHelpers.cs @@ -0,0 +1,56 @@ +using System; +using System.Buffers.Binary; +using System.Runtime.CompilerServices; + +namespace NetTopologySuite.IO.Internal +{ + internal static class ShapefilePrimitiveHelpers + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ValidateLittleEndianMachine() + { + if (!BitConverter.IsLittleEndian) + { + ThrowForBigEndianMachine(); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T SwapByteOrderOnLittleEndianMachines(T val) + where T : unmanaged + { + ValidateLittleEndianMachine(); + if (typeof(T) == typeof(int) || typeof(T) == typeof(ShapeTypeNG)) + { + ref int s32 = ref Unsafe.As(ref val); + s32 = BinaryPrimitives.ReverseEndianness(s32); + } + else if (typeof(T) == typeof(long) || typeof(T) == typeof(double)) + { + ref long s64 = ref Unsafe.As(ref val); + s64 = BinaryPrimitives.ReverseEndianness(s64); + } + + return val; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int BigEndianWordCountToNativeByteCount(int val) + { + val = SwapByteOrderOnLittleEndianMachines(val); + return checked(val * 2); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int NativeByteCountToBigEndianWordCount(int val) + { + return SwapByteOrderOnLittleEndianMachines(val / 2); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + private static void ThrowForBigEndianMachine() + { + throw new NotImplementedException("Support for big-endian machine architectures has not yet been implemented."); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/NetTopologySuite.IO.ShapefileNG.csproj b/src/NetTopologySuite.IO.ShapefileNG/NetTopologySuite.IO.ShapefileNG.csproj new file mode 100644 index 0000000..4d03d74 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/NetTopologySuite.IO.ShapefileNG.csproj @@ -0,0 +1,38 @@ + + + + $(ProjectDir)..\ + netstandard2.0 + NetTopologySuite.IO + true + + + 8.0 + + + CS1591 + + + + NetTopologySuite.IO.ShapefileNG + + + + NetTopologySuite.IO.ShapefileNG + NTS 
- Topology Suite - Shapefile IO Libraries - Next Generation + NetTopologySuite - Team + NetTopologySuite - Team + BSD-3-Clause + This package contains several IO libraries to cope with spatial data from various sources. + NTS;Shapefile + + + + + + + + + + diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapeRecords/MultiPointXYRecordNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapeRecords/MultiPointXYRecordNG.cs new file mode 100644 index 0000000..41613a4 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapeRecords/MultiPointXYRecordNG.cs @@ -0,0 +1,30 @@ +using System; + +namespace NetTopologySuite.IO.ShapeRecords +{ + public readonly ref struct MultiPointXYRecordNG + { + public readonly double MinX; + + public readonly double MinY; + + public readonly double MaxX; + + public readonly double MaxY; + + public readonly ReadOnlySpan Points; + + public MultiPointXYRecordNG(double minX, double minY, double maxX, double maxY, ReadOnlySpan points) + { + MinX = minX; + MinY = minY; + MaxX = maxX; + MaxY = maxY; + + // https://docs.microsoft.com/en-us/dotnet/api/system.readonlyspan-1?view=net-5.0 + // ReadOnlySpan is a ref struct that is allocated on the stack and can never escape to the managed heap. + // + Points = points; + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapeRecords/PointXYRecordNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapeRecords/PointXYRecordNG.cs new file mode 100644 index 0000000..0b2dfa4 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapeRecords/PointXYRecordNG.cs @@ -0,0 +1,31 @@ +using System; +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO.ShapeRecords +{ + [StructLayout(LayoutKind.Sequential)] + public readonly struct PointXYRecordNG : IEquatable + { + public readonly double X; + + public readonly double Y; + + public PointXYRecordNG(double x, double y) + { + X = x; + Y = y; + } + + public override bool Equals(object obj) + => obj is PointXYRecordNG other && Equals(other); + + public bool Equals(PointXYRecordNG other) + => X.Equals(other.X) && Y.Equals(other.Y); + + public override int GetHashCode() + => (X, Y).GetHashCode(); + + public override string ToString() + => $"({X}, {Y})"; + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapeTypeNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapeTypeNG.cs new file mode 100644 index 0000000..f320764 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapeTypeNG.cs @@ -0,0 +1,33 @@ +namespace NetTopologySuite.IO +{ + public enum ShapeTypeNG + { + NullShape = 0, + + Point = 1, + + PolyLine = 3, + + Polygon = 5, + + MultiPoint = 8, + + PointZ = 11, + + PolyLineZ = 13, + + PolygonZ = 15, + + MultiPointZ = 18, + + PointM = 21, + + PolyLineM = 23, + + PolygonM = 25, + + MultiPointM = 28, + + MultiPatch = 31, + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapefileHeaderNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapefileHeaderNG.cs new file mode 100644 index 0000000..547e5b4 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapefileHeaderNG.cs @@ -0,0 +1,51 @@ +using NetTopologySuite.Geometries; + +namespace NetTopologySuite.IO +{ + using Internal; + + public sealed class ShapefileHeaderNG + { + private ShapefileHeaderStruct _data; + + public ShapefileHeaderNG(int fileLengthInBytes, ShapeTypeNG shapeType, Envelope boundingBox) + : this(fileLengthInBytes, shapeType, boundingBox.MinX, boundingBox.MinY, boundingBox.MaxX, boundingBox.MaxY, double.NaN, double.NaN, double.NaN, double.NaN) + { + } + + public ShapefileHeaderNG(int fileLengthInBytes, 
ShapeTypeNG shapeType, double minX, double minY, double maxX, double maxY, double minZ, double maxZ, double minM, double maxM) + { + int bigEndianFileLengthInWords = ShapefilePrimitiveHelpers.NativeByteCountToBigEndianWordCount(fileLengthInBytes); + _data = new ShapefileHeaderStruct(bigEndianFileLengthInWords, shapeType, minX, minY, maxX, maxY, minZ, maxZ, minM, maxM); + } + + internal ShapefileHeaderNG(in ShapefileHeaderStruct data) + { + _data = data; + } + + public ref int BigEndianFileLengthInWords => ref _data.BigEndianFileLengthInWords; + + public int FileLengthInBytes => _data.FileLengthInBytes; + + public ref ShapeTypeNG ShapeType => ref _data.ShapeType; + + public ref double MinX => ref _data.MinX; + + public ref double MinY => ref _data.MinY; + + public ref double MaxX => ref _data.MaxX; + + public ref double MaxY => ref _data.MaxY; + + public ref double MinZ => ref _data.MinZ; + + public ref double MaxZ => ref _data.MaxZ; + + public ref double MinM => ref _data.MinM; + + public ref double MaxM => ref _data.MaxM; + + internal ref ShapefileHeaderStruct Data => ref _data; + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapefileIndexFileRecordNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapefileIndexFileRecordNG.cs new file mode 100644 index 0000000..e661b14 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapefileIndexFileRecordNG.cs @@ -0,0 +1,26 @@ +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO +{ + using Internal; + + [StructLayout(LayoutKind.Sequential)] + public struct ShapefileIndexFileRecordNG + { + public int BigEndianRecordHeaderOffsetInWords; + + public int BigEndianRecordContentLengthInWords; + + public int RecordHeaderOffsetInBytes + { + get => ShapefilePrimitiveHelpers.BigEndianWordCountToNativeByteCount(BigEndianRecordHeaderOffsetInWords); + set => BigEndianRecordHeaderOffsetInWords = ShapefilePrimitiveHelpers.NativeByteCountToBigEndianWordCount(value); + } + + public int RecordContentLengthInBytes + { + get => ShapefilePrimitiveHelpers.BigEndianWordCountToNativeByteCount(BigEndianRecordContentLengthInWords); + set => BigEndianRecordContentLengthInWords = ShapefilePrimitiveHelpers.NativeByteCountToBigEndianWordCount(value); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapefileMainFileRecordHeaderNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapefileMainFileRecordHeaderNG.cs new file mode 100644 index 0000000..51868e0 --- /dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapefileMainFileRecordHeaderNG.cs @@ -0,0 +1,27 @@ +using System.Buffers.Binary; +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO +{ + using Internal; + + [StructLayout(LayoutKind.Sequential)] + public struct ShapefileMainFileRecordHeaderNG + { + public int BigEndianRecordNumber; + + public int BigEndianRecordContentLengthInWords; + + public int RecordNumber + { + get => ShapefilePrimitiveHelpers.SwapByteOrderOnLittleEndianMachines(BigEndianRecordNumber); + set => BigEndianRecordNumber = ShapefilePrimitiveHelpers.SwapByteOrderOnLittleEndianMachines(value); + } + + public int RecordContentLengthInBytes + { + get => ShapefilePrimitiveHelpers.BigEndianWordCountToNativeByteCount(BigEndianRecordContentLengthInWords); + set => BigEndianRecordContentLengthInWords = ShapefilePrimitiveHelpers.NativeByteCountToBigEndianWordCount(value); + } + } +} diff --git a/src/NetTopologySuite.IO.ShapefileNG/ShapefileSpanReaderNG.cs b/src/NetTopologySuite.IO.ShapefileNG/ShapefileSpanReaderNG.cs new file mode 100644 index 0000000..18e78b8 --- 
/dev/null +++ b/src/NetTopologySuite.IO.ShapefileNG/ShapefileSpanReaderNG.cs @@ -0,0 +1,71 @@ +using System; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +namespace NetTopologySuite.IO +{ + using Internal; + using ShapeRecords; + + public ref struct ShapefileSpanReaderNG + { + private readonly ShapefileFormatSpanReader _innerReader; + + public ShapefileSpanReaderNG(ReadOnlySpan mainFile, ReadOnlySpan indexFile) + { + _innerReader = new ShapefileFormatSpanReader(mainFile, indexFile); + _innerReader.ValidateFileStructure(); + RecordCount = _innerReader.RecordCount; + } + + public int RecordCount { get; } + + public ShapeTypeNG ShapeType => _innerReader.MainFileHeader.ShapeType; + + public PointXYRecordNG GetPointXYRecord(int recordIndex) + { + if ((uint)recordIndex >= (uint)RecordCount) + { + ThrowArgumentOutOfRangeExceptionForRecordIndex(); + } + + if (ShapeType != ShapeTypeNG.Point) + { + ThrowInvalidOperationExceptionForShapeTypeMismatch(); + } + + return MemoryMarshal.Read(_innerReader.GetInnerRecordContents(recordIndex)); + } + + public MultiPointXYRecordNG GetMultiPointXYRecord(int recordIndex) + { + if ((uint)recordIndex >= (uint)RecordCount) + { + ThrowArgumentOutOfRangeExceptionForRecordIndex(); + } + + if (ShapeType != ShapeTypeNG.MultiPoint) + { + ThrowInvalidOperationExceptionForShapeTypeMismatch(); + } + + var recordContents = _innerReader.GetInnerRecordContents(recordIndex); + var bbox = MemoryMarshal.Cast(recordContents.Slice(0, 32)); + int numPoints = MemoryMarshal.Read(recordContents.Slice(32, 4)); + var points = MemoryMarshal.Cast(recordContents.Slice(36, numPoints * Unsafe.SizeOf())); + return new MultiPointXYRecordNG(bbox[0], bbox[1], bbox[2], bbox[3], points); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + private static void ThrowArgumentOutOfRangeExceptionForRecordIndex() + { + throw new ArgumentOutOfRangeException("recordIndex", "Must be non-negative and less than RecordCount."); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + private void ThrowInvalidOperationExceptionForShapeTypeMismatch() + { + throw new InvalidOperationException($"This method does not support shapefiles whose ShapeType is {ShapeType}."); + } + } +} \ No newline at end of file diff --git a/test/NetTopologySuite.IO.Esri.Test/Attributes/AttributesTest.cs b/test/NetTopologySuite.IO.Esri.Test/Attributes/AttributesTest.cs new file mode 100644 index 0000000..4e25651 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Attributes/AttributesTest.cs @@ -0,0 +1,155 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Features; +using NUnit.Framework; +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.IO.Dbf; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test.Attributes +{ + public class AttributesTest + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + /// + /// + /// + public void Start() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + + // ReadFromShapeFile(); + // TestSharcDbf(); + TestShapeCreation(); + } + + private void TestShapeCreation() + { + var points = new Coordinate[3]; + points[0] = new Coordinate(0, 0); + points[1] = new Coordinate(1, 0); + points[2] = new Coordinate(1, 1); + + var line_string = new 
LineString(points); + + var attributes = new AttributesTable(); + attributes.Add("FOO", "FOO"); + + var feature = new Feature(Factory.CreateMultiLineString(new LineString[] { line_string }), attributes); + var features = new Feature[1]; + features[0] = feature; + + features.SaveToShapefile("line_string.shp"); + } + + [Test] + public void TestConstructor2() + { + IAttributesTable at = null; + /* + Assert.DoesNotThrow( + () => at = new AttributesTable(new[] {new[] {"key1", new object()}, new[] {"key2", new object()}})); + Assert.That(at, Is.Not.Null); + Assert.That(at.Count, Is.EqualTo(2)); + Assert.That(at.Exists("key1"), Is.True); + Assert.That(at.Exists("key2"), Is.True); + Assert.Throws( + () => at = new AttributesTable(new[] {new[] {"key1", new object()}, new[] {(object) "key2",}})); + Assert.Throws( + () => at = new AttributesTable(new[] {new[] {"key1", new object()}, new[] {new object(), "item2",}})); + */ + Assert.DoesNotThrow(() => at = new AttributesTable { { "key1", new object() }, { "key2", new object() } }); + Assert.That(at, Is.Not.Null); + Assert.That(at.Count, Is.EqualTo(2)); + Assert.That(at.Exists("key1"), Is.True); + Assert.That(at.Exists("key2"), Is.True); + } + + private void TestSharcDbf() + { + const string filename = @"Strade.dbf"; + if (!File.Exists(filename)) + throw new FileNotFoundException(filename + " not found at " + Environment.CurrentDirectory); + + using (var reader = new DbfReader(filename)) + { + Console.WriteLine("RecordSize: " + reader.RecordSize); + Console.WriteLine("Fields.Count: " + reader.Fields.Count); + Console.WriteLine("RecordCount: " + reader.RecordCount); + Console.WriteLine("LastUpdateDate: " + reader.LastUpdateDate); + + foreach (var field in reader.Fields) + { + Console.WriteLine("FieldName: " + field.Name); + Console.WriteLine("DBF Type: " + field.FieldType); + //Console.WriteLine("CLR Type: " + field.Type); + Console.WriteLine("Length: " + field.Length); + Console.WriteLine("Length: " + field.Length); + Console.WriteLine("Precision: " + field.Precision); + //Console.WriteLine("DataAddress: " + field.DataAddress); + } + + foreach (var record in reader) + { + foreach (var attr in record) + { + Console.WriteLine(" " + attr.Key + ": " + attr.Value); + } + } + } + Console.WriteLine(); + } + + private void ReadFromShapeFile() + { + const string filename = @"country"; + if (!File.Exists(filename + ".dbf")) + throw new FileNotFoundException(filename + " not found at " + Environment.CurrentDirectory); + + List featureCollection; + ShapeType shapeType; + DbfFieldCollection fields; + + using (var shpReader = ShapefileReader.Open(filename)) + { + shapeType = shpReader.ShapeType; + fields = shpReader.Fields; + featureCollection = shpReader.ToList(); + } + + int index = 0; + Console.WriteLine("Elements = " + featureCollection.Count); + foreach (var feature in featureCollection) + { + Console.WriteLine("Feature " + index++); + foreach (string name in feature.Attributes.GetNames()) + Console.WriteLine(name + ": " + feature.Attributes[name]); + } + + //Directory + string dir = CommonHelpers.TestShapefilesDirectory + Path.DirectorySeparatorChar; + + // Test write with stub header + string file = dir + "testWriteStubHeader"; + featureCollection.SaveToShapefile(file); + + + // Test write with header from a existing shapefile + file = dir + "testWriteShapefileHeader"; + using (var shpWriter = ShapefileWriter.Open(file, shapeType, fields)) + { + shpWriter.Write(featureCollection); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/CommonHelpers.cs 
b/test/NetTopologySuite.IO.Esri.Test/CommonHelpers.cs new file mode 100644 index 0000000..acbe7cd --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/CommonHelpers.cs @@ -0,0 +1,7 @@ +using System; +using System.IO; + +internal sealed class CommonHelpers +{ + public static readonly string TestShapefilesDirectory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "TestShapefiles"); +} diff --git a/test/NetTopologySuite.IO.Esri.Test/DbfDateTest.cs b/test/NetTopologySuite.IO.Esri.Test/DbfDateTest.cs new file mode 100644 index 0000000..0ce2440 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/DbfDateTest.cs @@ -0,0 +1,57 @@ +using NetTopologySuite.IO.Dbf; +using NUnit.Framework; +using System; +using System.Collections; +using System.IO; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + /// + /// + /// + [TestFixture] + public class DbfDateTest + { + /// + /// Initializes a new instance of the class. + /// + public DbfDateTest() + { + + } + + /// + /// + /// + [Test] + public void ReadDbfDate() + { + string file = Path.Combine(CommonHelpers.TestShapefilesDirectory, "date.dbf"); + + if (!File.Exists(file)) + throw new FileNotFoundException("file not found at " + Path.GetDirectoryName(file)); + + using (var reader = new DbfReader(file)) + { + var dateField = reader.Fields.FirstOrDefault(f => f is DbfDateField) as DbfDateField; + Assert.IsNotNull(dateField); + + var firstRecord = reader.FirstOrDefault(); + + Assert.IsNotNull(firstRecord); + Assert.AreEqual(2, firstRecord.Count); + + foreach (object item in firstRecord.Values) + Assert.IsNotNull(item); + + var date = dateField.DateValue.Value; + + Assert.AreEqual(10, date.Day); + Assert.AreEqual(3, date.Month); + Assert.AreEqual(2006, date.Year); + } + + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Features/UseOfIndexAndPreparedGeometry.cs b/test/NetTopologySuite.IO.Esri.Test/Features/UseOfIndexAndPreparedGeometry.cs new file mode 100644 index 0000000..1efe488 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Features/UseOfIndexAndPreparedGeometry.cs @@ -0,0 +1,240 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.Geometries.Utilities; +using NetTopologySuite.Index.Quadtree; +using NetTopologySuite.IO.Shapefile; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Diagnostics.Contracts; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test.Features +{ + + public class UseOfIndexAndPreparedGeometry + { + [TestCase(@"D:\Daten\Geofabrik\roads.shp")] + public void TestShapefile(string shapefile) + { + if (!System.IO.File.Exists(shapefile)) + throw new IgnoreException("file not present"); + + List featureCollection; + Envelope bbox; + + //var shp = new Extended.ShapeDataReader(shapefile); + using (var shp = ShapefileReader.Open(shapefile)) + { + bbox = shp.BoundingBox; + featureCollection = shp.Cast().ToList(); + } + + const double min1 = 0.4; + const double min2 = 0.5 - min1; + + var rnd = new System.Random(8888); + var queryBox = new Envelope( + bbox.MinX + (min1 + rnd.NextDouble() * min2) * bbox.Width, + bbox.MaxX - (min1 + rnd.NextDouble() * min2) * bbox.Width, + bbox.MinY + (min1 + rnd.NextDouble() * min2) * bbox.Height, + bbox.MaxY - (min1 + rnd.NextDouble() * min2) * bbox.Height); + + var shape = new SineStarFactory(GeometryFactory.Default) { Envelope = queryBox }.CreateSineStar(); + TestShapefilePlain(featureCollection, shape, "intersects"); + 
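            // The same intersects query is exercised three ways below (a plain scan, run twice,
            // a prepared geometry, and a quadtree-indexed collection) so their timings can be compared.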
TestShapefilePlain(featureCollection, shape, "intersects"); + TestShapefilePrepared(featureCollection, shape, "intersects"); + TestShapefileIndexed(featureCollection, shape, "intersects"); + + } + + private void TestShapefilePrepared(ICollection features, Geometry queryGeom, string spatialPredicate) + { + System.Diagnostics.Debug.WriteLine("\nPrepared"); + var sw = new System.Diagnostics.Stopwatch(); + sw.Start(); + System.Diagnostics.Debug.WriteLine("Setup collection: {0}ms", sw.ElapsedMilliseconds); + sw.Restart(); + var prep = NetTopologySuite.Geometries.Prepared.PreparedGeometryFactory.Prepare(queryGeom); + var lst = new List(2048); + foreach (var feature in features) + { + if (prep.Intersects(feature.Geometry)) + lst.Add(feature); + } + System.Diagnostics.Debug.WriteLine("Query collection: {0}ms", sw.ElapsedMilliseconds); + System.Diagnostics.Debug.WriteLine("Queried {0} features of a set of {1}", lst.Count, features.Count); + } + + private void TestShapefilePlain(ICollection features, Geometry queryGeom, string spatialPredicate) + { + System.Diagnostics.Debug.WriteLine("\nPlain"); + var sw = new System.Diagnostics.Stopwatch(); + sw.Start(); + System.Diagnostics.Debug.WriteLine("Setup collection: {0}ms", sw.ElapsedMilliseconds); + sw.Restart(); + var lst = new List(2048); + foreach (var feature in features) + { + if (queryGeom.Intersects(feature.Geometry)) + lst.Add(feature); + } + System.Diagnostics.Debug.WriteLine("Query collection: {0}ms", sw.ElapsedMilliseconds); + System.Diagnostics.Debug.WriteLine("Queried {0} features of a set of {1}", lst.Count, features.Count); + } + + private void TestShapefileIndexed(ICollection features, Geometry queryGeom, string spatialPredicate) + { + System.Diagnostics.Debug.WriteLine("\nIndexed"); + var sw = new System.Diagnostics.Stopwatch(); + sw.Start(); + var idxFeatureCollection = new IndexedFeatureColection(); + foreach (var feature in features) + idxFeatureCollection.Add(feature); + System.Diagnostics.Debug.WriteLine("Setup collection: {0}ms", sw.ElapsedMilliseconds); + sw.Restart(); + var lst = new List(idxFeatureCollection.Query(queryGeom, spatialPredicate)); + System.Diagnostics.Debug.WriteLine("Query collection: {0}ms", sw.ElapsedMilliseconds); + System.Diagnostics.Debug.WriteLine("Queried {0} features of a set of {1}", lst.Count, features.Count); + } + + } + + public class IndexedFeatureColection : Collection, ICollection + { + private Quadtree _featuresIndex; + + private class FeatureComparer : EqualityComparer + { + public override bool Equals(IFeature x, IFeature y) + { + if (x == null || y == null) throw new ArgumentNullException(); + + if (x.Attributes == null && y.Attributes != null) + return false; + + if (x.Attributes != null && y.Attributes == null) + return false; + + if (x.Attributes.Count != y.Attributes.Count) + return false; + + string[] names = x.Attributes.GetNames(); + foreach (string name in names) + { + object v1 = x.Attributes[name]; + object v2 = y.Attributes[name]; + if (!v1.Equals(v2)) return false; + } + + if (!x.Geometry.EqualsTopologically(y.Geometry)) + return false; + + return true; + } + + public override int GetHashCode(IFeature obj) + { + int res = obj.Geometry.GetHashCode(); + if (obj.Attributes != null) + { + foreach (object value in obj.Attributes.GetValues()) + res ^= value.GetHashCode(); + } + return res; + } + } + + private readonly FeatureComparer _featureComparer = new FeatureComparer(); + + public IndexedFeatureColection() + { + _featuresIndex = new Quadtree(); + } + + protected override void 
ClearItems() + { + base.ClearItems(); + _featuresIndex = new Quadtree(); + } + + protected override void InsertItem(int index, IFeature item) + { + base.InsertItem(index, item); + _featuresIndex.Insert(item.BoundingBox, index); + } + + protected override void RemoveItem(int index) + { + _featuresIndex.Remove(this[index].BoundingBox, index); + base.RemoveItem(index); + } + + protected override void SetItem(int index, IFeature item) + { + var oldItem = this[index]; + _featuresIndex.Remove(oldItem.Geometry.EnvelopeInternal, index); + base.SetItem(index, item); + _featuresIndex.Insert(item.Geometry.EnvelopeInternal, index); + } + + public new bool Contains(IFeature item) + { + var indices = _featuresIndex.Query(item.BoundingBox); + foreach (int tmpItem in indices) + { + var feature = this[tmpItem]; + if (_featureComparer.Equals(item, feature)) + return true; + } + return false; + } + + public IEnumerable Query(Geometry geom, string spatialPredicate) + { + if (geom == null) + throw new ArgumentNullException("geom"); + + var prepgeom = NetTopologySuite.Geometries.Prepared.PreparedGeometryFactory.Prepare(geom); + switch (spatialPredicate.Trim().ToLowerInvariant()) + { + case "intersects": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Intersects); + case "contains": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Contains); + case "containsproperly": + return QueryInternal(geom.EnvelopeInternal, prepgeom.ContainsProperly); + case "coveredby": + return QueryInternal(geom.EnvelopeInternal, prepgeom.CoveredBy); + case "covers": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Covers); + case "crosses": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Crosses); + case "disjoint": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Disjoint); + case "overlaps": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Overlaps); + case "within": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Within); + case "touches": + return QueryInternal(geom.EnvelopeInternal, prepgeom.Touches); + + } + return null; + } + + private IEnumerable QueryInternal(Envelope bbox, Func spatialPredicate) + { + Contract.Assert(bbox != null); + Contract.Assert(spatialPredicate != null); + + foreach (int index in _featuresIndex.Query(bbox)) + { + var feature = this[index]; + if (spatialPredicate(feature.Geometry)) + yield return feature; + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Geometries/LineStringSamples.cs b/test/NetTopologySuite.IO.Esri.Test/Geometries/LineStringSamples.cs new file mode 100644 index 0000000..36a79ca --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Geometries/LineStringSamples.cs @@ -0,0 +1,156 @@ +using NetTopologySuite.Geometries; +using System; +using NetTopologySuite.Operation.Buffer; + +namespace NetTopologySuite.IO.ShapeFile.Test.Geometries +{ + /// + /// + /// + public class LineStringSamples + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + private LineString line = null; + + /// + /// + /// + public LineStringSamples() : base() + { + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + + var coordinates = new Coordinate[] + { + new Coordinate(10, 10), + new Coordinate(20, 20), + new Coordinate(20, 10), + }; + line = Factory.CreateLineString(coordinates); + } + + /// + /// + /// + public void Start() + { + var pointInLine = Factory.CreatePoint(new Coordinate(20, 10)); + var pointOutLine = Factory.CreatePoint(new Coordinate(20, 31)); + var aLine = 
Factory.CreateLineString(new Coordinate[] { new Coordinate(23, 32.2), new Coordinate(922, 11) }); + var anotherLine = Factory.CreateLineString(new Coordinate[] { new Coordinate(0, 1), new Coordinate(30, 30) }); + + try + { + Write(line.Area); + Write(line.Boundary); + Write(line.BoundaryDimension); + Write(line.Centroid); + Write(line.Coordinate); + Write(line.Coordinates); + Write(line.CoordinateSequence); + Write(line.Dimension); + Write(line.EndPoint); + Write(line.Envelope); + Write(line.EnvelopeInternal); + Write(line.InteriorPoint); + Write(line.IsClosed); + Write(line.IsEmpty); + Write(line.IsRing); + Write(line.IsSimple); + Write(line.IsValid); + Write(line.Length); + Write(line.NumPoints); + Write(line.StartPoint); + if (line.UserData != null) + Write(line.UserData); + else Write("UserData null"); + + Write(line.Buffer(10)); + Write(line.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Flat })); + Write(line.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Square })); + Write(line.Buffer(10, 20)); + Write(line.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Flat })); + Write(line.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Square })); + Write(line.Contains(pointInLine)); + Write(line.Contains(pointOutLine)); + Write(line.Crosses(pointInLine)); + Write(line.Crosses(pointOutLine)); + Write(line.Difference(pointInLine)); + Write(line.Difference(pointOutLine)); + Write(line.Disjoint(pointInLine)); + Write(line.Disjoint(pointOutLine)); + Write(line.Distance(pointInLine)); + Write(line.Distance(pointOutLine)); + Write(line.EqualsTopologically(line.Copy() as LineString)); + Write(line.EqualsExact(line.Copy() as LineString)); + Write(line.ConvexHull()); + Write(line.Intersection(pointInLine)); + Write(line.Intersection(pointOutLine)); + Write(line.Intersection(aLine)); + Write(line.Intersects(pointInLine)); + Write(line.Intersects(pointOutLine)); + Write(line.Intersects(aLine)); + Write(line.IsWithinDistance(pointOutLine, 2)); + Write(line.IsWithinDistance(pointOutLine, 222)); + Write(line.Overlaps(pointInLine)); + Write(line.Overlaps(pointOutLine)); + Write(line.Overlaps(aLine)); + Write(line.Overlaps(anotherLine)); + Write(line.Relate(pointInLine)); + Write(line.Relate(pointOutLine)); + Write(line.Relate(aLine)); + Write(line.Relate(anotherLine)); + Write(line.SymmetricDifference(pointInLine)); + Write(line.SymmetricDifference(pointOutLine)); + Write(line.SymmetricDifference(aLine)); + Write(line.SymmetricDifference(anotherLine)); + Write(line.ToString()); + Write(line.AsText()); + Write(line.Touches(pointInLine)); + Write(line.Touches(pointOutLine)); + Write(line.Touches(aLine)); + Write(line.Touches(anotherLine)); + Write(line.Union(pointInLine)); + Write(line.Union(pointOutLine)); + Write(line.Union(aLine)); + Write(line.Union(anotherLine)); + Write(line.Within(pointInLine)); + Write(line.Within(pointOutLine)); + Write(line.Within(aLine)); + Write(line.Within(anotherLine)); + + string linestring = "LINESTRING (1.2 3.4, 5.6 7.8, 9.1 10.12)"; + string anotherlinestringg = "LINESTRING (12345 3654321, 685 7777.945677, 782 111.1)"; + var geom1 = Reader.Read(linestring); + Write(geom1.AsText()); + var geom2 = Reader.Read(anotherlinestringg); + Write(geom2.AsText()); + + byte[] bytes = line.AsBinary(); + var test1 = new WKBReader().Read(bytes); + Write(test1.ToString()); + + //bytes = new GDBWriter().Write(line); + //test1 = new GDBReader().Read(bytes); + //Write(test1.ToString()); + } + catch (Exception ex) + { + throw ex; + } + } + 
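        // Illustrative sketch only, not part of the original sample code: it contrasts
        // EqualsTopologically with EqualsExact using a reversed copy of the sample line,
        // since the Copy()-based comparisons in Start() return true for both. The method
        // name is arbitrary.
        public void CompareEqualityKinds()
        {
            // A reversed line covers the same point set but lists its vertices in the
            // opposite order.
            var reversed = (LineString)line.Reverse();

            Write(line.EqualsTopologically(reversed)); // True: same geometry as a point set
            Write(line.EqualsExact(reversed));         // False: coordinate order differs
        }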
+ protected void Write(object o) + { + Console.WriteLine(o.ToString()); + } + + protected void Write(string s) + { + Console.WriteLine(s); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Geometries/MultiPointSamples.cs b/test/NetTopologySuite.IO.Esri.Test/Geometries/MultiPointSamples.cs new file mode 100644 index 0000000..2a090d9 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Geometries/MultiPointSamples.cs @@ -0,0 +1,94 @@ +using NetTopologySuite.Geometries; +using System; +using NetTopologySuite.Operation.Buffer; + +namespace NetTopologySuite.IO.ShapeFile.Test.Geometries +{ + /// + /// + /// + public class MultiPointSamples + { + private MultiPoint multiPoint = null; + + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + /// + /// + /// + public MultiPointSamples() : base() + { + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + + var coordinates = new Coordinate[] + { + new Coordinate(100,100), + new Coordinate(200,200), + new Coordinate(300,300), + new Coordinate(400,400), + new Coordinate(500,500), + }; + multiPoint = Factory.CreateMultiPointFromCoords(coordinates); + } + + /// + /// + /// + public void Start() + { + try + { + Write(multiPoint.Area); + Write(multiPoint.Boundary); + Write(multiPoint.BoundaryDimension); + Write(multiPoint.Centroid); + Write(multiPoint.Coordinate); + Write(multiPoint.Coordinates); + Write(multiPoint.Dimension); + Write(multiPoint.Envelope); + Write(multiPoint.EnvelopeInternal); + Write(multiPoint.Geometries.Length); + Write(multiPoint.InteriorPoint); + Write(multiPoint.IsEmpty); + Write(multiPoint.IsSimple); + Write(multiPoint.IsValid); + Write(multiPoint.Length); + Write(multiPoint.NumGeometries); + Write(multiPoint.NumPoints); + + Write(multiPoint.Buffer(10)); + Write(multiPoint.Buffer(10, new BufferParameters {EndCapStyle = EndCapStyle.Flat })); + Write(multiPoint.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Square })); + Write(multiPoint.Buffer(10, 20)); + Write(multiPoint.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Flat })); + Write(multiPoint.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Square })); + Write(multiPoint.ConvexHull()); + + byte[] bytes = multiPoint.AsBinary(); + var test1 = new WKBReader().Read(bytes); + Write(test1.ToString()); + + //bytes = new GDBWriter().Write(multiPoint); + //test1 = new GDBReader().Read(bytes); + //Write(test1.ToString()); + } + catch (Exception ex) + { + throw ex; + } + } + + protected void Write(object o) + { + Console.WriteLine(o.ToString()); + } + + protected void Write(string s) + { + Console.WriteLine(s); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Geometries/PointSamples.cs b/test/NetTopologySuite.IO.Esri.Test/Geometries/PointSamples.cs new file mode 100644 index 0000000..2518248 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Geometries/PointSamples.cs @@ -0,0 +1,143 @@ +using NetTopologySuite.Geometries; +using System; +using NetTopologySuite.Operation.Buffer; + +namespace NetTopologySuite.IO.ShapeFile.Test.Geometries +{ + /// + /// + /// + public class PointSamples + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + private Point point = null; + + /// + /// + /// + public PointSamples() + { + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + + point = Factory.CreatePoint(new Coordinate(100, 100)); + } + + /// + /// + /// + public void 
Start() + { + var pInterior = Factory.CreatePoint(new Coordinate(100, 100)); + var pExterior = Factory.CreatePoint(new Coordinate(100, 101)); + + try + { + Write(point.Area); + Write(point.Boundary); + Write(point.BoundaryDimension); + Write(point.Centroid); + Write(point.Coordinate); + Write(point.Coordinates); + Write(point.CoordinateSequence); + Write(point.Dimension); + Write(point.Envelope); + Write(point.EnvelopeInternal); + Write(point.Factory); + Write(point.InteriorPoint); + Write(point.IsEmpty); + Write(point.IsSimple); + Write(point.IsValid); + Write(point.Length); + Write(point.NumPoints); + Write(point.PrecisionModel); + Write(point.X); + Write(point.Y); + + Write(point.Contains(pInterior)); + Write(point.Contains(pExterior)); + + Write(point.Buffer(10)); + Write(point.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Square })); + Write(point.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Flat })); + Write(point.Buffer(10, 20)); + Write(point.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Square })); + Write(point.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Flat })); + + Write(point.Crosses(pInterior)); + Write(point.Crosses(pExterior)); + Write(point.Difference(pInterior)); + Write(point.Difference(pExterior)); + Write(point.Disjoint(pInterior)); + Write(point.Disjoint(pExterior)); + Write(point.EqualsTopologically(pInterior)); + Write(point.EqualsTopologically(pExterior)); + Write(point.EqualsExact(pInterior)); + Write(point.EqualsExact(pExterior)); + Write(point.ConvexHull()); + Write(point.Intersection(pInterior)); + Write(point.Intersection(pExterior)); + Write(point.Intersects(pInterior)); + Write(point.Intersects(pExterior)); + Write(point.IsWithinDistance(pInterior, 0.001)); + Write(point.IsWithinDistance(pExterior, 0.001)); + Write(point.Overlaps(pInterior)); + Write(point.Overlaps(pExterior)); + Write(point.SymmetricDifference(pInterior)); + Write(point.SymmetricDifference(pExterior)); + Write(point.ToString()); + Write(point.AsText()); + Write(point.Touches(pInterior)); + Write(point.Touches(pExterior)); + Write(point.Union(pInterior)); + Write(point.Union(pExterior)); + Write(point.Within(pInterior)); + Write(point.Within(pExterior)); + + string pointstring = "POINT (100.22 100.33)"; + string anotherpointstring = "POINT (12345 3654321)"; + var geom1 = Reader.Read(pointstring); + Write(geom1.AsText()); + var geom2 = Reader.Read(anotherpointstring); + Write(geom2.AsText()); + + byte[] bytes = point.AsBinary(); + var test1 = new WKBReader().Read(bytes); + Write(test1.ToString()); + + bytes = Factory.CreatePoint(new Coordinate(double.MinValue, double.MinValue)).AsBinary(); + var testempty = new WKBReader().Read(bytes); + Write(testempty); + + //bytes = new GDBWriter().Write(geom1); + //test1 = new GDBReader().Read(bytes); + //Write(test1.ToString()); + + // Test Empty Geometries + Write(Point.Empty); + Write(LineString.Empty); + Write(Polygon.Empty); + Write(MultiPoint.Empty); + Write(MultiLineString.Empty); + Write(MultiPolygon.Empty); + Write(GeometryCollection.Empty); + } + catch (Exception ex) + { + throw ex; + } + } + + protected void Write(object o) + { + Console.WriteLine(o.ToString()); + } + + protected void Write(string s) + { + Console.WriteLine(s); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Geometries/PolygonSamples.cs b/test/NetTopologySuite.IO.Esri.Test/Geometries/PolygonSamples.cs new file mode 100644 index 0000000..8510b1a --- /dev/null +++ 
b/test/NetTopologySuite.IO.Esri.Test/Geometries/PolygonSamples.cs @@ -0,0 +1,170 @@ +using NetTopologySuite.Geometries; +using System; +using NetTopologySuite.Operation.Buffer; + +namespace NetTopologySuite.IO.ShapeFile.Test.Geometries +{ + public class PolygonSamples + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + private Polygon polygon = null; + private LinearRing shell = null; + private LinearRing hole = null; + + /// + /// + /// + public PolygonSamples() + { + this.Factory = new GeometryFactory(new PrecisionModel(PrecisionModels.Fixed)); + + shell = Factory.CreateLinearRing(new Coordinate[] { new Coordinate(100,100), + new Coordinate(200,100), + new Coordinate(200,200), + new Coordinate(100,200), + new Coordinate(100,100), }); + hole = Factory.CreateLinearRing(new Coordinate[] { new Coordinate(120,120), + new Coordinate(180,120), + new Coordinate(180,180), + new Coordinate(120,180), + new Coordinate(120,120), }); + polygon = Factory.CreatePolygon(shell, new LinearRing[] { hole, }); + } + + public void Start() + { + var interiorPoint = Factory.CreatePoint(new Coordinate(130, 150)); + var exteriorPoint = Factory.CreatePoint(new Coordinate(650, 1500)); + var aLine = Factory.CreateLineString(new Coordinate[] { new Coordinate(23, 32.2), new Coordinate(10, 222) }); + var anotherLine = Factory.CreateLineString(new Coordinate[] { new Coordinate(0, 1), new Coordinate(30, 30) }); + var intersectLine = Factory.CreateLineString(new Coordinate[] { new Coordinate(0, 1), new Coordinate(300, 300) }); + + try + { + Write(polygon.Area); + Write(polygon.Boundary); + Write(polygon.BoundaryDimension); + Write(polygon.Centroid); + Write(polygon.Coordinate); + Write(polygon.Coordinates.Length); + Write(polygon.Dimension); + Write(polygon.Envelope); + Write(polygon.EnvelopeInternal); + Write(polygon.ExteriorRing); + Write(polygon.InteriorPoint); + Write(polygon.InteriorRings.Length); + Write(polygon.IsEmpty); + Write(polygon.IsSimple); + Write(polygon.IsValid); + Write(polygon.Length); + Write(polygon.NumInteriorRings); + Write(polygon.NumPoints); + if (polygon.UserData != null) + Write(polygon.UserData); + else Write("UserData null"); + + Write(polygon.Buffer(10)); + Write(polygon.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Flat})); + Write(polygon.Buffer(10, new BufferParameters { EndCapStyle = EndCapStyle.Square })); + Write(polygon.Buffer(10, 20)); + Write(polygon.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Flat })); + Write(polygon.Buffer(10, new BufferParameters(20) { EndCapStyle = EndCapStyle.Square })); + Write(polygon.Contains(interiorPoint)); + Write(polygon.Contains(exteriorPoint)); + Write(polygon.Contains(aLine)); + Write(polygon.Contains(anotherLine)); + Write(polygon.Crosses(interiorPoint)); + Write(polygon.Crosses(exteriorPoint)); + Write(polygon.Crosses(aLine)); + Write(polygon.Crosses(anotherLine)); + Write(polygon.Difference(interiorPoint)); + Write(polygon.Difference(exteriorPoint)); + Write(polygon.Difference(aLine)); + Write(polygon.Difference(anotherLine)); + Write(polygon.Disjoint(interiorPoint)); + Write(polygon.Disjoint(exteriorPoint)); + Write(polygon.Disjoint(aLine)); + Write(polygon.Disjoint(anotherLine)); + Write(polygon.Distance(interiorPoint)); + Write(polygon.Distance(exteriorPoint)); + Write(polygon.Distance(aLine)); + Write(polygon.Distance(anotherLine)); + Write(polygon.Intersection(interiorPoint)); + Write(polygon.Intersection(exteriorPoint)); + 
Write(polygon.Intersection(aLine)); + Write(polygon.Intersection(anotherLine)); + Write(polygon.Intersects(interiorPoint)); + Write(polygon.Intersects(exteriorPoint)); + Write(polygon.Intersects(aLine)); + Write(polygon.Intersects(anotherLine)); + Write(polygon.IsWithinDistance(interiorPoint, 300)); + Write(polygon.IsWithinDistance(exteriorPoint, 300)); + Write(polygon.IsWithinDistance(aLine, 300)); + Write(polygon.IsWithinDistance(anotherLine, 300)); + Write(polygon.Overlaps(interiorPoint)); + Write(polygon.Overlaps(exteriorPoint)); + Write(polygon.Overlaps(aLine)); + Write(polygon.Overlaps(anotherLine)); + Write(polygon.Relate(interiorPoint)); + Write(polygon.Relate(exteriorPoint)); + Write(polygon.Relate(aLine)); + Write(polygon.Relate(anotherLine)); + Write(polygon.SymmetricDifference(interiorPoint)); + Write(polygon.SymmetricDifference(exteriorPoint)); + Write(polygon.SymmetricDifference(aLine)); + Write(polygon.SymmetricDifference(anotherLine)); + Write(polygon.ToString()); + Write(polygon.AsText()); + Write(polygon.Touches(interiorPoint)); + Write(polygon.Touches(exteriorPoint)); + Write(polygon.Touches(aLine)); + Write(polygon.Touches(anotherLine)); + Write(polygon.Union(interiorPoint)); + Write(polygon.Union(exteriorPoint)); + Write(polygon.Union(aLine)); + Write(polygon.Union(anotherLine)); + + string aPoly = "POLYGON ((20 20, 100 20, 100 100, 20 100, 20 20))"; + string anotherPoly = "POLYGON ((20 20, 100 20, 100 100, 20 100, 20 20), (50 50, 60 50, 60 60, 50 60, 50 50))"; + var geom1 = Reader.Read(aPoly); + Write(geom1.AsText()); + var geom2 = Reader.Read(anotherPoly); + Write(geom2.AsText()); + + // ExpandToInclude tests + var envelope = new Envelope(0, 0, 0, 0); + envelope.ExpandToInclude(geom1.EnvelopeInternal); + envelope.ExpandToInclude(geom2.EnvelopeInternal); + Write(envelope.ToString()); + + // The polygon is not correctly ordered! Calling normalize we fix the problem... 
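                // Editor's note (illustrative, not part of the original changeset): Normalize()
                // rewrites the polygon into NTS's canonical form, giving the rings a deterministic
                // order and orientation. The effect on the shell can be observed just before the
                // call below (Orientation lives in the NetTopologySuite.Algorithm namespace):
                bool shellWasCcw = NetTopologySuite.Algorithm.Orientation.IsCCW(polygon.Shell.Coordinates);
                Write("Shell counter-clockwise before Normalize: " + shellWasCcw);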
+ polygon.Normalize(); + + byte[] bytes = polygon.AsBinary(); + var test1 = new WKBReader().Read(bytes); + Write(test1.ToString()); + + //bytes = new GDBWriter().Write(polygon); + //test1 = new GDBReader().Read(bytes); + //Write(test1.ToString()); + } + catch (Exception ex) + { + throw ex; + } + } + + protected void Write(object o) + { + Console.WriteLine(o.ToString()); + } + + protected void Write(string s) + { + Console.WriteLine(s); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue123Test.cs b/test/NetTopologySuite.IO.Esri.Test/Issue123Test.cs new file mode 100644 index 0000000..f292033 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue123Test.cs @@ -0,0 +1,61 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.Operation.Union; +using NetTopologySuite.Operation.Valid; +using NUnit.Framework; +using System.Collections.Generic; +using System.IO; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + [NtsIssueNumber(123)] + public class Issue123Test + { + [Test] + public void CascadedUnionError() + { + string[] wkt = + { + //MULTIPOLYGON (((-2.775 -37.382, -2.7694818956884695 -37.302294048833446, -4.381 -37.19, -4.379 -37.16, -2.7674053419183364 -37.272299383264858, -2.766 -37.252, -2.703 -37.257, -2.712 -37.386, -2.775 -37.382)), ((-0.558 -16.355, -0.556624473051351 -16.33528411373603, -2.168 -16.223, -2.165 -16.193, -0.55452706181921063 -16.305221219408683, -0.549 -16.226, -0.485 -16.23, -0.494 -16.36, -0.558 -16.355))) + "MULTIPOLYGON (((-2.775 -37.382, -2.7694818956884695 -37.302294048833446, -4.381 -37.19, -4.379 -37.16, -2.7674053419183364 -37.272299383264858, -2.766 -37.252, -2.703 -37.257, -2.712 -37.386, -2.775 -37.382)), ((-0.558 -16.355, -0.556624473051351 -16.33528411373603, -2.168 -16.223, -2.165 -16.193, -0.55452706181921063 -16.305221219408683, -0.549 -16.226, -0.485 -16.23, -0.494 -16.36, -0.558 -16.355)))", + //MULTIPOLYGON (((-4.218 -16.08, -4.216 -16.05, -2.924 -16.14, -2.926 -16.17, -4.218 -16.08)), ((-5.291 -18.097, -5.243 -17.415, -5.239 -17.352, -5.15929328747628 -17.357518157020873, -5.071 -16.091, -5.041 -16.093, -5.1292306097055169 -17.359599419328081, -5.109 -17.361, -5.114 -17.424, -5.161 -18.106, -5.291 -18.097))) + "MULTIPOLYGON (((-4.218 -16.08, -4.216 -16.05, -2.924 -16.14, -2.926 -16.17, -4.218 -16.08)), ((-5.291 -18.097, -5.243 -17.415, -5.239 -17.352, -5.15929328747628 -17.357518157020873, -5.071 -16.091, -5.041 -16.093, -5.1292306097055169 -17.359599419328081, -5.109 -17.361, -5.114 -17.424, -5.161 -18.106, -5.291 -18.097)))" + }; + + IList items = new List(); + var factory = GeometryFactory.Default; + var reader = new WKTReader(factory); + var geoms = reader.Read(wkt[0]); + for (int i = 0; i < geoms.NumGeometries; i++) + { + var geom = geoms.GetGeometryN(i); + items.Add(geom); + } + geoms = reader.Read(wkt[1]); + for (int i = 0; i < geoms.NumGeometries; i++) + { + var geom = geoms.GetGeometryN(i); + items.Add(geom); + } + + var op = new UnaryUnionOp(items, new GeometryFactory(new PrecisionModel(100))); + var result = op.Union(); + Assert.IsNotNull(result); + } + + [Test] + public void CascadedUnionError2() + { + var file = Path.Combine(CommonHelpers.TestShapefilesDirectory, "error_union.shp"); + var geoms = ShapefileReader.ReadAllGeometries(file).ToGeometryCollection(); + + var isValidOp = new IsValidOp(geoms); + Assert.That(!isValidOp.IsValid); + Assert.That(isValidOp.ValidationError.ErrorType, 
Is.EqualTo(TopologyValidationErrors.RingSelfIntersection)); + + Assert.That(geoms.Union, Throws.InstanceOf()); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue161.cs b/test/NetTopologySuite.IO.Esri.Test/Issue161.cs new file mode 100644 index 0000000..7cfe33f --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue161.cs @@ -0,0 +1,29 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; +using NUnit.Framework; +using System.IO; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + class Issue161 + { + [Test(Description = + "ShapefileDataReader error 'The output char buffer is too small to contain the decoded characters'")] + public void TestIssue161() + { + //SETUP + string filePath = Path.Combine(CommonHelpers.TestShapefilesDirectory, "LSOA_2011_EW_BGC.shp"); + if (!File.Exists(filePath)) Assert.Ignore("File '{0}' not present", filePath); + + //ATTEMPT + using (var reader = ShapefileReader.Open(filePath)) + { + while (reader.Read(out var geometry, out var attributes, out var deleted))//&& count++ < 3) + { + object val; + Assert.DoesNotThrow(() => val = attributes["LSOA11CD"]); + } + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue173Fixture.cs b/test/NetTopologySuite.IO.Esri.Test/Issue173Fixture.cs new file mode 100644 index 0000000..4e770d1 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue173Fixture.cs @@ -0,0 +1,48 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Features; +using NetTopologySuite.Geometries.Implementation; +using NUnit.Framework; +using System.Collections.Generic; +using System.IO; +using NetTopologySuite.IO.Shapefile; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [NtsIssueNumber(173)] + public class Issue173Fixture + { + [Test, Description("The NetTopologySuite.IO.GeoTools class method ShapeFile.GetGeometryType(Geometry geom) will always returns ShapeGeometryType.PointZM making all shapefile geometry GeometryZM.")] + public void Test() + { + var seq = DotSpatialAffineCoordinateSequenceFactory.Instance.Create(1, Ordinates.XY); + seq.SetOrdinate(0, Ordinate.X, -91.0454); + seq.SetOrdinate(0, Ordinate.Y, 32.5907); + var pt = new GeometryFactory(DotSpatialAffineCoordinateSequenceFactory.Instance).CreatePoint(seq); + + var attr = new AttributesTable(); + attr.Add("FirstName", "John"); + attr.Add("LastName", "Doe"); + + var features = new List(); + features.Add(new Feature(pt, attr)); + + string fileName = Path.GetTempFileName(); + fileName = fileName.Substring(0, fileName.Length - 4); + + features.SaveToShapefile(fileName); + + bool isPoint = false; + using (var reader = ShapefileReader.Open(fileName)) + { + isPoint = reader.ShapeType.ToString() == "Point"; + } + + foreach (string file in Directory.GetFiles(Path.GetTempPath(), Path.GetFileName(fileName) + ".*")) + { + File.Delete(file); + } + + Assert.IsTrue(isPoint); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue174.cs b/test/NetTopologySuite.IO.Esri.Test/Issue174.cs new file mode 100644 index 0000000..89521eb --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue174.cs @@ -0,0 +1,35 @@ +using NetTopologySuite.IO.Shapefile; +using NUnit.Framework; +using System; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [NtsIssueNumber(174)] + class Issue174 + { + [Test] + public void ensure_NetTopologySuite_IO_ShapeFile_assembly_is_strongly_named() + { + AssertStronglyNamedAssembly(typeof(ShapefileReader)); + } + + [Test] + public void ensure_NetTopologySuite_IO_GDB_assembly_is_strongly_named() + { + 
//AssertStronglyNamedAssembly(typeof(GDBReader)); + } + + [Test] + public void ensure_NetTopologySuite_IO_GeoTools_assembly_is_strongly_named() + { + AssertStronglyNamedAssembly(typeof(ShapefileWriter)); + } + + private void AssertStronglyNamedAssembly(Type typeFromAssemblyToCheck) + { + Assert.IsNotNull(typeFromAssemblyToCheck, "Cannot determine assembly from null"); + var assembly = typeFromAssemblyToCheck.Assembly; + StringAssert.DoesNotContain("PublicKeyToken=null", assembly.FullName, "Strongly named assembly should have a PublicKeyToken in fully qualified name"); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue178Fixture.cs b/test/NetTopologySuite.IO.Esri.Test/Issue178Fixture.cs new file mode 100644 index 0000000..91dda8d --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue178Fixture.cs @@ -0,0 +1,47 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; +using NUnit.Framework; +using System; +using System.IO; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + public class Issue178Fixture + { + [SetUp] + public void SetUp() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + } + + [Test] + public void TestCorruptedShapeFile() + { + var factory = GeometryFactory.Default; + const string filename = "christchurch-canterbury-h.shp"; + Assert.Throws(() => + { + ShapefileReader.ReadAll(filename).ToList(); + Assert.Fail("Invalid file: code should be unreachable"); + }); + + // ensure file isn't locked + string path = Path.Combine(Environment.CurrentDirectory, filename); + bool ok; + using (var file = File.OpenRead(path)) + { + using (var reader = new BinaryReader(file)) + { + // read a value + int val = reader.Read(); + Console.WriteLine("read a value: " + val); + ok = true; + } + } + Assert.That(ok, Is.True); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue36Tests.cs b/test/NetTopologySuite.IO.Esri.Test/Issue36Tests.cs new file mode 100644 index 0000000..7bfd826 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue36Tests.cs @@ -0,0 +1,74 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.IO; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + /// + /// see: https://github.com/NetTopologySuite/NetTopologySuite/issues/36 + /// + [TestFixture] + public class Issue36Tests + { + private int _numPassed; + + [SetUp] + public void SetUp() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + _numPassed = 0; + } + + [Test] + public void ok_when_writing_shapefile_with_features() + { + var xColumn = new DbfCharacterField("X", 10); + + IAttributesTable attributesTable = new AttributesTable(); + attributesTable.Add("X", "y"); + IFeature feature = new Feature(new Point(1, 2), attributesTable); + + + using (var writer = ShapefileWriter.Open(@"issue36", ShapeType.Point, xColumn)) + { + var features = new List(); + features.Add(feature); + Assert.DoesNotThrow(() => writer.Write(features)); + } + + _numPassed++; + } + + [Test] + public void ok_when_writing_shapefile_with_no_features() + { + var xColumn = new DbfCharacterField("X", 10); + + using (var writer = ShapefileWriter.Open(@"issue36", ShapeType.Point, xColumn)) + { + var features = new List(); + Assert.DoesNotThrow(() => 
writer.Write(features)); + } + + _numPassed++; + } + + [TearDown] + public void TearDown() + { + if (_numPassed < 2) return; + + // Clean up! + File.Delete("issue36.dbf"); + File.Delete("issue36.shp"); + File.Delete("issue36.shx"); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue46Fixture.cs b/test/NetTopologySuite.IO.Esri.Test/Issue46Fixture.cs new file mode 100644 index 0000000..9f4a420 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue46Fixture.cs @@ -0,0 +1,36 @@ +using NUnit.Framework; +using System.IO; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + /// + /// + /// + [TestFixture] + [ShapeFileIssueNumber(46)] + public class Issue46Fixture + { + [Test] + public void Invalid_data_should_be_not_read_as_default() + { + var factory = GeometryFactory.Default; + + string shp_path = Path.Combine(CommonHelpers.TestShapefilesDirectory, "Victoria North.shp"); + Assert.True(File.Exists(shp_path)); + + var data = ShapefileReader.ReadAllGeometries(shp_path).ToGeometryCollection(); + Assert.IsNotNull(data); + Assert.IsNotEmpty(data); + Assert.AreEqual(2, data.Count); + + Assert.IsTrue(data[0].IsEmpty); + Assert.IsInstanceOf(data[0]); + Assert.IsTrue(factory.CreateMultiPolygon().EqualsExact(data[0])); + + Assert.IsFalse(data[1].IsEmpty); + Assert.IsInstanceOf(data[1]); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue4Fixture.cs b/test/NetTopologySuite.IO.Esri.Test/Issue4Fixture.cs new file mode 100644 index 0000000..500aa77 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue4Fixture.cs @@ -0,0 +1,89 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Features; +using NetTopologySuite.Geometries.Implementation; +using NUnit.Framework; +using System.Collections.Generic; +using System.IO; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + public class Issue4Fixture + { + private static string CreateShapefilePath() + { + string path = Path.GetTempFileName(); + path = Path.ChangeExtension(path, string.Empty).TrimEnd('.'); + return path; + } + + private static Coordinate[] CreateCoords() + { + IList coordinates = new List(); + coordinates.Add(new Coordinate(0, 0)); + coordinates.Add(new Coordinate(1, 0)); + coordinates.Add(new Coordinate(1, 1)); + coordinates.Add(new Coordinate(0, 1)); + coordinates.Add(new Coordinate(0, 0)); + return coordinates.ToArray(); + } + + private static CoordinateSequence CopyToSequence(Coordinate[] coords, CoordinateSequence sequence) + { + for (int i = 0; i < coords.Length; i++) + { + sequence.SetOrdinate(i, Ordinate.X, coords[i].X); + sequence.SetOrdinate(i, Ordinate.Y, coords[i].Y); + } + return sequence; + } + + [Test] + public void shapefile_with_empty_attributes_table_should_not_thrown_errors() + { + // They should. + // https://desktop.arcgis.com/en/arcmap/latest/manage-data/shapefiles/geoprocessing-considerations-for-shapefile-output.htm + // The dBASE file must contain at least one field. When you create a new shapefile or dBASE table, an integer ID field is created as a default. 
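            // Editor's note (illustrative sketch, not part of the original changeset): with this
            // writer API the "at least one field" requirement can be met by declaring a placeholder
            // column up front. DbfCharacterField, ShapeType and ShapefileWriter are the types used
            // in Issue36Tests (NetTopologySuite.IO.Dbf / NetTopologySuite.IO.Shapefile, not imported
            // in this file), and CreateShapefilePath() is the helper defined above:
            //
            //     var attributes = new AttributesTable();
            //     attributes.Add("Id", "1");
            //     var features = new List<IFeature> { new Feature(new Point(0, 0), attributes) };
            //     string path = CreateShapefilePath();
            //     using (var writer = ShapefileWriter.Open(path, ShapeType.Point, new DbfCharacterField("Id", 10)))
            //     {
            //         writer.Write(features);
            //     }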
+ + /* + IFeature feature = new Feature(new Point(0, 0), new AttributesTable()); + IList features = new List { feature }; + + string path = CreateShapefilePath(); + var header = ShapefileDataWriter.GetHeader(feature, features.Count); + var writer = new ShapefileDataWriter(path) { Header = header }; + writer.Write(features); + Assert.That(File.Exists(Path.ChangeExtension(path, ".shp")), Is.True); + */ + } + + [Test] + public void create_xyonly_geom_using_sequence_and_dimension_two() + { + var coords = CreateCoords(); + + var factory = CoordinateArraySequenceFactory.Instance; + var sequence = CopyToSequence(coords, factory.Create(coords.Length, 2)); + + var polygon = GeometryFactory.Default.CreatePolygon(sequence); + Assert.That(polygon, Is.Not.Null); + Assert.That(polygon.Shell, Is.Not.Null); + Assert.That(polygon.Shell.CoordinateSequence.Dimension, Is.EqualTo(2)); + } + + [Test] + public void create_xyonly_geom_using_sequence_and_ordinates_xy() + { + var coords = CreateCoords(); + + var factory = CoordinateArraySequenceFactory.Instance; + var sequence = CopyToSequence(coords, factory.Create(coords.Length, Ordinates.XY)); + + var polygon = GeometryFactory.Default.CreatePolygon(sequence); + Assert.That(polygon, Is.Not.Null); + Assert.That(polygon.Shell, Is.Not.Null); + Assert.That(polygon.Shell.CoordinateSequence.Dimension, Is.EqualTo(2)); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Issue60Fixture.cs b/test/NetTopologySuite.IO.Esri.Test/Issue60Fixture.cs new file mode 100644 index 0000000..73afa2d --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Issue60Fixture.cs @@ -0,0 +1,42 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NUnit.Framework; +using System.Collections.Generic; +using System.IO; +using NetTopologySuite.IO.Shapefile; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + [ShapeFileIssueNumber(60)] + public class Issue60Fixture + { + /// + /// + /// + /// without fix results into System.OverflowException + [Test] + public void Feature_without_fields_should_be_written_correctly() + { + // They should not. + // https://desktop.arcgis.com/en/arcmap/latest/manage-data/shapefiles/geoprocessing-considerations-for-shapefile-output.htm + // The dBASE file must contain at least one field. When you create a new shapefile or dBASE table, an integer ID field is created as a default. 
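            // Editor's note (illustrative sketch, not part of the original changeset): the same
            // placeholder-column approach sketched in Issue4Fixture applies here. With a field
            // declared (DbfCharacterField again, from NetTopologySuite.IO.Dbf), the commented
            // round-trip below has something to write into the .dbf:
            //
            //     using (var writer = ShapefileWriter.Open(test56, ShapeType.Point, new DbfCharacterField("Id", 10)))
            //     {
            //         writer.Write(new List<IFeature> { feature });
            //     }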
+ + /* + string test56 = Path.Combine(CommonHelpers.TestShapefilesDirectory, "test60.shp"); + var factory = new GeometryFactory(); + var attributes = new AttributesTable(); + var feature = new Feature(factory.CreatePoint(new Coordinate(1, 2)), attributes); + var features = new[] { feature }; + + features.SaveToShapefile(test56); + + foreach (var readFeature in ShapefileReader.ReadAll(test56)) + { + Assert.AreEqual(feature.Geometry.AsText(), readFeature.Geometry.AsText()); + } + */ + } + + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/NetTopologySuite.IO.Esri.Test.csproj b/test/NetTopologySuite.IO.Esri.Test/NetTopologySuite.IO.Esri.Test.csproj new file mode 100644 index 0000000..ca570f7 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/NetTopologySuite.IO.Esri.Test.csproj @@ -0,0 +1,33 @@ + + + + netcoreapp3.1 + + false + + true + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/NetTopologySuite.IO.Esri.Test/NtsIssueNumberAttribute.cs b/test/NetTopologySuite.IO.Esri.Test/NtsIssueNumberAttribute.cs new file mode 100644 index 0000000..102ed81 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/NtsIssueNumberAttribute.cs @@ -0,0 +1,19 @@ +using System; +using NUnit.Framework; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + /// + /// The issue number used in this test (or fixture) actually refers to an + /// issue on https://github.com/NetTopologySuite/NetTopologySuite, back + /// before this project was split out on its own. + /// + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)] + public sealed class NtsIssueNumberAttribute : PropertyAttribute + { + public NtsIssueNumberAttribute(int issueNumber) + : base("NetTopologySuite issue", issueNumber) + { + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/DbaseReaderTests.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/DbaseReaderTests.cs new file mode 100644 index 0000000..f1f941a --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/DbaseReaderTests.cs @@ -0,0 +1,292 @@ +using System; +using System.IO; +using System.Linq; +using NetTopologySuite.Features; +using NetTopologySuite.IO.ShapeFile.Extended; +using NUnit.Framework; + +namespace NetTopologySuite.IO.Tests.ShapeFile.Extended +{ + /// + /// Summary description for DbfFileReaderTests + /// + [TestFixture] + public class DbaseReaderTests + { + private static readonly DateTime DATE_SAVED_IN_DBF = new DateTime(1990, 1, 1); + + private DbaseReader m_Reader; + private TempFileWriter m_TmpFile; + + [Test] + public void Ctor_SendNullPath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_Reader = new DbaseReader((string)null); + }); + } + + [Test] + public void Ctor_SendEmptyString_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_Reader = new DbaseReader(string.Empty); + }); + } + + [Test] + public void Ctor_SendWhitespaceString_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_Reader = new DbaseReader(" \t "); + }); + } + + [Test] + public void Ctor_SendNonExistantPath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_Reader = new DbaseReader(@"C:\this\is\sheker\path\should\never\exist\on\ur\pc"); + }); + } + + [Test] + public void Ctor_SendValidParameters_ShouldReturnNotNull() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("line_ed50_geo")); + + // Act. + m_Reader = new DbaseReader(m_TmpFile.Path); + + // Assert. 
+ Assert.IsNotNull(m_Reader); + } + + [Test] + public void ReadEntry_SendNegativeIndex_ShouldThrowException() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")); + m_Reader = new DbaseReader(m_TmpFile.Path); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadEntry(-1); + }); + } + + [Test] + public void ReadEntry_SendOutOfBoundIndex_ShouldThrowException() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")); + m_Reader = new DbaseReader(m_TmpFile.Path); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadEntry(3); + }); + } + + [Test] + public void ReadEntry_TryReadAfterDisposed_ShouldThrowException() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")); + m_Reader = new DbaseReader(m_TmpFile.Path); + + m_Reader.Dispose(); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadEntry(1); + }); + } + + [Test] + public void ReadEntry_ReadEntryValues_ShoudReturnCorrectValues() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")); + m_Reader = new DbaseReader(m_TmpFile.Path); + + var expectedTable = new + { + Ids = new double[] + { + 3, 2, 1 + }, + Strings = new string[] + { + "str3", "str2", "str1" + }, + WholeNums = new double[] + { + 3, 2, 1 + }, + DecNums = new double[] + { + 3, 2, 1 + }, + }; + + // Act. + var results = new IAttributesTable[] + { + m_Reader.ReadEntry(0), + m_Reader.ReadEntry(1), + m_Reader.ReadEntry(2) + }; + + // Assert. + int currResIndex = 0; + foreach (var res in results) + { + object id = res["id"]; + object str = res["str"]; + object wholeNum = res["wholeNum"]; + object decNum = res["decNum"]; + object date = res["dt"]; + + Assert.IsNotNull(id); + Assert.IsNotNull(str); + Assert.IsNotNull(wholeNum); + Assert.IsNotNull(decNum); + Assert.IsNotNull(date); + + Assert.IsInstanceOf(id); + Assert.IsInstanceOf(str); + Assert.IsInstanceOf(wholeNum); + Assert.IsInstanceOf(decNum); + Assert.IsInstanceOf(date); + + Assert.AreEqual(id, expectedTable.Ids[currResIndex]); + Assert.AreEqual(str, expectedTable.Strings[currResIndex]); + Assert.AreEqual(wholeNum, expectedTable.WholeNums[currResIndex]); + Assert.AreEqual(decNum, expectedTable.DecNums[currResIndex]); + Assert.AreEqual(date, DATE_SAVED_IN_DBF); + + currResIndex++; + } + } + + [Test] + public void ReadEntry_ReadNonExistantKeyFromEntry_ShoudReturnCorrectValues() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")); + m_Reader = new DbaseReader(m_TmpFile.Path); + + var results = m_Reader.ReadEntry(0); + + // Act. + Assert.Catch(() => + { + object a = results["a"]; + }); + } + + [Test] + public void ForEachIteration_ReadEntryValues_ShoudReturnCorrectValues() + { + // Arrange + m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")); + m_Reader = new DbaseReader(m_TmpFile.Path); + + var expectedTable = new + { + Ids = new double[] + { + 3, 2, 1 + }, + Strings = new string[] + { + "str3", "str2", "str1" + }, + WholeNums = new double[] + { + 3, 2, 1 + }, + DecNums = new double[] + { + 3, 2, 1 + }, + }; + + // Act. + var results = m_Reader.ToArray(); + + Assert.AreEqual(results.Length, 3); + + // Assert. 
+ int currResIndex = 0; + foreach (var res in results) + { + object id = res["id"]; + object str = res["str"]; + object wholeNum = res["wholeNum"]; + object decNum = res["decNum"]; + object date = res["dt"]; + + Assert.IsNotNull(id); + Assert.IsNotNull(str); + Assert.IsNotNull(wholeNum); + Assert.IsNotNull(decNum); + Assert.IsNotNull(date); + + Assert.IsInstanceOf(id); + Assert.IsInstanceOf(str); + Assert.IsInstanceOf(wholeNum); + Assert.IsInstanceOf(decNum); + Assert.IsInstanceOf(date); + + Assert.AreEqual(id, expectedTable.Ids[currResIndex]); + Assert.AreEqual(str, expectedTable.Strings[currResIndex]); + Assert.AreEqual(wholeNum, expectedTable.WholeNums[currResIndex]); + Assert.AreEqual(decNum, expectedTable.DecNums[currResIndex]); + Assert.AreEqual(date, DATE_SAVED_IN_DBF); + + currResIndex++; + } + } + + [TearDown] + public void TestCleanup() + { + if (m_Reader != null) + { + m_Reader.Dispose(); + m_Reader = null; + } + + if (m_TmpFile != null) + { + m_TmpFile.Dispose(); + m_TmpFile = null; + } + } + } + + static class DbfFiles + { + public static byte[] Read(string filename) + { + string file = Path.ChangeExtension(filename, ".dbf"); + string path = Path.Combine(CommonHelpers.TestShapefilesDirectory, file); + Assert.That(File.Exists(path), Is.True); + return File.ReadAllBytes(path); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/HelperMethods.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/HelperMethods.cs new file mode 100644 index 0000000..5fc2e35 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/HelperMethods.cs @@ -0,0 +1,71 @@ +using System; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Handlers; +using Assert = NUnit.Framework.Assert; + +namespace NetTopologySuite.IO.Tests.ShapeFile.Extended +{ + public static class HelperMethods + { + private static readonly double REQUIRED_PRECISION = Math.Pow(10, -9); + + public static void AssertEnvelopesEqual(Envelope env1, Envelope env2) + { + AssertEnvelopesEqual(env1, env2, REQUIRED_PRECISION); + } + + public static void AssertEnvelopesEqual(Envelope env1, Envelope env2, double requiredPrecision, string errorMessage = "") + { + AssertDoubleValuesEqual(env1.MaxX, env2.MaxX, requiredPrecision, errorMessage); + AssertDoubleValuesEqual(env1.MaxY, env2.MaxY, requiredPrecision, errorMessage); + AssertDoubleValuesEqual(env1.MinX, env2.MinX, requiredPrecision, errorMessage); + AssertDoubleValuesEqual(env1.MinY, env2.MinY, requiredPrecision, errorMessage); + } + + public static void AssertPolygonsEqual(Polygon poly1, Polygon poly2) + { + Assert.IsNotNull(poly1); + Assert.IsNotNull(poly2); + + LineString line1 = poly1.Shell; + LineString line2 = poly2.Shell; + + Assert.AreEqual(line1.Coordinates.Length, line2.Coordinates.Length, "Number of coordinates between polygons doesn't match"); + + for (int i = 0; i < line2.Coordinates.Length; i++) + { + AssertCoordinatesEqual(line2.Coordinates[i], line1.Coordinates[i]); + } + } + + public static void AssertCoordinatesEqual(Coordinate coord1, Coordinate coord2) + { + AssertDoubleValuesEqual(coord1.X, coord2.X); + AssertDoubleValuesEqual(coord1.Y, coord2.Y); + } + + public static void AssertDoubleValuesEqual(double num1, double num2) + { + AssertDoubleValuesEqual(num1, num2, REQUIRED_PRECISION); + } + + public static void AssertDoubleValuesEqual(double num1, double num2, double requiredPrecision, string errorMessage = "") + { + if (string.IsNullOrWhiteSpace(errorMessage)) + { + Assert.AreEqual(num1, num2, requiredPrecision); + 
} + else + { + Assert.AreEqual(num1, num2, requiredPrecision, errorMessage); + } + } + + public static void AssertMBRInfoEqual(MBRInfo info1, MBRInfo info2) + { + Assert.AreEqual(info1.ShapeFileDetails.OffsetFromStartOfFile, info2.ShapeFileDetails.OffsetFromStartOfFile); + Assert.AreEqual(info1.ShapeFileDetails.ShapeIndex, info2.ShapeFileDetails.ShapeIndex); + AssertEnvelopesEqual(info1.ShapeMBR, info2.ShapeMBR); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapeDataReaderTests.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapeDataReaderTests.cs new file mode 100644 index 0000000..26dea7b --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapeDataReaderTests.cs @@ -0,0 +1,656 @@ +using System; +using System.IO; +using System.Linq; +using NetTopologySuite.Geometries; +using NetTopologySuite.Index.Strtree; +using NetTopologySuite.IO.Handlers; +using NetTopologySuite.IO.ShapeFile.Extended; +using NetTopologySuite.IO.ShapeFile.Extended.Entities; +using NetTopologySuite.IO.ShapeFile.Test; +using NUnit.Framework; + +namespace NetTopologySuite.IO.Tests.ShapeFile.Extended +{ + [TestFixture] + public class ShapeDataReaderTests + { + private TempFileWriter[] m_TempFiles; + private ShapeDataReader m_shapeDataReader; + + [Test] + public void Ctor_SendNullPath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader((string)null); + }); + } + + [Test] + public void Ctor_SendEmptyPath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader(string.Empty); + }); + } + + [Test] + public void Ctor_SendWhitespacePath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader(" \t "); + }); + } + + [Test] + public void Ctor_SendNonExistantFilePath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader(@"C:\this\is\sheker\path\should\never\exist\on\ur\pc"); + }); + } + + [Test] + public void Ctor_SendShpWithNoDbf_ShouldThrowException() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + }; + + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + }); + } + + [Test] + public void Ctor_SendNullSpatialIndex_ShouldThrowException() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path, null); + }); + } + + [Test] + public void Ctor_SendNullGeometryFactory_ShouldThrowException() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + // Act. + Assert.Catch(() => + { + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path, new STRtree(), null); + }); + } + + [Test] + public void Ctor_SendShpWithNoPrj_ShouldReturnNotNull() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + // Act. 
+ m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + Assert.IsNotNull(m_shapeDataReader); + } + + [Test] + public void Ctor_SetAsyncIndexToTrue_ShouldReturnNotNull() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")) + }; + + // Act. + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path, new STRtree(), new GeometryFactory(), true); + + // Assert. + Assert.IsNotNull(m_shapeDataReader); + } + + [Test] + public void Ctor_SetAsyncIndexToFalse_ShouldReturnNotNull() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + // Act. + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path, new STRtree(), new GeometryFactory(), false); + + // Assert. + Assert.IsNotNull(m_shapeDataReader); + } + + [Test] + public void ShapeFileBounds_ReadPointED50Geo_ShouldReturnCorrectEnvelope() + { + // Arrange. + var expectedMBR = new Envelope(34.14526022208882, 34.28293070132935, 31.85116738930965, 31.92063218020455); + + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("point_ed50_geo")), + new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo")), + }; + + // Act. + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Assert. + HelperMethods.AssertEnvelopesEqual(expectedMBR, m_shapeDataReader.ShapefileBounds); + } + + [Test] + public void ReadByGeoFilter_ReadAllInBounds_ShouldReturnAllShapesAndCorrectDbfData() + { + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + string[] expectedShapeMetadata = new string[] { "Rectangle", "Triangle" }; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(m_shapeDataReader.ShapefileBounds); + + // Assert. 
+ Assert.IsNotNull(results); + + int currIndex = 0; + foreach (var result in results) + { + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + var sf = (ShapefileFeature)result; + Assert.AreEqual(sf.FeatureId, currIndex); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedResult[currIndex]); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata[currIndex++]); + } + } + + [Test] + public void ReadByGeoFilter_ReadWithWholeTriangleInBounds_ShouldReturnTriangle() + { + var boundsWithWholeTriangle = new Envelope(-0.62331, 0.63774, -0.02304, 0.76942); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + var expectedTriangle = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })); + + string expectedShapeMetadata = "Triangle"; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. + Assert.IsNotNull(results); + + var result = results.Single(); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + Assert.AreEqual(((ShapefileFeature)result).FeatureId, 1); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedTriangle); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata); + } + + [Test] + public void ReadByGeoFilter_ReadWithWholeRectangleInBounds_ShouldReturnRectangle() + { + var boundsWithWholeTriangle = new Envelope(-1.39510, -0.12716, -1.13938, -0.22977); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + var expectedTriangle = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })); + + string expectedShapeMetadata = "Rectangle"; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. 
+ Assert.IsNotNull(results); + + var result = results.Single(); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + Assert.AreEqual(((ShapefileFeature)result).FeatureId, 0); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedTriangle); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata); + } + + [Test] + public void ReadByGeoFilter_ReadWithWholeRectangleInBoundsAndFlagSetToTrue_ShouldReturnRectangle() + { + var boundsWithWholeTriangle = new Envelope(-1.39510, -0.12716, -1.13938, -0.22977); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + var expectedTriangle = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })); + + string expectedShapeMetadata = "Rectangle"; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle, true); + + // Assert. + Assert.IsNotNull(results); + + var result = results.Single(); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + Assert.AreEqual(((ShapefileFeature)result).FeatureId, 0); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedTriangle); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata); + } + + [Test] + public void ReadByGeoFilter_ReadWithRectanglePartiallyInBounds_ShouldReturnRectangle() + { + var boundsWithWholeTriangle = new Envelope(-0.93340, -0.38902, -0.73281, -0.29179); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + var expectedTriangle = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })); + + string expectedShapeMetadata = "Rectangle"; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. 
+ Assert.IsNotNull(results); + + var result = results.Single(); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + Assert.AreEqual(((ShapefileFeature)result).FeatureId, 0); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedTriangle); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata); + } + + [Test] + public void ReadByGeoFilter_ReadWithRectanglePartiallyInBoundsAndFlagSetToTrue_ShouldReturnRectangle() + { + var boundsWithWholeTriangle = new Envelope(-0.93340, -0.38902, -0.73281, -0.29179); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + var expectedTriangle = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })); + + string expectedShapeMetadata = "Rectangle"; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle, true); + + // Assert. + Assert.IsNotNull(results); + + var result = results.Single(); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + Assert.AreEqual(((ShapefileFeature)result).FeatureId, 0); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedTriangle); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata); + } + + [Test] + public void ReadByGeoFilter_ReadWithRectangleMBRPartiallyInBoundsAndFlagSetToTrue_ShouldReturnNoGeometries() + { + var boundsWithWholeTriangle = new Envelope(-1.17459, -1.00231, -1.09803, -0.80861); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle, true); + + // Assert. + Assert.IsNotNull(results); + Assert.IsFalse(results.Any()); + } + + // TODO: Don't know how bad it is that this tests passes. + // I give it as a parameter a rectangle that partially intersects only with the MBR of the + // shape, and doesn't intersect with the shape itself at all. + // It only works because the default index is RTree, use a different index? + [Test] + public void ReadByGeoFilter_ReadWithRectangleMBRPartiallyInBounds_ShouldReturnRectangle() + { + var boundsWithWholeTriangle = new Envelope(-1.17459, -1.00231, -1.09803, -0.80861); + + // Arrange. 
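            // Editor's note (illustrative, not part of the original changeset): as the TODO above
            // says, ReadByMBRFilter answers against bounding boxes, so a query window that only
            // touches a shape's MBR still returns that shape. Callers needing exact hits can
            // post-filter against the real geometry, e.g. (factory being any GeometryFactory):
            //
            //     var window = factory.ToGeometry(boundsWithWholeTriangle);
            //     var exactHits = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle)
            //                                      .Where(f => f.Geometry.Intersects(window));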
+ m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + var expectedTriangle = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })); + + string expectedShapeMetadata = "Rectangle"; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. + Assert.IsNotNull(results); + + var result = results.Single(); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + Assert.AreEqual(((ShapefileFeature)result).FeatureId, 0); + Assert.IsNotNull(result.Attributes); + + HelperMethods.AssertPolygonsEqual(result.Geometry as Polygon, expectedTriangle); + + object shapeNameData = result.Attributes["ShapeName"]; + Assert.IsInstanceOf(shapeNameData); + + Assert.AreEqual((string)shapeNameData, expectedShapeMetadata); + } + + [Test] + public void ReadByGeoFilter_ReadWithNoShapeInBounds_ShouldReturnEmptyEnumerable() + { + var boundsWithWholeTriangle = new Envelope(-1.17459, -1.00231, -1.09803, -1.5); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. + Assert.IsNotNull(results); + Assert.IsFalse(results.Any()); + } + + [Test] + public void ReadByGeoFilter_ReadWithNoShapeInBoundsAndFlagSetToTrue_ShouldReturnEmptyEnumerable() + { + var boundsWithWholeTriangle = new Envelope(-1.17459, -1.00231, -1.09803, -1.5); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle, true); + + // Assert. + Assert.IsNotNull(results); + Assert.IsFalse(results.Any()); + } + + [Test, ShapeFileIssueNumber(27)] + public void ReadByGeoFilter_ReadDbfDataAfterReaderObjectDisposed_ShouldNotThrowException() + { + var boundsWithWholeTriangle = new Envelope(-1.17459, -1.00231, -1.09803, -0.80861); + + // Arrange. + m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. + Assert.IsNotNull(results); + var result = results.Single(); + + // Dispose of the reader object. + m_shapeDataReader.Dispose(); + + // Try reading data. + Assert.IsNotNull(result.Attributes); + } + + [Test, ShapeFileIssueNumber(27)] + public void ReadByGeoFilter_ReadShapeDataAfterReaderObjectDisposed_ShouldNotThrowException() + { + var boundsWithWholeTriangle = new Envelope(-1.17459, -1.00231, -1.09803, -0.80861); + + // Arrange. 
+ m_TempFiles = new TempFileWriter[] + { + new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")), + new TempFileWriter(".dbf", DbfFiles.Read("UnifiedChecksMaterial")), + }; + + m_shapeDataReader = new ShapeDataReader(m_TempFiles[0].Path); + + // Act. + var results = m_shapeDataReader.ReadByMBRFilter(boundsWithWholeTriangle); + + // Assert. + Assert.IsNotNull(results); + var result = results.Single(); + + // Dispose of the reader object. + m_shapeDataReader.Dispose(); + + // Try reading data. + Assert.IsNotNull(result.Geometry); + } + + [TearDown] + public void TestCleanup() + { + if (m_shapeDataReader != null) + { + m_shapeDataReader.Dispose(); + m_shapeDataReader = null; + } + + if (m_TempFiles != null) + { + foreach (var tempFile in m_TempFiles) + { + tempFile.Dispose(); + } + + m_TempFiles = null; + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapeReaderTests.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapeReaderTests.cs new file mode 100644 index 0000000..75ca282 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapeReaderTests.cs @@ -0,0 +1,1254 @@ +using System; +using System.IO; +using System.Linq; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Handlers; +using NUnit.Framework; + +namespace NetTopologySuite.IO.Tests.ShapeFile.Extended +{ + [TestFixture] + public class ShapeReaderTests + { + private IO.ShapeFile.Extended.ShapeReader m_Reader; + private TempFileWriter m_TmpFile; + + [Test] + public void Ctor_SendNullPath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + new IO.ShapeFile.Extended.ShapeReader((string)null); + }); + } + + [Test] + public void Ctor_SendEmptyPath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + new IO.ShapeFile.Extended.ShapeReader(string.Empty); + }); + } + + [Test] + public void Ctor_SendWhitespacePath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + new IO.ShapeFile.Extended.ShapeReader(" \t "); + }); + } + + [Test] + public void Ctor_SendNonExistantFilePath_ShouldThrowException() + { + // Act. + Assert.Catch(() => + { + new IO.ShapeFile.Extended.ShapeReader(@"C:\this\is\sheker\path\should\never\exist\on\ur\pc"); + }); + } + + [Test] + public void Ctor_SendValidParameters_ShouldReturnNotNull() + { + // Arrange + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("line_ed50_geo")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Assert. + Assert.IsNotNull(m_Reader); + } + + [Test] + public void FileHeader_ReadPoint_ShouldReturnCorrectValues() + { + // Arrange. + var expectedMBR = new Envelope(34.14526022208882, 34.28293070132935, 31.85116738930965, 31.92063218020455); + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("point_ed50_geo")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Assert. + Assert.IsNotNull(m_Reader); + Assert.IsNotNull(m_Reader.ShapefileHeader); + Assert.AreEqual(m_Reader.ShapefileHeader.ShapeType, ShapeGeometryType.Point); + HelperMethods.AssertEnvelopesEqual(m_Reader.ShapefileHeader.Bounds, expectedMBR); + } + + [Test] + public void FileHeader_ReadLine_ShouldReturnCorrectValues() + { + // Arrange. + var expectedMBR = new Envelope(639384.5630270261, 662946.9241196744, 3505730.839052265, 3515879.236960234); + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("line_ed50_utm36")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Assert. 
+ Assert.IsNotNull(m_Reader); + Assert.IsNotNull(m_Reader.ShapefileHeader); + Assert.AreEqual(m_Reader.ShapefileHeader.ShapeType, ShapeGeometryType.LineString); + HelperMethods.AssertEnvelopesEqual(m_Reader.ShapefileHeader.Bounds, expectedMBR); + } + + [Test] + public void FileHeader_ReadPolygon_ShouldReturnCorrectValues() + { + // Arrange. + var expectedMBR = new Envelope(33.47383821246188, 33.75452922072821, 32.0295864794076, 32.1886342399706); + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("polygon_wgs84_geo")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Assert. + Assert.IsNotNull(m_Reader); + Assert.IsNotNull(m_Reader.ShapefileHeader); + Assert.AreEqual(m_Reader.ShapefileHeader.ShapeType, ShapeGeometryType.Polygon); + HelperMethods.AssertEnvelopesEqual(m_Reader.ShapefileHeader.Bounds, expectedMBR); + } + + [Test] + public void ReadMBRs_ReadPoint_ShouldReturnCorrectValues() + { + // Arrange. + MBRInfo[] infos = null; + + var expectedInfos = new[] + { + new MBRInfo(new Envelope(new Coordinate(34.282930701329349, 31.851167389309651)), + 100, + 0), + new MBRInfo(new Envelope(new Coordinate(34.145260222088822, 31.864369159253059)), + 128, + 1), + new MBRInfo(new Envelope(new Coordinate(34.181721116813314, 31.920632180204553)), + 156, + 2), + }; + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("point_ed50_geo")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + infos = m_Reader.ReadMBRs().ToArray(); + + // Assert. + Assert.IsNotNull(infos); + Assert.AreEqual(3, infos.Length); + + int currIndex = 0; + + foreach (var expectedInfo in expectedInfos) + { + HelperMethods.AssertMBRInfoEqual(expectedInfo, infos[currIndex++]); + } + } + + [Test] + public void ReadMBRs_ReadUnifiedWithNullAtStart_ShouldReturnCorrectValues() + { + // Arrange. + MBRInfo[] infos = null; + + var expectedInfos = new[] + { + new MBRInfo(new Envelope(-1.151515151515152, -0.353535353535354, -0.929292929292929, -0.419191919191919), + 112, + 1), + new MBRInfo(new Envelope(-0.457070707070707, 0.421717171717172, 0.070707070707071, 0.578282828282829), + 248, + 2), + }; + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtStart")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + infos = m_Reader.ReadMBRs().ToArray(); + + // Assert. + Assert.IsNotNull(infos); + Assert.AreEqual(expectedInfos.Length, infos.Length); + + int currIndex = 0; + + foreach (var expectedInfo in expectedInfos) + { + HelperMethods.AssertMBRInfoEqual(expectedInfo, infos[currIndex++]); + } + } + + [Test] + public void ReadMBRs_ReadUnifiedWithNullInMiddle_ShouldReturnCorrectValues() + { + // Arrange. + MBRInfo[] infos = null; + + var expectedInfos = new[] + { + new MBRInfo(new Envelope(-1.151515151515152, -0.353535353535354, -0.929292929292929, -0.419191919191919), + 100, + 0), + new MBRInfo(new Envelope(-0.457070707070707, 0.421717171717172, 0.070707070707071, 0.578282828282829), + 248, + 2), + }; + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullInMiddle")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + infos = m_Reader.ReadMBRs().ToArray(); + + // Assert. 
+ Assert.IsNotNull(infos); + Assert.AreEqual(expectedInfos.Length, infos.Length); + + int currIndex = 0; + + foreach (var expectedInfo in expectedInfos) + { + HelperMethods.AssertMBRInfoEqual(expectedInfo, infos[currIndex++]); + } + } + + [Test] + public void ReadMBRs_ReadUnifiedWithNullAtEnd_ShouldReturnCorrectValues() + { + // Arrange. + MBRInfo[] infos = null; + + var expectedInfos = new[] + { + new MBRInfo(new Envelope(-1.151515151515152, -0.353535353535354, -0.929292929292929, -0.419191919191919), + 100, + 0), + new MBRInfo(new Envelope(-0.457070707070707, 0.421717171717172, 0.070707070707071, 0.578282828282829), + 236, + 1), + }; + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtEnd")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + infos = m_Reader.ReadMBRs().ToArray(); + + // Assert. + Assert.IsNotNull(infos); + Assert.AreEqual(expectedInfos.Length, infos.Length); + + int currIndex = 0; + + foreach (var expectedInfo in expectedInfos) + { + HelperMethods.AssertMBRInfoEqual(expectedInfo, infos[currIndex++]); + } + } + + [Test] + public void ReadMBRs_ReadLine_ShouldReturnCorrectValues() + { + // Arrange. + MBRInfo[] infos = null; + + var expectedInfos = new[] + { + new MBRInfo(new Envelope(34.573027972716453, 34.628034609274806, 31.803273460424684, 31.895998933480186), + 100, + 0), + new MBRInfo(new Envelope(34.396692412092257, 34.518021336158107, 31.778756216701534, 31.864880893370035), + 236, + 1), + }; + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("line_wgs84_geo")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + infos = m_Reader.ReadMBRs().ToArray(); + + // Assert. + Assert.IsNotNull(infos); + Assert.AreEqual(2, infos.Length); + + int currIndex = 0; + + foreach (var expectedInfo in expectedInfos) + { + HelperMethods.AssertMBRInfoEqual(expectedInfo, infos[currIndex++]); + } + } + + [Test] + public void ReadMBRs_ReadPolygon_ShouldReturnCorrectValues() + { + // Arrange. + MBRInfo[] infos = null; + + var expectedInfos = new[] + { + new MBRInfo(new Envelope(33.719047819505683, 33.78096814177016, 31.928805665809271, 32.025301664150398), + 100, + 0), + new MBRInfo(new Envelope(33.819000337359398, 33.929011051318348, 31.97406740944362, 32.072449163771559), + 252, + 1), + }; + + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("polygon_ed50_geo")); + + // Act. + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + infos = m_Reader.ReadMBRs().ToArray(); + + // Assert. + Assert.IsNotNull(infos); + Assert.AreEqual(2, infos.Length); + + int currIndex = 0; + + foreach (var expectedInfo in expectedInfos) + { + HelperMethods.AssertMBRInfoEqual(expectedInfo, infos[currIndex++]); + } + } + + [Test] + public void ReadShapeAtOffset_SendNegativeOffset_shouldThrowException() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("polygon intersecting line")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadShapeAtOffset(-1, factory); + }); + } + + [Test] + public void ReadShapeAtOffset_SendOffsetAtEndOfFile_shouldThrowException() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("polygon intersecting line")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Act. 
+ Assert.Catch(() => + { + m_Reader.ReadShapeAtOffset(ShpFiles.Read("polygon intersecting line").Length, factory); + }); + } + + [Test] + public void ReadShapeAtOffset_ReadPolygonWithIntersectingLine_shouldReturnInvalidGeo() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("polygon intersecting line")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + long[] shapeOffsets = { 100, 236 }; + + bool[] expectedValidityResults = new bool[] { false, true }; + + // Act. + for (int i = 0; i < shapeOffsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(shapeOffsets[i], factory); + + // Assert. + Assert.IsNotNull(geo); + Assert.AreEqual(geo.IsValid, expectedValidityResults[i]); + } + } + + [Test] + public void ReadShapeAtOffset_ReadPoint_shouldReturnCorrectValue() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("point_ed50_geo")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + long[] shapeOffsets = { 100, 128, 156 }; + + double[,] expectedCoordinates = {{ 34.282930701329349, 31.851167389309651 }, + { 34.145260222088822, 31.864369159253059 }, + { 34.181721116813314, 31.920632180204553 }}; + + // Act. + for (int i = 0; i < shapeOffsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(shapeOffsets[i], factory); + + // Assert. + Assert.IsNotNull(geo); + Assert.IsTrue(geo.IsValid); + Assert.IsInstanceOf(geo); + var givenPoint = geo as Point; + + HelperMethods.AssertDoubleValuesEqual(givenPoint.X, expectedCoordinates[i, 0]); + HelperMethods.AssertDoubleValuesEqual(givenPoint.Y, expectedCoordinates[i, 1]); + } + } + + [Test] + public void ReadShapeAtOffset_ReadLines_shouldReturnCorrectValue() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("line_wgs84_geo")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + long[] shapeOffsets = { 100, 236 }; + + var expectedLines = new Coordinate[,] + { + { + new Coordinate(34.574599590903837, 31.884368958893564), + new Coordinate(34.57648553272869, 31.803273460424684), + new Coordinate(34.628034609274806, 31.875882220681703), + new Coordinate(34.573027972716453, 31.895998933480186), + new Coordinate(34.582143358203268, 31.886883547993374) + }, + { + new Coordinate(34.448555812275849, 31.864880893370035), + new Coordinate(34.396692412092257, 31.778756216701534), + new Coordinate(34.468672525074325, 31.794158074937872), + new Coordinate(34.484703030585621, 31.844135533296601), + new Coordinate(34.518021336158107, 31.838163384184551) + } + }; + + // Act. + for (int i = 0; i < shapeOffsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(shapeOffsets[i], factory); + + // Assert. + Assert.IsNotNull(geo); + Assert.IsTrue(geo.IsValid); + Assert.IsInstanceOf(geo); + var givenLine = geo as LineString; + + for (int j = 0; j < givenLine.Coordinates.Length; j++) + { + var currPoint = givenLine.Coordinates[j]; + + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedLines[i, j].X); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedLines[i, j].Y); + } + } + } + + [Test] + public void ReadShapeAtOffset_ReadPolygon_shouldReturnCorrectValue() + { + // Arrange. 
+ GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("polygon_ed50_geo")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + long[] shapeOffsets = { 100, 252 }; + + var expectedLines = new Coordinate[,] + { + { + new Coordinate(33.719047819505683, 31.989469320254013), + new Coordinate(33.730049025918099, 32.025301664150398), + new Coordinate(33.771538712027194, 32.008956957757299), + new Coordinate(33.78096814177016, 31.993555297099103), + new Coordinate(33.744507207486457, 31.928805665809271), + new Coordinate(33.719047819505683, 31.989469320254013) + }, + { + new Coordinate(33.821829475819285, 32.051075573685317), + new Coordinate(33.860176141775888, 32.072449163771559), + new Coordinate(33.927125440097875, 32.054847113210094), + new Coordinate(33.929011051318348, 31.97878189417845), + new Coordinate(33.819000337359398, 31.97406740944362), + new Coordinate(33.821829475819285, 32.051075573685317) + } + }; + + // Act. + for (int i = 0; i < shapeOffsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(shapeOffsets[i], factory); + + // Assert. + Assert.IsNotNull(geo); + Assert.IsTrue(geo.IsValid); + Assert.IsInstanceOf(geo); + var givenPoly = geo as Polygon; + + Assert.IsNotNull(givenPoly.ExteriorRing); + Assert.AreSame(givenPoly.ExteriorRing, givenPoly.Shell); + Assert.AreEqual(givenPoly.Shell.Coordinates.Length, expectedLines.GetLength(1)); + + LineString givenLine = givenPoly.Shell; + + for (int j = 0; j < givenLine.Coordinates.Length; j++) + { + var currPoint = givenLine.Coordinates[j]; + + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedLines[i, j].X); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedLines[i, j].Y); + } + } + } + + [Test] + public void ReadShapeAtOffset_ReadAllPolygonsFromUnifiedWithNullAtStart_ShouldReturnCorrectValues() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtStart")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + var expectedResult = new Coordinate[][] + { + new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + }, + new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + } + }; + long[] offsets = { 112, 248 }; + + // Act. + for (int i = 0; i < offsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(offsets[i], factory); + + // Assert. 
+ Assert.IsNotNull(geo); + Assert.IsTrue(geo.IsValid); + Assert.IsInstanceOf(geo); + var givenPoly = geo as Polygon; + + Assert.IsNotNull(givenPoly.ExteriorRing); + Assert.AreSame(givenPoly.ExteriorRing, givenPoly.Shell); + Assert.AreEqual(givenPoly.Shell.Coordinates.Length, expectedResult[i].Length); + + LineString givenLine = givenPoly.Shell; + + for (int j = 0; j < givenLine.Coordinates.Length; j++) + { + var currPoint = givenLine.Coordinates[j]; + + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedResult[i][j].X); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedResult[i][j].Y); + } + } + } + + [Test] + public void ReadShapeAtOffset_ReadAllPolygonsFromUnifiedWithNullInMiddle_ShouldReturnCorrectValues() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullInMiddle")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + var expectedResult = new Coordinate[][] + { + new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + }, + new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + } + }; + long[] offsets = { 100, 248 }; + + // Act. + for (int i = 0; i < offsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(offsets[i], factory); + + // Assert. + Assert.IsNotNull(geo); + Assert.IsTrue(geo.IsValid); + Assert.IsInstanceOf(geo); + var givenPoly = geo as Polygon; + + Assert.IsNotNull(givenPoly.ExteriorRing); + Assert.AreSame(givenPoly.ExteriorRing, givenPoly.Shell); + Assert.AreEqual(givenPoly.Shell.Coordinates.Length, expectedResult[i].Length); + + LineString givenLine = givenPoly.Shell; + + for (int j = 0; j < givenLine.Coordinates.Length; j++) + { + var currPoint = givenLine.Coordinates[j]; + + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedResult[i][j].X); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedResult[i][j].Y); + } + } + } + + [Test] + public void ReadShapeAtOffset_ReadAllPolygonsFromUnifiedWithNullAtEnd_ShouldReturnCorrectValues() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtEnd")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + var expectedResult = new Coordinate[][] + { + new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + }, + new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + } + }; + long[] offsets = { 100, 236 }; + + // Act. + for (int i = 0; i < offsets.Length; i++) + { + var geo = m_Reader.ReadShapeAtOffset(offsets[i], factory); + + // Assert. 
+ Assert.IsNotNull(geo); + Assert.IsTrue(geo.IsValid); + Assert.IsInstanceOf(geo); + var givenPoly = geo as Polygon; + + Assert.IsNotNull(givenPoly.ExteriorRing); + Assert.AreSame(givenPoly.ExteriorRing, givenPoly.Shell); + Assert.AreEqual(givenPoly.Shell.Coordinates.Length, expectedResult[i].Length); + + LineString givenLine = givenPoly.Shell; + + for (int j = 0; j < givenLine.Coordinates.Length; j++) + { + var currPoint = givenLine.Coordinates[j]; + + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedResult[i][j].X); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedResult[i][j].Y); + } + } + } + + [Test] + public void ReadShapeAtOffset_TryReadAfterDisposed_shouldThrowException() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("line_wgs84_geo")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + m_Reader.Dispose(); + Assert.Catch(() => + { + m_Reader.ReadShapeAtOffset(108, factory); + }); + } + + [Test] + public void ReadAllShapes_SendNullFactory_ShouldThrowException() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadAllShapes(null); + }); + } + + [Test] + public void ReadAllShapes_ReadEmptyShapeFile_ShouldReturnEmptyEnumerable() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("EmptyShapeFile")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + // Act. + var geos = m_Reader.ReadAllShapes(factory); + + // Assert. + Assert.IsNotNull(geos); + Assert.IsFalse(geos.Any()); + } + + [Test] + public void ReadAllShapes_ReadPointZM_ShouldReturnCorrectValues() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("shape_PointZM")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + double errorMargin = Math.Pow(10, -6); + + double[,] expectedValues = {{-11348202.6085706, 4503476.68482375}, + {-601708.888562033, 3537065.37906758}, + {-7366588.02885523, -637831.461799072}}; + + // Act. + var shapes = m_Reader.ReadAllShapes(factory); + + // Assert. + Assert.IsNotNull(shapes); + var shapesArr = shapes.ToArray(); + Assert.AreEqual(shapesArr.Length, 3); + + for (int i = 0; i < shapesArr.Length; i++) + { + Assert.IsInstanceOf(shapesArr[i]); + var currPoint = shapesArr[i] as Point; + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedValues[i, 0], errorMargin); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedValues[i, 1], errorMargin); + HelperMethods.AssertDoubleValuesEqual(currPoint.Z, 0); + HelperMethods.AssertDoubleValuesEqual(currPoint.M, double.NaN); + } + } + + [Test] + public void ReadAllShapes_ReadPointZMWithMissingMValues_ShouldReturnCorrectValues() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("shape_pointZM_MissingM values")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + double errorMargin = Math.Pow(10, -6); + + double[,] expectedValues = {{-11348202.6085706, 4503476.68482375}, + {-601708.888562033, 3537065.37906758}, + {-7366588.02885523, -637831.461799072}}; + + // Act. + var shapes = m_Reader.ReadAllShapes(factory); + + // Assert. 
+ Assert.IsNotNull(shapes); + var shapesArr = shapes.ToArray(); + Assert.AreEqual(shapesArr.Length, 3); + + for (int i = 0; i < shapesArr.Length; i++) + { + Assert.IsInstanceOf(shapesArr[i]); + var currPoint = shapesArr[i] as Point; + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedValues[i, 0], errorMargin); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedValues[i, 1], errorMargin); + HelperMethods.AssertDoubleValuesEqual(currPoint.Z, 0); + HelperMethods.AssertDoubleValuesEqual(currPoint.M, double.NaN); + } + } + + [Test] + public void ReadAllShapes_ReadPointM_ShouldReturnCorrectValues() + { + // Arrange. + GeometryFactory factory = new GeometryFactory(); + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("shape_pointM")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + double[,] expectedValues = {{-133.606621226874, 66.8997078870497}, + {-68.0564751703992, 56.4888023369036}, + {-143.246348588121, 40.6796494644596}, + {-82.3232716650438, -21.014605647517}}; + + // Act. + var shapes = m_Reader.ReadAllShapes(factory); + + // Assert. + Assert.IsNotNull(shapes); + var shapesArr = shapes.ToArray(); + Assert.AreEqual(shapesArr.Length, 4); + + for (int i = 0; i < shapesArr.Length; i++) + { + Assert.IsInstanceOf(shapesArr[i]); + var currPoint = shapesArr[i] as Point; + HelperMethods.AssertDoubleValuesEqual(currPoint.X, expectedValues[i, 0]); + HelperMethods.AssertDoubleValuesEqual(currPoint.Y, expectedValues[i, 1]); + HelperMethods.AssertDoubleValuesEqual(currPoint.Z, double.NaN); + HelperMethods.AssertDoubleValuesEqual(currPoint.M, double.NaN); + } + } + + [Test] + public void ReadAllShapes_ReadUnifiedChecksMaterial_ShouldRead2ShapesAndCorrectValues() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. + var shapes = m_Reader.ReadAllShapes(factory).ToArray(); + + Assert.IsNotNull(shapes); + Assert.AreEqual(shapes.Length, 2); + + for (int i = 0; i < shapes.Length; i++) + { + Assert.IsInstanceOf(shapes[i]); + HelperMethods.AssertPolygonsEqual(shapes[i] as Polygon, expectedResult[i]); + } + } + + [Test] + public void ReadAllShapes_ReadAllPolygonsFromUnifiedWithNullAtStart_ShouldReturnCorrectValues() + { + // Arrange. 
+ m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtStart")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. + var shapes = m_Reader.ReadAllShapes(factory).ToArray(); + + Assert.IsNotNull(shapes); + Assert.AreEqual(shapes.Length, 2); + + for (int i = 0; i < shapes.Length; i++) + { + Assert.IsInstanceOf(shapes[i]); + HelperMethods.AssertPolygonsEqual(shapes[i] as Polygon, expectedResult[i]); + } + } + + [Test] + public void ReadAllShapes_ReadAllPolygonsFromUnifiedWithNullInMiddle_ShouldReturnCorrectValues() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullInMiddle")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. + var shapes = m_Reader.ReadAllShapes(factory).ToArray(); + + Assert.IsNotNull(shapes); + Assert.AreEqual(shapes.Length, 2); + + for (int i = 0; i < shapes.Length; i++) + { + Assert.IsInstanceOf(shapes[i]); + HelperMethods.AssertPolygonsEqual(shapes[i] as Polygon, expectedResult[i]); + } + } + + [Test] + public void ReadAllShapes_ReadAllPolygonsFromUnifiedWithNullAtEnd_ShouldReturnCorrectValues() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtEnd")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. 
+ var shapes = m_Reader.ReadAllShapes(factory).ToArray(); + + Assert.IsNotNull(shapes); + Assert.AreEqual(shapes.Length, 2); + + for (int i = 0; i < shapes.Length; i++) + { + Assert.IsInstanceOf(shapes[i]); + HelperMethods.AssertPolygonsEqual(shapes[i] as Polygon, expectedResult[i]); + } + } + + [Test] + public void ReadAllShapes_TryReadAfterDisposed_ShouldThrowException() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + // Act. + m_Reader.Dispose(); + Assert.Catch(() => + { + m_Reader.ReadAllShapes(factory); + }); + } + + [Test] + public void ReadShapeAtIndex_SendNullFactory_ShouldThrowException() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadShapeAtIndex(0, null); + }); + } + + [Test] + public void ReadShapeAtIndex_SendNegativeIndex_ShouldThrowException() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadShapeAtIndex(-1, factory); + }); + } + + [Test] + public void ReadShapeAtIndex_SendOutOfBoundIndex_ShouldThrowException() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + // Act. + Assert.Catch(() => + { + m_Reader.ReadShapeAtIndex(2, factory); + }); + } + + [Test] + public void ReadShapeAtIndex_ReadFirstUnifiedCheckMaterialShape_ShouldReturnRectangle() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + var expectedPolygon = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })); + + // Act. + var polygon = m_Reader.ReadShapeAtIndex(0, factory); + + Assert.IsNotNull(polygon); + Assert.IsInstanceOf(polygon); + HelperMethods.AssertPolygonsEqual(polygon as Polygon, expectedPolygon); + } + + [Test] + public void ReadShapeAtIndex_ReadSecondUnifiedCheckMaterialShape_ShouldReturnTriangle() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + var expectedPolygon = new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })); + + // Act. 
+ var polygon = m_Reader.ReadShapeAtIndex(1, factory); + + Assert.IsNotNull(polygon); + Assert.IsInstanceOf(polygon); + HelperMethods.AssertPolygonsEqual(polygon as Polygon, expectedPolygon); + } + + [Test] + public void ReadShapeAtIndex_ReadUnifiedCheckMaterialWithNullAtStart_ShouldReturnBothShapesCorrectly() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtStart")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. + for (int i = 0; i < expectedResult.Length; i++) + { + var result = m_Reader.ReadShapeAtIndex(i, factory); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + + HelperMethods.AssertPolygonsEqual(expectedResult[i], result as Polygon); + } + } + + [Test] + public void ReadShapeAtIndex_ReadUnifiedCheckMaterialWithNullAtEnd_ShouldReturnBothShapesCorrectly() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullAtEnd")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. + for (int i = 0; i < expectedResult.Length; i++) + { + var result = m_Reader.ReadShapeAtIndex(i, factory); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + + HelperMethods.AssertPolygonsEqual(expectedResult[i], result as Polygon); + } + } + + [Test] + public void ReadShapeAtIndex_ReadUnifiedCheckMaterialWithNulLInMiddle_ShouldReturnBothShapesCorrectly() + { + // Arrange. 
+ m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterialNullInMiddle")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + Polygon[] expectedResult = new Polygon[] + { + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(-0.815656565656566, -0.439393939393939), + new Coordinate(-0.353535353535354, -0.795454545454545), + new Coordinate(-0.888888888888889,-0.929292929292929), + new Coordinate(-1.151515151515152, -0.419191919191919), + new Coordinate(-0.815656565656566,-0.439393939393939), + })), + new Polygon(new LinearRing(new Coordinate[] + { + new Coordinate(0.068181818181818,0.578282828282829), + new Coordinate(0.421717171717172,0.070707070707071), + new Coordinate(-0.457070707070707,0.080808080808081), + new Coordinate(0.068181818181818,0.578282828282829), + })) + }; + + // Act. + for (int i = 0; i < expectedResult.Length; i++) + { + var result = m_Reader.ReadShapeAtIndex(i, factory); + + Assert.IsNotNull(result); + Assert.IsInstanceOf(result); + + HelperMethods.AssertPolygonsEqual(expectedResult[i], result as Polygon); + } + } + + [Test] + public void ReadShapeAtIndex_TryReadAfterDisposed_ShouldThrowException() + { + // Arrange. + m_TmpFile = new TempFileWriter(".shp", ShpFiles.Read("UnifiedChecksMaterial")); + m_Reader = new IO.ShapeFile.Extended.ShapeReader(m_TmpFile.Path); + GeometryFactory factory = new GeometryFactory(); + + // Act. + m_Reader.Dispose(); + Assert.Catch(() => + { + m_Reader.ReadShapeAtIndex(0, factory); + }); + } + + [TearDown] + public void TestCleanup() + { + if (m_Reader != null) + { + m_Reader.Dispose(); + m_Reader = null; + } + + if (m_TmpFile != null) + { + m_TmpFile.Dispose(); + m_TmpFile = null; + } + } + } + + static class ShpFiles + { + public static byte[] Read(string filename) + { + string file = Path.ChangeExtension(filename, "shp"); + string path = Path.Combine(CommonHelpers.TestShapefilesDirectory, file); + Assert.That(File.Exists(path), Is.True, "file not found: " + filename); + return File.ReadAllBytes(path); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapefileDataWriterTests.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapefileDataWriterTests.cs new file mode 100644 index 0000000..55e11bc --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/ShapefileDataWriterTests.cs @@ -0,0 +1,90 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NUnit.Framework; +using System; +using System.Linq; + +namespace NetTopologySuite.IO.Tests.ShapeFile.Extended +{ + /// + /// Contains tests for the shapefile data writer. + /// + [TestFixture] + public class ShapefileDataWriterTests + { + /// + /// Tests creating a header from a feature. 
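+        /// The expected FieldType values asserted below are the ASCII codes of the dBASE
+        /// field type characters: 'N' (78) for numeric, 'C' (67) for character, 'L' (76)
+        /// for logical and 'D' (68) for date fields.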
+ /// + [Test] + public void TestGetHeaderFromFeature() + { + var feature = new Feature(new Point(0, 0), + new AttributesTable()); + feature.Attributes.Add("c_long", (long)12345678900000000); + feature.Attributes.Add("c_ulong", (ulong)12345678900000000); + feature.Attributes.Add("c_int", int.MinValue); + feature.Attributes.Add("c_uint", uint.MinValue); + feature.Attributes.Add("c_short", short.MaxValue); + feature.Attributes.Add("c_ushort", ushort.MaxValue); + feature.Attributes.Add("c_string", string.Empty); + feature.Attributes.Add("c_double", double.MinValue); + feature.Attributes.Add("c_bool", false); + feature.Attributes.Add("c_datetime", new DateTime(1999, 01, 01)); + + var header = ShapefileDataWriter.GetHeader(feature, 1); + + Assert.IsNotNull(header); + Assert.AreEqual(10, header.Fields.Count); + var field = header.Fields.FirstOrDefault(x => x.Name == "c_long"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(18, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_ulong"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(18, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_int"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(10, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_uint"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(10, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_short"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(10, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_ushort"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(10, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_string"); + Assert.IsNotNull(field); + Assert.AreEqual(67, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(254, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_double"); + Assert.IsNotNull(field); + Assert.AreEqual(78, field.FieldType); + Assert.AreEqual(8, field.DecimalCount); + Assert.AreEqual(18, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_bool"); + Assert.IsNotNull(field); + Assert.AreEqual(76, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(1, field.Length); + field = header.Fields.FirstOrDefault(x => x.Name == "c_datetime"); + Assert.IsNotNull(field); + Assert.AreEqual(68, field.FieldType); + Assert.AreEqual(0, field.DecimalCount); + Assert.AreEqual(8, field.Length); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/TempFileWriter.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/TempFileWriter.cs new file mode 100644 index 0000000..216f5b2 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFile.Extended/TempFileWriter.cs @@ -0,0 +1,37 @@ +using System; +using System.IO; + +using NUnit.Framework; + +namespace NetTopologySuite.IO.Tests.ShapeFile.Extended +{ + internal sealed class TempFileWriter : IDisposable + { + public TempFileWriter(string ext, byte[] data) + { + this.Path = 
System.IO.Path.GetFullPath(System.IO.Path.ChangeExtension(TestContext.CurrentContext.Test.ID, ext)); + File.WriteAllBytes(this.Path, data); + } + + ~TempFileWriter() => this.InternalDispose(); + + public string Path { get; } + + public void Dispose() + { + this.InternalDispose(); + GC.SuppressFinalize(this); + } + + private void InternalDispose() + { + try + { + File.Delete(this.Path); + } + catch + { + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFileDataReaderTest.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFileDataReaderTest.cs new file mode 100644 index 0000000..c4efc28 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFileDataReaderTest.cs @@ -0,0 +1,263 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Geometries.Implementation; +using NetTopologySuite.IO.Shapefile; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Data; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + public class ShapeFileDataReaderTest + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + [SetUp] + public void SetUp() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + } + + // see https://code.google.com/p/nettopologysuite/issues/detail?id=175 + [Test] + public void TestIssue175_ReadingShapeFileUsingShpExtension() + { + using (var reader = Shapefile.Core.ShapefileReader.Open("crustal_test.shp")) + { + int length = reader.Fields.Count; + while (reader.Read(out _)) + { + Debug.WriteLine(reader.Fields[(length - 1)]); + } + } + } + + [Test] + public void TestReadingCrustalTestShapeFile() + { + // Original file with characters '°' in NAME field. 
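+            // Whether the '°' comes back intact depends on the encoding the .dbf was
+            // written with; an encoding can also be passed explicitly, as the
+            // chinese_encoding test further down does. A sketch (the code page chosen
+            // here is only an assumption, and code pages such as 1252 additionally need
+            // CodePagesEncodingProvider to be registered on .NET Core):
+            //
+            //     using (var r = Shapefile.Core.ShapefileReader.Open(
+            //         "crustal_test_bugged.shp", Encoding.GetEncoding(1252)))
+            //     {
+            //         // attribute values are then decoded with the supplied code page
+            //     }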
+ using (var reader = Shapefile.Core.ShapefileReader.Open("crustal_test_bugged.shp")) + { + int length = reader.Fields.Count; + while (reader.Read(out _)) + { + Debug.WriteLine(reader.Fields[length - 1]); + } + } + + // Removed NAME field characters + using (var reader = Shapefile.Core.ShapefileReader.Open("crustal_test.shp")) + { + int length = reader.Fields.Count; + while (reader.Read(out _)) + { + Debug.WriteLine(reader.Fields[(length - 1)]); + } + } + } + + [Test] + [Ignore("File aaa.shp not exists")] + public void TestReadingAaaShapeFile() + { + Assert.Catch(() => + { + using (var reader = Shapefile.Core.ShapefileReader.Open("aaa")) + { + int length = reader.Fields.Count; + while (reader.Read(out _)) + { + Debug.WriteLine(reader.Fields[(length - 1)]); + } + } + Assert.Fail(); + }); + } + + [Test] + public void TestReadingShapeFileWithNulls() + { + using (var reader = ShapefileReader.Open("AllNulls")) + { + foreach (var feature in reader) + { + Assert.IsNotNull(feature.Geometry); + Assert.IsNotNull(feature.Attributes); + } + } + } + [Test] + public void TestReadingShapeFileZ() + { + //Use a factory with a coordinate sequence factor that can handle measure values + var factory = new GeometryFactory(DotSpatialAffineCoordinateSequenceFactory.Instance); + + const int distance = 500; + using (var reader = ShapefileReader.Open("with_M")) + { // "" + int index = 0; + + reader.Read(out var geom, out var attributes, out _); + double firstM = geom.GetOrdinates(Ordinate.M).First(); + Assert.AreEqual(400, firstM); + + while (reader.Read(out geom, out attributes, out _)) + { + Assert.IsNotNull(geom); + Assert.IsTrue(geom.IsValid); + Debug.WriteLine(string.Format("Geom {0}: {1}", index++, geom)); + + var buff = geom.Buffer(distance); + Assert.IsNotNull(buff); + + foreach (double m in geom.GetOrdinates(Ordinate.M)) + { + Assert.IsFalse(double.IsNaN(m)); + } + } + } + } + + [Test] + public void TestReadingShapeFileAfvalbakken() + { + var factory = GeometryFactory.Default; + var polys = new List(); + const int distance = 500; + using (var reader = ShapefileReader.Open("afvalbakken.shp")) + { + int index = 0; + foreach (var feature in reader) + { + Assert.IsNotNull(feature.Geometry); + Assert.IsTrue(feature.Geometry.IsValid); + Debug.WriteLine(string.Format("Geom {0}: {1}", index++, feature.Geometry)); + + var buff = feature.Geometry.Buffer(distance); + Assert.IsNotNull(buff); + + var pg = (Polygon)feature.Geometry; + polys.Add(pg); + } + } + + var multiPolygon = factory.CreateMultiPolygon(polys.ToArray()); + Assert.IsNotNull(multiPolygon); + Assert.IsTrue(multiPolygon.IsValid); + + var multiBuffer = (MultiPolygon)multiPolygon.Buffer(distance); + Assert.IsNotNull(multiBuffer); + Assert.IsTrue(multiBuffer.IsValid); + + // ShapefileGeometryWriter.WriteGeometryCollection(@"test_buffer", multiBuffer); + } + + [Test] + public void TestSeptPolygones() + { + const string wktGeom9 = + "MULTIPOLYGON ( " + + "((-73.8706030450129 45.425307895968558, -73.8691180248536 45.425712901466682, -73.862907940551338 45.425949154673731, -73.862739188260548 45.423181617104319, -73.864662964375952 45.423384119853267, -73.8654729753718 45.42220285381751, -73.865979232244342 45.421730347403241, -73.866822993698463 45.42088658594912, -73.866485489116826 45.420481580450996, -73.865202971706537 45.42041407953468, -73.864629213917681 45.421325341905117, -73.864156707503412 45.422236604275611, -73.863481698340081 45.422405356566514, -73.863414197423765 45.421899099693974, -73.863414197423765 45.421190340072485, -73.8635491992564 
45.4200765749531, -73.864122957045254 45.419165312582606, -73.864797966208585 45.419064061208076, -73.866316736825922 45.419030310749974, -73.867092997363727 45.419266563957194, -73.867295500112789 45.419536567622515, -73.867396751487263 45.420751584116317, -73.867092997363727 45.421527844654122, -73.866384237742238 45.422506607941045, -73.866046733160658 45.423215367562364, -73.8669579955311 45.423721624434904, -73.868881771646556 45.423485371227684, -73.8694555294353 45.423417870311312, -73.8700630376822 45.423991628100168, -73.870434292722109 45.424497884972709, -73.8706030450129 45.425307895968558), " + + "(-73.86921927622808 45.425139143677825, -73.868983023020974 45.424464134514437, -73.868544267064863 45.423991628100168, -73.86813926156691 45.423991628100168, -73.867092997363727 45.423991628100168, -73.86533797353917 45.423620373060317, -73.864966718499375 45.424059129016484, -73.864966718499375 45.424497884972709, -73.865304223081068 45.42534164642683, -73.866451738658668 45.425409147343146, -73.86756550377811 45.425274145510514, -73.86921927622808 45.425139143677825), " + + "(-73.865937695291677 45.419884197388171, -73.865599517078863 45.419585804847259, -73.86432637557175 45.4198046260438, -73.864167232883347 45.4205605538138, -73.864565089604355 45.420500875305606, -73.865937695291677 45.419884197388171)), " + + "((-73.868038010192436 45.424869140012561, -73.866620490949458 45.424869140012561, -73.865844230411653 45.424970391386921, -73.865742979037179 45.42436288314002, -73.865979232244342 45.42402537855844, -73.866687991865717 45.424295382223704, -73.867869257901532 45.424396633598121, -73.868038010192436 45.424869140012561), " + + "(-73.86744733356926 45.424703767127937, -73.867371896498639 45.42446991220919, -73.867002254852821 45.424454824795021, -73.866232796733016 45.424432193673908, -73.866345952338861 45.4246509611786, -73.86744733356926 45.424703767127937)), " + + "((-73.86512208901371 45.419923983060187, -73.864604875276427 45.420301946945074, -73.8644059469159 45.420043340076518, -73.86512208901371 45.419923983060187)))"; + + var factory = GeometryFactory.Default; //new GeometryFactory(new PrecisionModel(Math.Pow(10, 13))); + var wktReader = new WKTReader(factory); + var polys = new List(); + using (var reader = ShapefileReader.Open("Sept_polygones")) + { + int index = 0; + foreach (var feature in reader) + { + var geom = feature.Geometry; + Assert.IsNotNull(geom); + Assert.IsTrue(geom.IsValid); + geom.Normalize(); + Debug.WriteLine(string.Format("Geom {0}: {1}", ++index, geom)); + polys.Add(geom); + } + } + + var expected = wktReader.Read(wktGeom9); + expected.Normalize(); + + var e1 = expected.EnvelopeInternal; + var e2 = polys[8].EnvelopeInternal; + Assert.IsTrue(e1.Equals(e2), string.Format("{0}\ndoes not match\n{1}", e1, e2)); + Assert.IsTrue(expected.EqualsTopologically(polys[8]), string.Format("{0}\ndoes not match\n{1}", expected, polys[8])); + } + + [Test] + // see https://code.google.com/p/nettopologysuite/issues/detail?id=167 + public void Issue167_EnsureAllBinaryContentIsReaded() + { + int i = 0; + foreach (var geom in ShapefileReader.ReadAllGeometries("Issue167.shp")) + { + Assert.That(geom, Is.Not.Null, "geom null"); + Console.WriteLine("geom {0}: {1}", ++i, geom); + } + Assert.That(i, Is.EqualTo(201)); + } + + [Test()] + // see https://github.com/NetTopologySuite/NetTopologySuite/issues/112 + public void Issue_GH_112_ReadingShapeFiles() + { + /* + * In this case SHP file is broken. It's records have invalid Content Length value. 
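+             * (For reference: every .shp record header carries a big-endian record number
+             * and a content length counted in 16-bit words, while the .shx index stores,
+             * for each record, its offset in the .shp together with that same content
+             * length, which is what allows the two files to be cross-checked.)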
+ * SHX file has correct Content Legnth value, so it could be used instead. + * But what to do with opposite case when SHX file is broken and SHP is not? + * + * Bearing in mind that nowadays SHX is useless this issue won't be fixed. + + int i = 0; + foreach (var geom in ShapefileReader.ReadAllGeometries("Volume2.shp")) + { + Assert.That(geom, Is.Not.Null, "geom null"); + Console.WriteLine("geom {0}: {1}", ++i, geom); + } + */ + } + + [Test] + [NtsIssueNumber(39)] // see also #52 for input data + public void EnsureSpecifiedEncodingIsUsed() + { + /* + * https://docs.microsoft.com/en-us/dotnet/api/system.text.encoding?redirectedfrom=MSDN&view=net-5.0 + * 936 gb2312 Chinese Simplified (GB2312) + */ + + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + var encoding = Encoding.GetEncoding(936); + Assert.IsNotNull(encoding); + + using (var reader = Shapefile.Core.ShapefileReader.Open("chinese_encoding.shp", encoding)) + { + Assert.AreEqual(encoding, reader.Encoding); + Trace.WriteLine(string.Join(",", reader.Fields.Select(f => f.Name))); + int length = reader.Fields.Count; + while (reader.Read(out _)) + { + Assert.AreEqual("安徽", reader.Fields[1].Value); + } + + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFileDataWriterTest.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFileDataWriterTest.cs new file mode 100644 index 0000000..fd6dbc4 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFileDataWriterTest.cs @@ -0,0 +1,760 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Algorithm.Match; +using NetTopologySuite.Features; +using NetTopologySuite.Geometries.Implementation; +using NUnit.Framework; +using System; +using System.Collections.ObjectModel; +using System.IO; +using System.Text; +using System.Collections.Generic; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.IO.Dbf; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + + [TestFixture] + public class ShapeFileDataWriterTest + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + public ShapeFileDataWriterTest() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + } + + [Test] + public void TestCreateEmptyShapefile() + { + const string filename = "__empty"; + const string emptyShp = filename + ".shp"; + const string emptyShx = filename + ".shx"; + const string emptyDbf = filename + ".dbf"; + if (File.Exists(emptyShp)) + File.Delete(emptyShp); + if (File.Exists(emptyShx)) + File.Delete(emptyShx); + if (File.Exists(emptyDbf)) + File.Delete(emptyDbf); + + var idField = new DbfNumericField("Id"); + using (var writer = ShapefileWriter.Open(filename, ShapeType.Point, idField)) + { + writer.Write(new IFeature[0]); + } + + Assert.That(File.Exists(emptyShp), Is.True); + Assert.That(File.Exists(emptyShx), Is.True); + Assert.That(File.Exists(emptyDbf), Is.True); + } + + [Test] + public void TestWriteZValuesShapeFile() + { + TestWriteZMValuesShapeFile(false); + } + + [Test] + public void TestWriteZMValuesShapeFile() + { + TestWriteZMValuesShapeFile(true); + } + + private void TestWriteZMValuesShapeFile(bool testM) + { + var points = new Coordinate[3]; + points[0] = new Coordinate(0, 0); + points[1] = new Coordinate(1, 0); + points[2] = new Coordinate(1, 1); + + var csFactory = DotSpatialAffineCoordinateSequenceFactory.Instance; + var sequence = csFactory.Create(3, 
Ordinates.XYZM); + for (int i = 0; i < 3; i++) + { + sequence.SetOrdinate(i, Ordinate.X, points[i].X); + sequence.SetOrdinate(i, Ordinate.Y, points[i].Y); + sequence.SetOrdinate(i, Ordinate.Z, 1 + i); + if (testM) + sequence.SetOrdinate(i, Ordinate.M, 11 + i); + } + var lineString = Factory.CreateLineString(sequence); + + var attributes = new AttributesTable(); + attributes.Add("FOO", "Trond"); + + var feature = new Feature(Factory.CreateMultiLineString(new[] { lineString }), attributes); + var features = new Feature[1]; + features[0] = feature; + + features.SaveToShapefile("ZMtest"); + + // Now let's read the file and verify that we got Z and M back + var factory = new GeometryFactory(DotSpatialAffineCoordinateSequenceFactory.Instance); + + using (var reader = ShapefileReader.Open("ZMtest")) + { + var firstFeature = reader.First(); + var geom = firstFeature.Geometry; + + for (int i = 0; i < 3; i++) + { + var c = geom.Coordinates[i]; + Assert.AreEqual(i + 1, c.Z); + } + + if (testM) + { + var ln = (LineString)geom; + sequence = ln.CoordinateSequence; + for (int i = 0; i < 3; i++) + { + Assert.AreEqual(sequence.GetOrdinate(i, Ordinate.M), 11 + i); + } + } + + // Run a simple attribute test too + string v = reader.Fields[0].Value?.ToString(); + Assert.AreEqual(v, "Trond"); + } + } + + [Test] + public void TestWriteSimpleShapeFile() + { + var p1 = Factory.CreatePoint(new Coordinate(100, 100)); + var p2 = Factory.CreatePoint(new Coordinate(200, 200)); + + var coll = new GeometryCollection(new Geometry[] { p1, p2, }); + coll.ToFeatures().SaveToShapefile(@"test_arcview"); + + // Not read by ArcView!!! + } + + [Test] + public void TestReadWritePoint() + { + var geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.Point, Ordinates.XY); + DoTest(geomsWrite, Ordinates.XY); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.Point, Ordinates.XYM); + DoTest(geomsWrite, Ordinates.XYM); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.Point, Ordinates.XYZM); + DoTest(geomsWrite, Ordinates.XYZM); + } + + [Test] + public void TestReadWriteMultiPoint() + { + var geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.MultiPoint, Ordinates.XY); + DoTest(geomsWrite, Ordinates.XY); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.MultiPoint, Ordinates.XYM); + DoTest(geomsWrite, Ordinates.XYM); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.MultiPoint, Ordinates.XYZM); + DoTest(geomsWrite, Ordinates.XYZM); + } + + [Test] + public void TestReadWriteLineal() + { + var geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.LineString, Ordinates.XY); + DoTest(geomsWrite, Ordinates.XY); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.LineString, Ordinates.XYM); + DoTest(geomsWrite, Ordinates.XYM); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.LineString, Ordinates.XYZM); + DoTest(geomsWrite, Ordinates.XYZM); + } + + [Test] + public void TestReadWritePolygonal() + { + var geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.Polygon, Ordinates.XY); + DoTest(geomsWrite, Ordinates.XY); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.Polygon, Ordinates.XYM); + DoTest(geomsWrite, Ordinates.XYM); + geomsWrite = ShapeFileShapeFactory.CreateShapes(OgcGeometryType.Polygon, Ordinates.XYZM); + DoTest(geomsWrite, Ordinates.XYZM, false); + } + + private static void DoTest(GeometryCollection geomsWrite, Ordinates ordinates, bool testGetOrdinate = true) + { + string 
fileName = string.Empty; + + try + { + + fileName = Path.GetTempFileName(); + fileName = Path.ChangeExtension(fileName, "shp"); + + var attr = new AttributesTable(); + attr.Add("Id", 1); + var features = new Feature[geomsWrite.Count]; + for (int i = 0; i < geomsWrite.Count; i++) + { + features[i] = new Feature(geomsWrite[i], attr); + } + features.SaveToShapefile(fileName); + + var geomsRead = ShapefileReader.ReadAllGeometries(fileName).ToGeometryCollection(); + + // This tests x- and y- values + if (!geomsWrite.EqualsExact(geomsRead)) + { + Assert.AreEqual(geomsWrite.NumGeometries, geomsRead.NumGeometries); + // + // This akward test is necessary since EqualsTopologically throws currently exceptions + bool equal = true; + for (int i = 0; i < geomsRead.NumGeometries; i++) + { + var gw = geomsWrite.GetGeometryN(i); + var gr = geomsRead.GetGeometryN(i); + if (gw.IsEmpty && gr.IsEmpty) + { + if ((gw is ILineal && gr is ILineal) || + (gw is IPolygonal && gr is IPolygonal)) + { + // suppose these are equal + } + else + { + Console.WriteLine(string.Format("Geometries don't match at index {0}", i)); + Console.WriteLine(string.Format(" written: {0}", gw.AsText())); + Console.WriteLine(string.Format(" read : {0}", gr.AsText())); + equal = false; + Assert.IsTrue(equal, "Differenced found in geometries written and read!"); + } + } + else if (!gw.EqualsExact(gr)) + { + double hsm = new HausdorffSimilarityMeasure().Measure(gw, gr); + double asm = new AreaSimilarityMeasure().Measure(gw, gr); + double smc = SimilarityMeasureCombiner.Combine(hsm, asm); + if (!gw.EqualsNormalized(gr) || (1d - smc) > 1e-7) + { + Console.WriteLine(string.Format("Geometries don't match at index {0}", i)); + Console.WriteLine(string.Format(" written: {0}", gw.AsText())); + Console.WriteLine(string.Format(" read : {0}", gr.AsText())); + equal = false; + Assert.IsTrue(equal, "Differenced found in geometries written and read!"); + } + } + } + + //For polygons this has a tendency to fail, since the polygonhandler might rearrange the whole thing + if (testGetOrdinate) + { + if ((ordinates & Ordinates.Z) == Ordinates.Z) + { + double[] writeZ = geomsWrite.GetOrdinates(Ordinate.Z); + double[] readZ = geomsRead.GetOrdinates(Ordinate.Z); + Assert.IsTrue(ArraysEqual(writeZ, readZ)); + } + + if ((ordinates & Ordinates.M) == Ordinates.M) + { + double[] writeM = geomsWrite.GetOrdinates(Ordinate.M); + double[] readM = geomsRead.GetOrdinates(Ordinate.M); + Assert.IsTrue(ArraysEqual(writeM, readM)); + } + } + + } + + // delete sample files + File.Delete(fileName); + File.Delete(Path.ChangeExtension(fileName, "shx")); + File.Delete(Path.ChangeExtension(fileName, "dbf")); + } + catch (AssertionException ex) + { + Console.WriteLine("Failed test with {0}", ordinates); + Console.WriteLine(ex.Message); + Console.WriteLine(" Testfile '{0}' not deleted!", fileName); + throw; + } + + } + + private static bool ArraysEqual(double[] writeZ, double[] readZ) + { + if (writeZ == null ^ readZ == null) + return false; + + if (writeZ == null) + return true; + + if (writeZ.Length != readZ.Length) + return false; + + for (int i = 0; i < writeZ.Length; i++) + if (Math.Abs(writeZ[i] - readZ[i]) > 1E-7) return false; + + return true; + } + + private static class ShapeFileShapeFactory + { + public static GeometryCollection CreateShapes(OgcGeometryType type, Ordinates ordinates, int number = 50) + { + bool[] empty = new bool[number]; + empty[Rnd.Next(2, number / 2)] = true; + empty[Rnd.Next(number / 2, number)] = true; + + var result = new Geometry[number]; + for (int 
i = 0; i < number; i++) + { + switch (type) + { + case OgcGeometryType.Point: + result[i] = CreatePoint(ordinates, empty[i]); + break; + case OgcGeometryType.MultiPoint: + result[i] = CreateMultiPoint(ordinates, empty[i]); + break; + + case OgcGeometryType.LineString: + case OgcGeometryType.MultiLineString: + result[i] = CreateLineal(ordinates, empty[i]); + break; + case OgcGeometryType.Polygon: + case OgcGeometryType.MultiPolygon: + result[i] = CreatePolygonal(ordinates, empty[i]); + break; + } + + /* + // Ensure no empty elements + if (result[i] == null || (result[i].IsEmpty && result[i].OgcGeometryType == OgcGeometryType.GeometryCollection)) + i--; + */ + // Ensure not null and not geometry collection + if (result[i] == null || result[i].OgcGeometryType == OgcGeometryType.GeometryCollection) + i--; + } + + return Factory.CreateGeometryCollection(result); + } + + private static readonly Random Rnd = new Random(9936528); + + private static readonly CoordinateSequenceFactory CsFactory = + DotSpatialAffineCoordinateSequenceFactory.Instance; + + public static readonly GeometryFactory FactoryRead = new GeometryFactory(new PrecisionModel(PrecisionModels.Floating), 4326, CsFactory); + + public static readonly GeometryFactory Factory = new GeometryFactory(new PrecisionModel(1000), 4326, CsFactory); + + private static Geometry CreatePoint(Ordinates ordinates, bool empty) + { + if (empty) + { + return Factory.CreatePoint((CoordinateSequence)null); + } + + var seq = CsFactory.Create(1, ordinates); + foreach (var o in ToOrdinateArray(ordinates)) + seq.SetOrdinate(0, o, RandomOrdinate(o, Factory.PrecisionModel)); + return Factory.CreatePoint(seq); + } + + private static Geometry CreateMultiPoint(Ordinates ordinates, bool empty) + { + if (empty) + { + return Factory.CreateMultiPoint((CoordinateSequence)null); + } + int numPoints = Rnd.Next(75, 101); + var seq = CsFactory.Create(numPoints, ordinates); + for (int i = 0; i < numPoints; i++) + foreach (var o in ToOrdinateArray(ordinates)) + seq.SetOrdinate(i, o, RandomOrdinate(o, Factory.PrecisionModel)); + + return Factory.CreateMultiPoint(seq); + } + + private static Geometry CreateLineal(Ordinates ordinates, bool empty) + { + switch (Rnd.Next(2)) + { + case 0: + return CreateLineString(ordinates, empty); + default: + return CreateMultiLineString(ordinates, empty); + } + } + + private static Geometry CreateLineString(Ordinates ordinates, bool empty) + { + if (empty) + { + return Factory.CreateLineString((CoordinateSequence)null); + } + + int numPoints = Rnd.Next(75, 101); + var seq = CsFactory.Create(numPoints, ordinates); + for (int i = 0; i < numPoints; i++) + foreach (var o in ToOrdinateArray(ordinates)) + seq.SetOrdinate(i, o, RandomOrdinate(o, Factory.PrecisionModel)); + + return Factory.CreateLineString(seq); + } + + private static Geometry CreateMultiLineString(Ordinates ordinates, bool empty) + { + if (empty) + { + Factory.CreateMultiLineString(null); + } + + int numLineStrings = Rnd.Next(0, 11); + if (numLineStrings <= 2) + numLineStrings = 0; + + var lineString = new LineString[numLineStrings]; + for (int i = 0; i < numLineStrings; i++) + lineString[i] = (LineString)CreateLineString(ordinates, false); + + return Factory.CreateMultiLineString(lineString); + } + + private static Geometry CreatePolygonal(Ordinates ordinates, bool empty) + { + if (Rnd.Next(2) == 0) + return CreatePolygon(ordinates, empty); + return CreateMultiPolygon(ordinates, empty); + } + + private static Geometry CreatePolygon(Ordinates ordinates, bool empty, int nextKind = 
-1) + { + if (empty) + { + Factory.CreatePolygon((CoordinateSequence)null); + } + if (nextKind == -1) nextKind = Rnd.Next(0, 5); + + double x = RandomOrdinate(Ordinate.X, Factory.PrecisionModel); + double y = RandomOrdinate(Ordinate.Y, Factory.PrecisionModel); + + switch (nextKind) + { + case 0: // circle + var ring = CreateCircleRing(ordinates, x, y, 3 * Rnd.NextDouble()); + return Factory.CreatePolygon(ring, null); + case 1: // rectangle + ring = CreateRectangleRing(ordinates, x, y, 6 * Rnd.NextDouble(), 3 * Rnd.NextDouble()); + return Factory.CreatePolygon(ring, null); + case 2: // cirle with hole + double radius = 3 * Rnd.NextDouble(); + var shell = CreateCircleRing(ordinates, x, y, radius); + var hole = CreateCircleRing(ordinates, x, y, 0.66 * radius, true); + return Factory.CreatePolygon(shell, new[] { hole }); + case 3: // rectanglee with hole + double width = 6 * Rnd.NextDouble(); + double height = 3 * Rnd.NextDouble(); + shell = CreateRectangleRing(ordinates, x, y, width, height); + hole = CreateRectangleRing(ordinates, x, y, 0.66 * width, 0.66 * height, true); + return Factory.CreatePolygon(shell, new[] { hole }); + case 4: // rectanglee with hole + width = 6 * Rnd.NextDouble(); + height = 3 * Rnd.NextDouble(); + shell = CreateRectangleRing(ordinates, x, y, width, height); + hole = CreateCircleRing(ordinates, x, y, 0.33 * Math.Min(width, height), true); + return Factory.CreatePolygon(shell, new[] { hole }); + default: + throw new NotSupportedException(); + } + } + + private static LinearRing CreateCircleRing(Ordinates ordinates, double x, double y, double radius, bool reverse = false) + { + var seq = CsFactory.Create(4 * 12 + 1, ordinates); + double angle = Math.PI * 2; + const double quandrantStep = Math.PI / 2d / 12d; + int k = 0; + for (int i = 0; i < 4; i++) + { + for (int j = 0; j < 12; j++) + { + double dx = radius * Math.Cos(angle); + double dy = radius * Math.Sin(angle); + seq.SetOrdinate(k, Ordinate.X, Factory.PrecisionModel.MakePrecise(x + dx)); + seq.SetOrdinate(k, Ordinate.Y, Factory.PrecisionModel.MakePrecise(y + dy)); + if ((ordinates & Ordinates.Z) == Ordinates.Z) + seq.SetOrdinate(k, Ordinate.Z, RandomOrdinate(Ordinate.Z, Factory.PrecisionModel)); + if ((ordinates & Ordinates.Z) == Ordinates.Z) + seq.SetOrdinate(k, Ordinate.M, RandomOrdinate(Ordinate.M, Factory.PrecisionModel)); + k++; + angle -= quandrantStep; + } + } + seq.SetOrdinate(k, Ordinate.X, seq.GetOrdinate(0, Ordinate.X)); + seq.SetOrdinate(k, Ordinate.Y, seq.GetOrdinate(0, Ordinate.Y)); + if ((ordinates & Ordinates.Z) == Ordinates.Z) + seq.SetOrdinate(k, Ordinate.Z, seq.GetOrdinate(0, Ordinate.Z)); + if ((ordinates & Ordinates.M) == Ordinates.M) + seq.SetOrdinate(k, Ordinate.M, seq.GetOrdinate(0, Ordinate.M)); + + return Factory.CreateLinearRing(reverse ? 
seq.Reversed() : seq); + } + + private static LinearRing CreateRectangleRing(Ordinates ordinates, double x, double y, double width, double height, bool reverse = false) + { + double dx = Factory.PrecisionModel.MakePrecise(width / 2); + double dy = Factory.PrecisionModel.MakePrecise(height / 2); + + var seq = CsFactory.Create(5, ordinates); + + seq.SetOrdinate(0, Ordinate.X, Factory.PrecisionModel.MakePrecise(x - dx)); + seq.SetOrdinate(0, Ordinate.Y, Factory.PrecisionModel.MakePrecise(y - dy)); + seq.SetOrdinate(1, Ordinate.X, Factory.PrecisionModel.MakePrecise(x - dx)); + seq.SetOrdinate(1, Ordinate.Y, Factory.PrecisionModel.MakePrecise(y + dy)); + seq.SetOrdinate(2, Ordinate.X, Factory.PrecisionModel.MakePrecise(x + dx)); + seq.SetOrdinate(2, Ordinate.Y, Factory.PrecisionModel.MakePrecise(y + dy)); + seq.SetOrdinate(3, Ordinate.X, Factory.PrecisionModel.MakePrecise(x + dx)); + seq.SetOrdinate(3, Ordinate.Y, Factory.PrecisionModel.MakePrecise(y - dy)); + seq.SetOrdinate(4, Ordinate.X, Factory.PrecisionModel.MakePrecise(x - dx)); + seq.SetOrdinate(4, Ordinate.Y, Factory.PrecisionModel.MakePrecise(y - dy)); + + if ((ordinates & (Ordinates.Z | Ordinates.M)) != Ordinates.None) + { + int k = 0; + for (; k < 4; k++) + { + if ((ordinates & Ordinates.Z) == Ordinates.Z) + seq.SetOrdinate(k, Ordinate.Z, RandomOrdinate(Ordinate.Z, Factory.PrecisionModel)); + if ((ordinates & Ordinates.Z) == Ordinates.Z) + seq.SetOrdinate(k, Ordinate.M, RandomOrdinate(Ordinate.M, Factory.PrecisionModel)); + } + if ((ordinates & Ordinates.Z) == Ordinates.Z) + seq.SetOrdinate(k, Ordinate.Z, seq.GetOrdinate(0, Ordinate.Z)); + if ((ordinates & Ordinates.M) == Ordinates.M) + seq.SetOrdinate(k, Ordinate.M, seq.GetOrdinate(0, Ordinate.M)); + } + + return Factory.CreateLinearRing(reverse ? 
seq.Reversed() : seq); + } + + private static Geometry CreateMultiPolygon(Ordinates ordinates, bool empty) + { + if (empty) + { + Factory.CreateMultiPolygon(null); + } + + switch (Rnd.Next(2)) + { + case 0: + int numPolygons = Rnd.Next(4); + var polygons = new Polygon[numPolygons]; + for (int i = 0; i < numPolygons; i++) + polygons[i] = (Polygon)CreatePolygon(ordinates, false); + return Factory.BuildGeometry(new Collection(polygons)).Union(); + + case 1: + polygons = new Polygon[2]; + double radius = 5 * Rnd.NextDouble(); + double x = RandomOrdinate(Ordinate.X, Factory.PrecisionModel); + double y = RandomOrdinate(Ordinate.Y, Factory.PrecisionModel); + var shell = CreateCircleRing(ordinates, x, y, radius); + var hole = CreateCircleRing(ordinates, x, y, 0.66 * radius, true); + polygons[0] = Factory.CreatePolygon(shell, new[] { hole }); + shell = CreateCircleRing(ordinates, x, y, 0.5 * radius); + hole = CreateCircleRing(ordinates, x, y, 0.15 * radius, true); + polygons[1] = Factory.CreatePolygon(shell, new[] { hole }); + return Factory.CreateMultiPolygon(polygons); + + default: + throw new NotSupportedException(); + } + } + + private static double RandomOrdinate(Ordinate o, PrecisionModel pm) + { + switch (o) + { + case Ordinate.X: + return pm.MakePrecise(-180 + 360 * Rnd.NextDouble()); + case Ordinate.Y: + return pm.MakePrecise(-90 + 180 * Rnd.NextDouble()); + case Ordinate.Z: + return 200 * Rnd.NextDouble(); + case Ordinate.M: + return 200 + 200 * Rnd.NextDouble(); + default: + throw new NotSupportedException(); + } + } + } + + [Test /*, ExpectedException(typeof(ArgumentException))*/] + // see https://code.google.com/p/nettopologysuite/issues/detail?id=146 + public void Issue146_ShapeCreationWithInvalidAttributeName() + { + var points = new Coordinate[3]; + points[0] = new Coordinate(0, 0); + points[1] = new Coordinate(1, 0); + points[2] = new Coordinate(1, 1); + var ls = new LineString(points); + var mls = GeometryFactory.Default.CreateMultiLineString(new LineString[] { ls }); + + var attrs = new AttributesTable(); + attrs.Add("Simulation name", "FOO"); + + var features = new[] { new Feature(mls, attrs) }; + + Assert.Throws(() => features.SaveToShapefile("invalid_line_string")); + + //Assert.Throws(() => shp_writer.Write(features)); + } + + [Test/*, ExpectedException(typeof(ArgumentException))*/] + // see: https://github.com/NetTopologySuite/NetTopologySuite/issues/111 + public void issue_111_pointhandler_with_invalid_values() + { + var factory = GeometryFactory.Default; + var points = new Coordinate[3]; + points[0] = new CoordinateZ(0, 0); + points[1] = new CoordinateZ(1, 0); + points[2] = new CoordinateZ(1, 1); + + var p = factory.CreatePoint(new CoordinateZ(0, 0)); + var ln = factory.CreateLineString(points); + Geometry[] arr = { p, ln }; // GeometryCollection.Empty }; <= Empty GeometryCollection is perfect NullShape + var geometries = factory.CreateGeometryCollection(arr); + var features = geometries.ToFeatures(); + + var shapeType = geometries.GetShapeType(); + Assert.AreEqual(ShapeType.PointZM, shapeType); + + string tempPath = Path.GetFileNameWithoutExtension(Path.GetTempFileName()); + Assert.Throws(() => features.SaveToShapefile(tempPath)); + } + + [Test/*, ExpectedException(typeof(ArgumentException))*/] + // see: https://github.com/NetTopologySuite/NetTopologySuite/issues/111 + public void issue_111_multiPointhandler_with_invalid_values() + { + var factory = GeometryFactory.Default; + var points = new Coordinate[3]; + points[0] = new CoordinateZ(0, 0); + points[1] = new 
CoordinateZ(1, 0); + points[2] = new CoordinateZ(1, 1); + + var p = factory.CreatePoint(new CoordinateZ(0, 0)); + var mp = factory.CreateMultiPoint(new[] { p }); + var ln = factory.CreateLineString(points); + Geometry[] arr = { mp, ln}; // GeometryCollection.Empty }; <= Empty GeometryCollection is perfect NullShape + var geometries = factory.CreateGeometryCollection(arr); + var features = geometries.ToFeatures(); + + var shapeType = geometries.GetShapeType(); + Assert.AreEqual(ShapeType.MultiPointZM, shapeType); + + string tempPath = Path.GetFileNameWithoutExtension(Path.GetTempFileName()); + Assert.Throws(() => features.SaveToShapefile(tempPath)); + + } + + [Test/*, ExpectedException(typeof(ArgumentException))*/] + // see: https://github.com/NetTopologySuite/NetTopologySuite/issues/111 + public void issue_111_multilinehandler_with_invalid_values() + { + var factory = GeometryFactory.Default; + + var points = new Coordinate[3]; + points[0] = new CoordinateZ(0, 0); + points[1] = new CoordinateZ(1, 0); + points[2] = new CoordinateZ(1, 1); + var ls = factory.CreateLineString(points); + + var mls = factory.CreateMultiLineString(new[] { ls }); + var p = factory.CreatePoint(new CoordinateZ(0, 0)); + Geometry[] arr = { mls, p }; // GeometryCollection.Empty }; <= Empty GeometryCollection is perfect NullShape + var geometries = factory.CreateGeometryCollection(arr); + var features = geometries.ToFeatures(); + + var shapeType = geometries.GetShapeType(); + Assert.AreEqual(ShapeType.PolyLineZM, shapeType); + + string tempPath = Path.GetFileNameWithoutExtension(Path.GetTempFileName()); + Assert.Throws(() => features.SaveToShapefile(tempPath)); + } + + [Test/*, ExpectedException(typeof(ArgumentException))*/] + // see: https://github.com/NetTopologySuite/NetTopologySuite/issues/111 + public void issue_111_polygonhandler_with_invalid_values() + { + var factory = GeometryFactory.Default; + + var points = new Coordinate[5]; + points[0] = new CoordinateZ(0, 0); + points[1] = new CoordinateZ(1, 0); + points[2] = new CoordinateZ(1, 1); + points[3] = new CoordinateZ(0, 1); + points[4] = new CoordinateZ(0, 0); + var polygon = factory.CreatePolygon(points); + + var multiPoligon = factory.CreateMultiPolygon(new[] { polygon }); + var line = factory.CreateLineString(points); + Geometry[] arr = { multiPoligon, line }; // GeometryCollection.Empty }; <= Empty GeometryCollection is perfect NullShape + var geometries = factory.CreateGeometryCollection(arr); // { MultiPolygon, GeometryCollection } + + var shapeType = geometries.GetShapeType(); + Assert.AreEqual(ShapeType.PolygonZM, shapeType); + + string tempPath = Path.GetTempFileName(); + var features = geometries.ToFeatures(); + + Assert.Throws(() => features.SaveToShapefile(tempPath)); + } + + private static Ordinate[] ToOrdinateArray(Ordinates ordinates) + { + var result = new Ordinate[OrdinatesUtility.OrdinatesToDimension(ordinates)]; + int nextIndex = 0; + for (int i = 0; i < 32; i++) + { + if (ordinates.HasFlag((Ordinates)(1 << i))) + { + result[nextIndex++] = (Ordinate)i; + } + } + + return result; + } + + [Test, ShapeFileIssueNumber(24)] + public void WriteShouldWriteMultiPoints() + { + var attribs = new AttributesTable + { + { "Id", 10 } + }; + var coors = new Coordinate[2] + { + new Coordinate(123.0, 023.0), + new Coordinate(123.0, 100.0) + }; + var points = Factory.CreateMultiPointFromCoords(coors); + var feature = new Feature(points, attribs); + var features = new[] { feature }; + + var filename = Path.ChangeExtension(Path.GetTempFileName(), ".shp"); + 
features.SaveToShapefile(filename); + + foreach (var readFeature in ShapefileReader.ReadAll(filename)) + { + Assert.IsNotNull(readFeature.Geometry); + Assert.IsInstanceOf(readFeature.Geometry); + Assert.AreEqual("Id", feature.Attributes.GetNames()[0]); + Assert.AreEqual(10, feature.Attributes["Id"]); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFileEncodingTest.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFileEncodingTest.cs new file mode 100644 index 0000000..f698cd9 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFileEncodingTest.cs @@ -0,0 +1,56 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using NUnit.Framework; +using System.Collections.Generic; +using System.Text; +using NetTopologySuite.IO.Shapefile; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + public class ShapeFileEncodingTest + { + [SetUp] + public void Setup() + { + var idColumn = new DbfNumericField("id", 8, 0); + var testColumn = new DbfCharacterField("Test", 15); + var alderColumn = new DbfNumericField("Ålder", 8, 0); + var odestextColumn = new DbfCharacterField("Ödestext", 254); + + var at = new AttributesTable(); + at.Add("id", "0"); + at.Add("Test", "Testar"); + at.Add("Ålder", 10); + at.Add("Ödestext", "Lång text med åäö etc"); + + var feature = new Feature(new Point(0, 0), at); + var columns = new DbfField[] {idColumn, testColumn, alderColumn, odestextColumn}; + + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + var encoding = Encoding.GetEncoding(1252); + using (var shp = new ShapefilePointWriter("encoding_sample.shp", Shapefile.ShapeType.Point, columns, encoding)) + { + shp.Write(feature); + } + } + + [Test] + public void TestLoadShapeFileWithEncoding() + { + using (var shp = Shapefile.Core.ShapefileReader.Open("encoding_sample.shp")) + { + Assert.AreEqual(shp.Encoding, CodePagesEncodingProvider.Instance.GetEncoding(1252), "Invalid encoding!"); + + Assert.AreEqual(shp.Fields[1].Name, "Test"); + Assert.AreEqual(shp.Fields[2].Name, "Ålder"); + Assert.AreEqual(shp.Fields[3].Name, "Ödestext"); + + Assert.IsTrue(shp.Read(out var deleted), "Error reading file"); + Assert.AreEqual(shp.Fields["Test"].Value, "Testar"); + Assert.AreEqual(shp.Fields["Ödestext"].Value, "Lång text med åäö etc"); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFileInvalidHeaderTest.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeFileInvalidHeaderTest.cs new file mode 100644 index 0000000..e673c8d --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFileInvalidHeaderTest.cs @@ -0,0 +1,34 @@ +using NetTopologySuite.IO.Dbf; +using NUnit.Framework; +using System.Collections; +using System.IO; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + [TestFixture] + [Ignore("Sample file(s) not published")] + public class ShapeFileInvalidHeaderTest + { + private readonly string _invalidPath = Path.Combine(CommonHelpers.TestShapefilesDirectory, "invalidheader.shp"); + + [Test] + public void TestInvalidShapeFile() + { + /* + var s = new NetTopologySuite.IO.ShapefileReader(_invalidPath); + var sh = s.Header; + var g = s.ReadAll(); + */ + string dbfFile = Path.ChangeExtension(_invalidPath, ".dbf"); + using (var dbf = new DbfReader(dbfFile)) + { + + var de = (dbf as IEnumerable).GetEnumerator(); + Assert.IsNull(de.Current); + de.MoveNext(); + Assert.IsNotNull(de.Current); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeFileIssueNumberAttribute.cs 
b/test/NetTopologySuite.IO.Esri.Test/ShapeFileIssueNumberAttribute.cs new file mode 100644 index 0000000..c097fc8 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeFileIssueNumberAttribute.cs @@ -0,0 +1,20 @@ +using System; +using NUnit.Framework; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + /// + /// The issue number used in this test (or fixture) refers to an issue on + /// https://github.com/NetTopologySuite/NetTopologySuite.IO.ShapeFile, created + /// after this project was split out on its own (and thus, it got its own + /// set of issue numbers). + /// + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)] + public sealed class ShapeFileIssueNumberAttribute : PropertyAttribute + { + public ShapeFileIssueNumberAttribute(int issueNumber) + : base("NetTopologySuite.IO.ShapeFile issue", issueNumber) + { + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/ShapeRead.cs b/test/NetTopologySuite.IO.Esri.Test/ShapeRead.cs new file mode 100644 index 0000000..929445c --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/ShapeRead.cs @@ -0,0 +1,151 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; +using System; +using System.IO; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + /// + /// + /// + public class ShapeRead + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + /// + /// + /// + public ShapeRead() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + } + + /// + /// + /// + public void Start() + { + //TestBugMultipolygonHShuntao(); + //TestBugCimino(); + + //// Bug with a.shp and b.shp and intersection + //GeometryCollection aColl = ReadShape("a.shp"); + //GeometryCollection bColl = ReadShape("b.shp"); + //Geometry result = aColl.Intersection(bColl); + + //// Point shapefile + //TestShapeReadWrite("tnp_pts.shp", "Test_tnp_pts.shp"); + + //// Arc shapefile + TestShapeReadWrite("tnp_arc.shp", "arc.shp"); + TestShapeReadWrite("Stato_Fatto.shp", "Test_Stato_Fatto.shp"); + TestShapeReadWrite("Stato_Progetto.shp", "Test_Stato_Progetto.shp"); + TestShapeReadWrite("Zone_ISTAT.shp", "Test_Zone_ISTAT.shp"); + TestShapeReadWrite("Strade.shp", "Test_Strade.shp"); + + //// Polygon shapefile + //TestShapeReadWrite("tnp_pol.shp", "Test_tnp_pol.shp"); + + //// MultiPoint shapefile + //TestShapeReadWrite("tnp_multiPoint.shp", "Test_tnp_multiPoint.shp"); + + // TestShapeReadWrite("a.shp", "Test_a.shp"); + // TestShapeReadWrite("b.shp", "Test_b.shp"); + } + + //private void TestBugMultipolygonHShuntao() + //{ + // GeometryCollection gc1 = null; + // GeometryCollection gc2 = null; + // string file = "BJmultipolygon.shp"; + // if (!File.Exists(file)) + // throw new FileNotFoundException(); + + // // Test with Default ShapefileReader + // try + // { + // var sfr = new ShapefileReader(file); + // gc1 = sfr.ReadAll(); + // } + // catch (Exception ex) + // { + // throw ex; + // } + + // //// Test with MyShapefileReader (only for debug purpose!) 
+ // //try + // //{ + // // MyShapeFileReader reader = new MyShapeFileReader(); + // // gc2 = reader.Read(file); + // //} + // //catch (Exception ex) + // //{ + // // throw ex; + // //} + + // //// Check for equality + // //if (!gc1.EqualsExact(gc2)) + // // throw new TopologyException("Both geometries must be equals!"); + //} + + //private void TestBugCimino() + //{ + // try + // { + // string file = "countryCopy.shp"; + // if (!File.Exists(file)) + // throw new FileNotFoundException(); + + // var sfr = new ShapefileReader(file); + + // var gc = sfr.ReadAll(); + // for (int i = 0; i < gc.NumGeometries; i++) + // Console.WriteLine(i + " " + gc.Geometries[i].Envelope); + + // // IsValidOp.CheckShellsNotNested molto lento nell'analisi di J == 7 (Poligono con 11600 punti) + // string write = Path.Combine(Path.GetTempPath(), "copy_countryCopy"); + // var sfw = new ShapefileWriter(gc.Factory); + // sfw.Write(write, gc); + // Console.WriteLine("Write Complete!"); + // } + // catch (Exception ex) + // { + // throw ex; + // } + //} + + private static GeometryCollection ReadShape(string shapepath) + { + if (!File.Exists(shapepath)) + throw new ArgumentException("File " + shapepath + " not found!"); + + var geometries = ShapefileReader.ReadAllGeometries(shapepath).ToGeometryCollection(); + return geometries; + } + + private static void WriteShape(GeometryCollection geometries, string shapepath) + { + if (File.Exists(shapepath)) + File.Delete(shapepath); + //var sfw = new ShapefileGeometryWriter(geometries.Factory); + //ShapefileGeometryWriter.WriteGeometryCollection(Path.GetFileNameWithoutExtension(shapepath), geometries); + } + + private static void TestShapeReadWrite(string shapepath, string outputpath) + { + var collection = ReadShape(shapepath); + WriteShape(collection, outputpath); + var testcollection = ReadShape(outputpath); + + if (!collection.EqualsExact(testcollection)) + throw new ArgumentException("Geometries are not equals"); + Console.WriteLine("TEST OK!"); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/AllNulls.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/AllNulls.dbf new file mode 100644 index 0000000..ba2881d Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/AllNulls.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/AllNulls.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/AllNulls.shp new file mode 100644 index 0000000..272ad87 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/AllNulls.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/CA_Cable_region.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/CA_Cable_region.dbf new file mode 100644 index 0000000..c58bd76 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/CA_Cable_region.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/CA_Cable_region.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/CA_Cable_region.shp new file mode 100644 index 0000000..ffe2306 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/CA_Cable_region.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/EmptyShapeFile.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/EmptyShapeFile.shp new file mode 100644 index 0000000..29de63a Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/EmptyShapeFile.shp differ diff --git 
a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Issue167.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Issue167.shp new file mode 100644 index 0000000..25fc301 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Issue167.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Sept_polygones.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Sept_polygones.dbf new file mode 100644 index 0000000..68480aa Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Sept_polygones.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Sept_polygones.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Sept_polygones.shp new file mode 100644 index 0000000..276fd1b Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Sept_polygones.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/US_DMA_region.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/US_DMA_region.dbf new file mode 100644 index 0000000..09c0f19 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/US_DMA_region.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/US_DMA_region.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/US_DMA_region.shp new file mode 100644 index 0000000..752e1fe Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/US_DMA_region.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterial.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterial.dbf new file mode 100644 index 0000000..241ab8b Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterial.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterial.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterial.shp new file mode 100644 index 0000000..675a882 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterial.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullAtEnd.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullAtEnd.shp new file mode 100644 index 0000000..7accf20 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullAtEnd.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullAtStart.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullAtStart.shp new file mode 100644 index 0000000..44cfa8c Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullAtStart.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullInMiddle.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullInMiddle.shp new file mode 100644 index 0000000..dc121e8 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/UnifiedChecksMaterialNullInMiddle.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.dbf new file mode 100644 index 0000000..b76934f Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.dbf differ diff --git 
a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.shp new file mode 100644 index 0000000..f5326f3 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.shx b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.shx new file mode 100644 index 0000000..665af3e Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Victoria North.shx differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Volume2.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Volume2.shp new file mode 100644 index 0000000..2cedba2 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/Volume2.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/afvalbakken.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/afvalbakken.dbf new file mode 100644 index 0000000..e6f05f2 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/afvalbakken.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/afvalbakken.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/afvalbakken.shp new file mode 100644 index 0000000..33c40b9 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/afvalbakken.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.dbf b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.dbf new file mode 100644 index 0000000..e919183 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.dbf differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.shp new file mode 100644 index 0000000..13c8fab Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.shp differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.shx b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.shx new file mode 100644 index 0000000..1c684b1 Binary files /dev/null and b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/chinese_encoding.shx differ diff --git a/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/christchurch-canterbury-h.shp b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/christchurch-canterbury-h.shp new file mode 100644 index 0000000..1a7281b --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/TestShapefiles/christchurch-canterbury-h.shp @@ -0,0 +1,592 @@ + ' + z  /d se@w2EYo,e@> |~E  + )(Pe@^"SE  + $ Ξe@ܗE  + =* + gp1e@4+zE ? + ᩰe@\QB-E @ + __e@Z E A + Ede@!0E B + 5jLe@jE C + Tbe@Ԟo_E D + B=%e@"^E E + ܯe@E F + >:[e@aœgE G + Ub)e@)_P}%E H + e@\KE I + |*e@CmɌE J + iEe@BpuE K + B˜e@2E L + 6Te@3%E M + !pe@x]:E N + cΰe@dE O + XVe@X |~E \ + 8#)e@ʟx5E ] + J6e@PE ^ + ,ڴee@ fE _ + ze@zDME ` + j^e@ 絨E a + J22œe@*E b + Fe@IE c + غe@;uCE d + *fe@xE e + HĖe@N-u.E f + 9Ɖe@nE g + d#e@E h + f e@u#E i + ×e@eE j + KYUBe@ȽzE k + ԧe@1E { + ƪƀ=e@5E | + bue@+lE } + *y?ke@'E ~ + ~ue@-QE  + 2yJNe@!\E + e@ݹ>=E + n۰Ye@sI<E + z/Ϟe@IE + e@sݭE + ءje@`)E + 3i"e@)E + e@C-E + R!e@*`~E + ue@70E + Bpٞe@d=>E + X,e@. 
+ @9e@ԫC>E / + @e@: SE 0 + ZILe@1wE 1 + U; ge@l,E 2 + h1e@Z8E 3 + ar؞e@AE 4 + N^oe@M CDE 5 + x;ݤe@uE 6 + 3cAye@ߏu"E 7 + lQe@BE 8 + J"e@4VE 9 + RYoe@o6E : + Me@ُޙE ; + e@ geometries) + { + return new GeometryCollection(geometries.ToArray()); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/CascadedPolygonUnionFixture.cs b/test/NetTopologySuite.IO.Esri.Test/Various/CascadedPolygonUnionFixture.cs new file mode 100644 index 0000000..9f44a9a --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/CascadedPolygonUnionFixture.cs @@ -0,0 +1,42 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.Operation.Overlay; +using NetTopologySuite.Operation.Overlay.Snap; +using NetTopologySuite.Operation.Union; +using NUnit.Framework; +using System; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test.Various +{ + [TestFixture] + public class CascadedPolygonUnionFixture + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + public CascadedPolygonUnionFixture() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + } + + [Test] + public void PerformCascadedPolygonUnion() + { + var collection = ShapefileReader.ReadAllGeometries("tnp_pol.shp").ToList(); + + var u1 = collection[0]; + for (int i = 1; i < collection.Count; i++) + u1 = SnapIfNeededOverlayOp.Overlay(u1, collection[i], SpatialFunction.Union); + + var u2 = CascadedPolygonUnion.Union(collection); + if (!u1.EqualsTopologically(u2)) + Assert.Fail("failure"); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/GraphBuilder2.cs b/test/NetTopologySuite.IO.Esri.Test/Various/GraphBuilder2.cs new file mode 100644 index 0000000..9cf77f0 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/GraphBuilder2.cs @@ -0,0 +1,304 @@ +using NetTopologySuite.Geometries; +using QuickGraph; +using QuickGraph.Algorithms.Observers; +using QuickGraph.Algorithms.ShortestPath; +using System; +using System.Collections.Generic; + +namespace NetTopologySuite.IO.ShapeFile.Test.Various +{ + + /// + /// A class that manages shortest path computation. + /// + public class GraphBuilder2 + { + #region Delegates + + /// + /// A delegate that defines how to calculate the weight + /// of a line. + /// + /// A line. + /// The weight of the line. + public delegate double ComputeWeightDelegate(LineString line); + + #endregion + + private static readonly ComputeWeightDelegate DefaultComputer = line => line.Length; + + private readonly bool bidirectional; + + private readonly AdjacencyGraph> graph; + private readonly IList strings; + private IDictionary, double> consts; + private GeometryFactory factory; + + /// + /// Initializes a new instance of the class. + /// + /// + /// Specify if the graph must be build using both edges directions. + /// + public GraphBuilder2(bool bidirectional) + { + this.bidirectional = bidirectional; + + factory = null; + strings = new List(); + graph = new AdjacencyGraph>(true); + } + + /// + /// Initializes a new instance of the class, + /// using a directed graph. + /// + public GraphBuilder2() : this(false) + { + } // TODO: maybe the default value must be true... + + /// + /// Adds each line to the graph structure. + /// + /// + /// + /// true if all lines + /// are added, false otherwise. + /// + /// + /// If geometries don't have the same factory. 
+ /// + public bool Add(params LineString[] lines) + { + bool result = true; + foreach (var line in lines) + { + var newfactory = line.Factory; + if (factory == null) + factory = newfactory; + else if (!newfactory.PrecisionModel.Equals(factory.PrecisionModel)) + throw new TopologyException("all geometries must have the same precision model"); + + bool lineFound = strings.Contains(line); + result &= !lineFound; + if (!lineFound) + strings.Add(line); + else continue; // Skip vertex check because line is already present + + foreach (var coord in line.Coordinates) + { + if (!graph.ContainsVertex(coord)) + graph.AddVertex(coord); + } + } + return result; + } + + /// + /// Initialize the algorithm using the default + /// weight computer, + /// that uses string length + /// as weight value. + /// + /// + /// If you've don't added two or more geometries to the builder. + /// + /// + /// If builder is already initialized. + /// + public void Initialize() + { + BuildEdges(DefaultComputer); + } + + /// + /// Initialize the algorithm using the specified + /// weight computer + /// + /// + /// A function that computes the weight + /// of any edge of the graph. + /// + /// + /// If you've don't added two or more geometries to the builder. + /// + /// + /// If builder is already initialized. + /// + public void Initialize(ComputeWeightDelegate computer) + { + BuildEdges(computer); + } + + /// + /// + /// + /// + private void BuildEdges(ComputeWeightDelegate computer) + { + if (strings.Count < 2) + throw new TopologyException("you must specify two or more geometries to build a graph"); + + // Counts the number of edges in the set we pass to this method. + int numberOfEdgesInLines = 0; + foreach (var str in strings) + { + int edges = str.Coordinates.GetUpperBound(0); + numberOfEdgesInLines += edges; + } + + // Double values because we use also reversed edges... + if (bidirectional) + numberOfEdgesInLines *= 2; + + consts = new Dictionary, double>(numberOfEdgesInLines); + + foreach (var line in strings) + { + // A line has to have at least two dimensions + int bound = line.Coordinates.GetUpperBound(0); + if (bound > 0) + { + for (int counter = 0; counter < bound; counter++) + { + // Prepare a segment + var src = line.Coordinates[counter]; + var dst = line.Coordinates[counter + 1]; + + // Here we calculate the weight of the edge + var lineString = factory.CreateLineString( + new[] { src, dst, }); + double weight = computer(lineString); + + // Add the edge + IEdge localEdge = new Edge(src, dst); + graph.AddEdge(localEdge); + consts.Add(localEdge, weight); + + if (!bidirectional) + continue; + + // Add the reversed edge + IEdge localEdgeRev = new Edge(dst, src); + graph.AddEdge(localEdgeRev); + consts.Add(localEdgeRev, weight); + } + } + } + } + + /// + /// Carries out the shortest path anlayis between the two + /// nodes + /// passed as variables and returns an + /// giveing the shortest path. + /// + /// The source geom + /// The destination geom + /// A line string geometric shape of the path + public LineString Perform(Geometry source, Geometry destination) + { + return Perform(source.Coordinate, destination.Coordinate); + } + + /// + /// Carries out the shortest path between the two nodes + /// ids passed as variables and returns an + /// giveing the shortest path. 
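+        /// (Both coordinates must already be vertices of the graph, i.e. coordinates of lines previously passed to Add; otherwise an ArgumentException is thrown.)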
+ /// + /// The source node + /// The destination node + /// A line string geometric shape of the path + public LineString Perform(Coordinate source, Coordinate destination) + { + if (!graph.ContainsVertex(source)) + throw new ArgumentException("key not found in the graph", "source"); + if (!graph.ContainsVertex(destination)) + throw new ArgumentException("key not found in the graph", "destination"); + + // Build algorithm + var dijkstra = + new DijkstraShortestPathAlgorithm>(graph, edge => consts[edge]); + + // Attach a Distance observer to give us the distances between edges + var distanceObserver = + new VertexDistanceRecorderObserver>(edge => consts[edge]); + distanceObserver.Attach(dijkstra); + + // Attach a Vertex Predecessor Recorder Observer to give us the paths + var predecessorObserver = + new VertexPredecessorRecorderObserver>(); + predecessorObserver.Attach(dijkstra); + + // Run the algorithm with A set to be the source + dijkstra.Compute(source); + + // Get the path computed to the destination. + IEnumerable> path; + bool result = predecessorObserver.TryGetPath(destination, out path); + + // Then we need to turn that into a geomery. + return result ? BuildString(new List>(path)) : null; + + // if the count is greater than one then a + // path could not be found, so we return null + } + + /// + /// Takes the path returned from QuickGraph library and uses the + /// list of coordinates to reconstruct the path into a geometric + /// "shape" + /// + /// Shortest path from the QucikGraph Library + /// + private LineString BuildString(IList> path) + { + // if the path has no links then return a null reference + if (path.Count < 1) + return null; + + // if we get here then we now that there is at least one + // edge in the path. + var links = new Coordinate[path.Count + 1]; + + // Add each node to the list of coordinates in to the array. + int i; + for (i = 0; i < path.Count; i++) + links[i] = path[i].Source; + + // Add the target node to the last loction in the list + links[i] = path[i - 1].Target; + + // Turn the list of coordinates into a geometry. + var thePath = factory.CreateLineString(links); + return thePath; + } + + /// + /// Outputs the graph as a DIMACS Graph + /// + /// The name of the output graph + /// Indicates if the method was worked. + public bool WriteAsDIMACS(string fileName) + { + // The DIMACS format is a reasonabley standard method + // of preparing graphs for analys in SP algortihm. + // This method *could* be used to prepare the graph beforehand + // so the turning from a GIS layer into a graph is not so + // intensive. 
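+            // For illustration, a shortest-path graph in that format is a plain text file along these lines:
+            //   c <free-form comment>
+            //   p sp <numberOfVertices> <numberOfEdges>
+            //   a <sourceVertexId> <targetVertexId> <weight>   (one line per directed edge)
+            // (rough sketch only; this method is currently a stub and always returns false)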
+ // + // NOTE: Follows the 9th DIMACS format: http://www.dis.uniroma1.it/~challenge9/format.shtml + return false; + } + + /// + /// + /// + /// + /// + public bool ReadFromDIMACS(string fileName) + { + return false; + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/GraphBuilder2Test.cs b/test/NetTopologySuite.IO.Esri.Test/Various/GraphBuilder2Test.cs new file mode 100644 index 0000000..244348d --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/GraphBuilder2Test.cs @@ -0,0 +1,402 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Features; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.IO; +using NetTopologySuite.IO.Shapefile; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test.Various +{ + [TestFixture] + public class GraphBuilder2Test + { + #region Setup/Teardown + + [SetUp] + public void Setup() + { + factory = GeometryFactory.Fixed; + + a = factory.CreateLineString(new Coordinate[] + { + new Coordinate(0, 0), + new Coordinate(100, 0), + new Coordinate(200, 100), + new Coordinate(200, 200), + }); + b = factory.CreateLineString(new Coordinate[] + { + new Coordinate(0, 0), + new Coordinate(100, 100), + new Coordinate(200, 200), + }); + c = factory.CreateLineString(new Coordinate[] + { + new Coordinate(0, 0), + new Coordinate(0, 100), + new Coordinate(100, 200), + new Coordinate(200, 200), + }); + d = factory.CreateLineString(new Coordinate[] + { + new Coordinate(0, 0), + new Coordinate(300, 0), + new Coordinate(300, 200), + new Coordinate(150, 200), + new Coordinate(150, 300), + }); + e = factory.CreateLineString(new Coordinate[] + { + new Coordinate(100, 300), + new Coordinate(150, 300), + new Coordinate(200, 300), + }); + + result = factory.CreateLineString(new Coordinate[] + { + new Coordinate(0, 0), + new Coordinate(300, 0), + new Coordinate(300, 200), + new Coordinate(150, 200), + new Coordinate(150, 300), + }); + revresult = (LineString)result.Reverse(); + + start = a.StartPoint; + end = d.EndPoint; + } + + #endregion + + private const string shp = ".shp"; + private const string shx = ".shx"; + private const string dbf = ".dbf"; + + private GeometryFactory factory; + private LineString a, b, c, d, e; + private LineString result, revresult; + private Point start, end; + + /// + /// Loads the shapefile as a graph allowing SP analysis to be carried out + /// + /// The name of the shape file we want to load + /// + /// + public LineString TestGraphBuilder2WithSampleGeometries(string fileName, Coordinate src, Coordinate dst) + { + var edges = ShapefileReader.ReadAllGeometries(fileName).ToGeometryCollection(); + return TestGraphBuilder2WithSampleGeometries(edges, src, dst); + } + + /// + /// Uses the passed geometry collection to generate a QuickGraph. 
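+        /// (Each geometry in the collection is expected to be a MultiLineString; every component LineString is added to the builder before the shortest path is computed.)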
+ /// + /// + /// + /// + public LineString TestGraphBuilder2WithSampleGeometries(GeometryCollection edges, Coordinate src, + Coordinate dst) + { + var builder = new GraphBuilder2(true); + foreach (MultiLineString edge in edges.Geometries) + foreach (LineString line in edge.Geometries) + builder.Add(line); + builder.Initialize(); + + return builder.Perform(src, dst); + } + + [SetUp] + public void FixtureSetup() + { + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + } + + private void SaveGraphResult(Geometry path) + { + if (path == null) + throw new ArgumentNullException("path"); + + const string shapepath = "graphresult"; + if (File.Exists(shapepath + shp)) + File.Delete(shapepath + shp); + Assert.IsFalse(File.Exists(shapepath + shp)); + if (File.Exists(shapepath + shx)) + File.Delete(shapepath + shx); + Assert.IsFalse(File.Exists(shapepath + shx)); + if (File.Exists(shapepath + dbf)) + File.Delete(shapepath + dbf); + Assert.IsFalse(File.Exists(shapepath + dbf)); + + const string field1 = "OBJECTID"; + var feature = new Feature(path, new AttributesTable()); + feature.Attributes.Add(field1, 0); + + var features = new List(new[] { feature, }); + features.SaveToShapefile(shapepath); + + Assert.IsTrue(File.Exists(shapepath + shp)); + Assert.IsTrue(File.Exists(shapepath + shx)); + Assert.IsTrue(File.Exists(shapepath + dbf)); + } + + [Test] + [Ignore("graph.shp not present")] + public void BuildGraphFromCompleteGraphShapefile() + { + const string shapepath = "graph.shp"; + const int count = 1179; + + Assert.IsTrue(File.Exists(shapepath)); + var edges = ShapefileReader.ReadAllGeometries(shapepath).ToGeometryCollection(); + Assert.IsNotNull(edges); + Assert.IsInstanceOf(typeof(GeometryCollection), edges); + Assert.AreEqual(count, edges.NumGeometries); + + var startls = edges.GetGeometryN(515).GetGeometryN(0) as LineString; + Assert.IsNotNull(startls); + var startPoint = startls.EndPoint; + Assert.AreEqual(2317300d, startPoint.X); + Assert.AreEqual(4843961d, startPoint.Y); + + var endls = edges.GetGeometryN(141).GetGeometryN(0) as LineString; + ; + Assert.IsNotNull(endls); + var endPoint = endls.StartPoint; + Assert.AreEqual(2322739d, endPoint.X); + Assert.AreEqual(4844539d, endPoint.Y); + + var builder = new GraphBuilder2(true); + foreach (MultiLineString mlstr in edges.Geometries) + { + Assert.AreEqual(1, mlstr.NumGeometries); + var str = mlstr.GetGeometryN(0) as LineString; + Assert.IsNotNull(str); + Assert.IsTrue(builder.Add(str)); + } + builder.Initialize(); + + var path = builder.Perform(startPoint, endPoint); + Assert.IsNotNull(path); + SaveGraphResult(path); + + var reverse = builder.Perform(endPoint, startPoint); + Assert.IsNotNull(reverse); + Assert.AreEqual(path, reverse.Reverse()); + } + + [Test] + [Ignore("minimalgraph.shp not present")] + public void BuildGraphFromMinimalGraphShapefile() + { + const string shapepath = "minimalgraph.shp"; + const int count = 15; + + Assert.IsTrue(File.Exists(shapepath)); + var edges = ShapefileReader.ReadAllGeometries(shapepath).ToGeometryCollection(); + Assert.IsNotNull(edges); + Assert.IsInstanceOf(typeof(GeometryCollection), edges); + Assert.AreEqual(count, edges.NumGeometries); + + var startls = edges.GetGeometryN(0).GetGeometryN(0) as LineString; + Assert.IsNotNull(startls); + var endls = edges.GetGeometryN(5).GetGeometryN(0) as LineString; + ; + Assert.IsNotNull(endls); + + var builder = new GraphBuilder2(true); + foreach (MultiLineString mlstr in edges.Geometries) + { + Assert.AreEqual(1, mlstr.NumGeometries); + var str = 
mlstr.GetGeometryN(0) as LineString; + Assert.IsNotNull(str); + Assert.IsTrue(builder.Add(str)); + } + builder.Initialize(); + + var path = builder.Perform(startls.StartPoint, endls.EndPoint); + Assert.IsNotNull(path); + } + + [Test] + [Ignore("strade_fixed.shp not present")] + public void BuildGraphFromStradeShapefile() + { + string shapepath = "strade_fixed.shp"; + int count = 703; + + Assert.IsTrue(File.Exists(shapepath)); + var edges = ShapefileReader.ReadAllGeometries(shapepath).ToGeometryCollection(); + Assert.IsNotNull(edges); + Assert.IsInstanceOf(typeof(GeometryCollection), edges); + Assert.AreEqual(count, edges.NumGeometries); + + var startCoord = new Coordinate(2317300d, 4843961d); + var endCoord = new Coordinate(2322739d, 4844539d); + + bool startFound = false; + bool endFound = false; + var builder = new GraphBuilder2(true); + foreach (MultiLineString mlstr in edges.Geometries) + { + Assert.AreEqual(1, mlstr.NumGeometries); + var str = mlstr.GetGeometryN(0) as LineString; + Assert.IsNotNull(str); + Assert.IsTrue(builder.Add(str)); + + if (!startFound) + { + var coords = new List(str.Coordinates); + if (coords.Contains(startCoord)) + startFound = true; + } + + if (!endFound) + { + var coords = new List(str.Coordinates); + if (coords.Contains(endCoord)) + endFound = true; + } + } + builder.Initialize(); + Assert.IsTrue(startFound); + Assert.IsTrue(endFound); + + var path = builder.Perform(startCoord, endCoord); + Assert.IsNotNull(path); + SaveGraphResult(path); + + var reverse = builder.Perform(startCoord, endCoord); + Assert.IsNotNull(reverse); + Assert.AreEqual(path, reverse.Reverse()); + } + + [Ignore("")] + [Test] + public void BuildStradeFixed() + { + string path = "strade" + shp; + Assert.IsTrue(File.Exists(path)); + + var features = ShapefileReader.ReadAll(path).ToList(); + Assert.AreEqual(703, features.Count); + + string shapepath = "strade_fixed"; + if (File.Exists(shapepath + shp)) + File.Delete(shapepath + shp); + Assert.IsFalse(File.Exists(shapepath + shp)); + if (File.Exists(shapepath + shx)) + File.Delete(shapepath + shx); + Assert.IsFalse(File.Exists(shapepath + shx)); + if (File.Exists(shapepath + dbf)) + File.Delete(shapepath + dbf); + Assert.IsFalse(File.Exists(shapepath + dbf)); + + features.SaveToShapefile(shapepath); + + Assert.IsTrue(File.Exists(shapepath + shp)); + Assert.IsTrue(File.Exists(shapepath + shx)); + Assert.IsTrue(File.Exists(shapepath + dbf)); + } + + [Test] + public void CheckGraphBuilder2ExceptionUsingARepeatedGeometry() + { + Assert.Catch(() => + { + var builder = new GraphBuilder2(); + Assert.IsTrue(builder.Add(a)); + Assert.IsFalse(builder.Add(a)); + Assert.IsFalse(builder.Add(a, a)); + builder.Initialize(); + }); + } + + [Test] + public void CheckGraphBuilder2ExceptionUsingDifferentFactories() + { + Assert.Catch(() => + { + var builder = new GraphBuilder2(); + Assert.IsTrue(builder.Add(a)); + Assert.IsTrue(builder.Add(b, c)); + Assert.IsTrue(builder.Add(d)); + builder.Add(GeometryFactory.Default.CreateLineString(new Coordinate[] + { + new Coordinate(0, 0), + new Coordinate(50, 50), + })); + }); + } + + [Test] + public void CheckGraphBuilder2ExceptionUsingDoubleInitialization() + { + var builder = new GraphBuilder2(); + builder.Add(a); + builder.Add(b, c); + builder.Add(d); + builder.Add(e); + builder.Initialize(); + builder.Initialize(); + } + + [Test] + public void CheckGraphBuilder2ExceptionUsingNoGeometries() + { + Assert.Catch(() => + { + var builder = new GraphBuilder2(); + builder.Initialize(); + }); + } + + [Test] + public void 
CheckGraphBuilder2ExceptionUsingOneGeometry() + { + Assert.Catch(() => + { + var builder = new GraphBuilder2(); + Assert.IsTrue(builder.Add(a)); + builder.Initialize(); + }); + } + + [Test] + public void TestBidirectionalGraphBuilder2WithSampleGeometries() + { + var builder = new GraphBuilder2(true); + builder.Add(a); + builder.Add(b, c); + builder.Add(d); + builder.Add(e); + builder.Initialize(); + + var path = builder.Perform(start.Coordinate, end.Coordinate); + Assert.IsNotNull(path); + Assert.AreEqual(result, path); + + var revpath = builder.Perform(end, start); + Assert.IsNotNull(revpath); + Assert.AreEqual(revresult, revpath); + } + + [Test] + public void TestGraphBuilder2WithSampleGeometries() + { + var builder = new GraphBuilder2(); + builder.Add(a); + builder.Add(b, c); + builder.Add(d); + builder.Add(e); + builder.Initialize(); + + var path = builder.Perform(start, end); + Assert.IsNotNull(path); + Assert.AreEqual(result, path); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/NormalizeTest.cs b/test/NetTopologySuite.IO.Esri.Test/Various/NormalizeTest.cs new file mode 100644 index 0000000..8e27cab --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/NormalizeTest.cs @@ -0,0 +1,78 @@ +using NetTopologySuite.Geometries; +using NUnit.Framework; + +namespace NetTopologySuite.IO.ShapeFile.Test.Various +{ + /// + /// + /// + [TestFixture] + public class NormalizeTest + { + protected GeometryFactory Factory { get; private set; } + + protected WKTReader Reader { get; private set; } + + private Polygon _polygon = null; + private LinearRing _shell = null; + private LinearRing _hole = null; + + /// + /// Initializes a new instance of the class. + /// + public NormalizeTest() : base() { } + + /// + /// Method called prior to every test in this fixture + /// + [SetUp] + public void Init() + { + this.Factory = new GeometryFactory(); + this.Reader = new WKTReader(); + + _shell = Factory.CreateLinearRing(new Coordinate[] { new Coordinate(100,100), + new Coordinate(200,100), + new Coordinate(200,200), + new Coordinate(100,200), + new Coordinate(100,100), }); + // NOTE: Hole is created with not correct order for holes + _hole = Factory.CreateLinearRing(new Coordinate[] { new Coordinate(120,120), + new Coordinate(180,120), + new Coordinate(180,180), + new Coordinate(120,180), + new Coordinate(120,120), }); + _polygon = Factory.CreatePolygon(_shell, new LinearRing[] { _hole, }); + } + + /// + /// + /// + [Test] + public void NotNormalizedGDBOperation() + { + //byte[] bytes = new GDBWriter().Write(_polygon); + //var test = new GDBReader().Read(bytes); + + //This is no longer true + //Assert.IsNull(test); + //Assert.IsTrue(test.IsEmpty); + //Assert.IsTrue(test is IPolygonal); + } + + /// + /// + /// + [Test] + public void NormalizedGDBOperation() + { + _polygon.Normalize(); + + //byte[] bytes = new GDBWriter().Write(_polygon); + //var test = new GDBReader().Read(bytes); + + //Assert.IsNotNull(test); + //Assert.IsTrue(_polygon.EqualsExact(test)); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/PathFinder.cs b/test/NetTopologySuite.IO.Esri.Test/Various/PathFinder.cs new file mode 100644 index 0000000..9b78b33 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/PathFinder.cs @@ -0,0 +1,291 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Operation.Linemerge; +using QuickGraph; +using QuickGraph.Algorithms.Observers; +using QuickGraph.Algorithms.ShortestPath; +using System; +using System.Collections.Generic; + +namespace 
NetTopologySuite.IO.ShapeFile.Test.Various +{ + /// + /// A class that manages shortest path computation. + /// + public class PathFinder + { + /// + /// A delegate that defines how to calculate the weight + /// of a line. + /// + /// A line. + /// The weight of the line. + public delegate double ComputeWeightDelegate(LineString line); + + private static readonly ComputeWeightDelegate DefaultComputer = line => line.Length; + + private readonly bool bidirectional; + + private GeometryFactory factory; + private readonly List strings; + + private readonly AdjacencyGraph> graph; + private IDictionary, double> consts; + + /// + /// Initializes a new instance of the class. + /// + /// + /// Specify if the graph must be build using both edges directions. + /// + public PathFinder(bool bidirectional) + { + this.bidirectional = bidirectional; + + factory = null; + strings = new List(); + graph = new AdjacencyGraph>(true); + } + + /// + /// Initializes a new instance of the class, + /// using a directed graph. + /// + public PathFinder() : this(false) { } + + /// + /// Adds each line to the graph structure. + /// + /// + /// + /// true if all lines + /// are added, false otherwise. + /// + /// + /// If geometries don't have the same factory. + /// + public bool Add(params LineString[] lines) + { + bool result = true; + foreach (var line in lines) + { + var newfactory = line.Factory; + if (factory == null) + factory = newfactory; + else if (!newfactory.PrecisionModel.Equals(factory.PrecisionModel)) + throw new TopologyException("all geometries must have the same precision model"); + + bool lineFound = strings.Contains(line); + result &= !lineFound; + if (!lineFound) + strings.Add(line); + else continue; // Skip vertex check because line is already present + + var coordinates = line.Coordinates; + int start = 0; + int end = coordinates.GetUpperBound(0); + AddCoordinateToGraph(coordinates[start]); // StartPoint + AddCoordinateToGraph(coordinates[end]); // EndPoint + } + return result; + } + + /// + /// + /// + /// + private void AddCoordinateToGraph(Coordinate coord) + { + if (!graph.ContainsVertex(coord)) + graph.AddVertex(coord); + } + + /// + /// Initialize the algorithm using the default + /// weight computer, + /// that uses string length + /// as weight value. + /// + /// + /// If you've don't added two or more geometries to the builder. + /// + /// + /// If builder is already initialized. + /// + public void Initialize() + { + BuildEdges(DefaultComputer); + } + + /// + /// Initialize the algorithm using the specified + /// weight computer + /// + /// + /// A function that computes the weight + /// of any edge of the graph. + /// + /// + /// If you've don't added two or more geometries to the builder. + /// + /// + /// If builder is already initialized. + /// + public void Initialize(ComputeWeightDelegate computer) + { + BuildEdges(computer); + } + + /// + /// + /// + /// + private void BuildEdges(ComputeWeightDelegate computer) + { + if (strings.Count < 2) + throw new TopologyException("you must specify two or more geometries to build a graph"); + + // Counts the number of edges in the set we pass to this method. + int numberOfEdgesInLines = strings.Count * 2; + + // Double values because we use also reversed edges... 
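+                // e.g. three input LineStrings give a capacity of 6 here, doubled to 12
+                // when the builder is bidirectional (each edge src -> dst added in the
+                // loop below then gets a reversed twin dst -> src); the value is only
+                // used to pre-size the "consts" weight dictionary.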
+ if (bidirectional) + numberOfEdgesInLines *= 2; + + consts = new Dictionary, double>(numberOfEdgesInLines); + + foreach (var line in strings) + { + // Prepare a segment + var coordinates = line.Coordinates; + int start = 0; + int end = coordinates.GetUpperBound(0); + var src = coordinates[start]; + var dst = coordinates[end]; + + // Here we calculate the weight of the edge + double weight = computer(line); + + // Add the edge + IEdge localEdge = new Edge(src, dst); + graph.AddEdge(localEdge); + consts.Add(localEdge, weight); + + if (bidirectional) + { + // Add the reversed edge + IEdge localEdgeRev = new Edge(dst, src); + graph.AddEdge(localEdgeRev); + consts.Add(localEdgeRev, weight); + } + } + } + + /// + /// Carries out the shortest path anlayis between the two + /// nodes + /// passed as variables and returns an + /// giveing the shortest path. + /// + /// The source geom + /// The destination geom + /// A or a + /// with all the elements of the graph that composes the shortest path, + /// sequenced using a . + /// + public Geometry Find(Geometry source, Geometry destination) + { + return Find(source.Coordinate, destination.Coordinate); + } + + /// + /// Carries out the shortest path between the two nodes + /// ids passed as variables and returns an + /// giveing the shortest path. + /// + /// The source node + /// The destination node + /// A or a + /// with all the elements of the graph that composes the shortest path, + /// sequenced using a . + /// + public Geometry Find(Coordinate source, Coordinate destination) + { + if (!graph.ContainsVertex(source)) + throw new ArgumentException("key not found in the graph", "source"); + if (!graph.ContainsVertex(destination)) + throw new ArgumentException("key not found in the graph", "destination"); + + // Build algorithm + var dijkstra = + new DijkstraShortestPathAlgorithm>(graph, edge => consts[edge]); + + // Attach a Distance observer to give us the distances between edges + var distanceObserver = + new VertexDistanceRecorderObserver>(edge => consts[edge]); + distanceObserver.Attach(dijkstra); + + // Attach a Vertex Predecessor Recorder Observer to give us the paths + var predecessorObserver = + new VertexPredecessorRecorderObserver>(); + predecessorObserver.Attach(dijkstra); + + // Run the algorithm with A set to be the source + dijkstra.Compute(source); + + // Get the path computed to the destination. + IEnumerable> path; + bool result = predecessorObserver.TryGetPath(destination, out path); + + // Then we need to turn that into a geomery. + return result ? BuildString(new List>(path)) : null; + } + + /// + /// Takes the path returned from QuickGraph library and uses the + /// list of coordinates to reconstruct the path into a geometric + /// "shape" + /// + /// Shortest path from the QucikGraph Library + /// + /// A or a + /// with all the elements of the graph that composes the shortest path, + /// sequenced using a . 
+ /// + private Geometry BuildString(ICollection> paths) + { + // if the path has no links then return a null reference + if (paths.Count < 1) + return null; + + var collector = new LineSequencer(); + foreach (var path in paths) + { + var src = path.Source; + var dst = path.Target; + foreach (var str in strings) + { + if (IsBound(str, src) && IsBound(str, dst)) + collector.Add(str); + } + } + + var sequence = collector.GetSequencedLineStrings(); + return sequence; + } + + /// + /// + /// + /// + /// + /// + private static bool IsBound(Geometry str, Coordinate src) + { + var coordinates = str.Coordinates; + int start = 0; + int end = str.Coordinates.GetUpperBound(0); + return coordinates[start].Equals(src) || + coordinates[end].Equals(src); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/PathFinderTest.cs b/test/NetTopologySuite.IO.Esri.Test/Various/PathFinderTest.cs new file mode 100644 index 0000000..63e7beb --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/PathFinderTest.cs @@ -0,0 +1,182 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.Features; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.IO; +using NetTopologySuite.IO.Shapefile; +using System.Linq; + +namespace NetTopologySuite.IO.ShapeFile.Test.Various +{ + [TestFixture] + public class PathFinderTest + { + private const string shp = ".shp"; + private const string shx = ".shx"; + private const string dbf = ".dbf"; + + private GeometryFactory factory; + + [SetUp] + public void FixtureSetup() + { + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + + factory = GeometryFactory.Fixed; + } + + [Ignore("strade.shp not present")] + [Test] + public void BuildStradeFixed() + { + string path = "strade" + shp; + Assert.IsTrue(File.Exists(path)); + + var features = ShapefileReader.ReadAll(path); + Assert.AreEqual(703, features.Length); + + string shapepath = "strade_fixed"; + if (File.Exists(shapepath + shp)) + File.Delete(shapepath + shp); + Assert.IsFalse(File.Exists(shapepath + shp)); + if (File.Exists(shapepath + shx)) + File.Delete(shapepath + shx); + Assert.IsFalse(File.Exists(shapepath + shx)); + if (File.Exists(shapepath + dbf)) + File.Delete(shapepath + dbf); + Assert.IsFalse(File.Exists(shapepath + dbf)); + + features.SaveToShapefile(shapepath); + + Assert.IsTrue(File.Exists(shapepath + shp)); + Assert.IsTrue(File.Exists(shapepath + shx)); + Assert.IsTrue(File.Exists(shapepath + dbf)); + } + + private Geometry LoadGraphResult() + { + string path = "graphresult.shp"; + Assert.IsTrue(Path.GetExtension(path) == shp); + + var coll = ShapefileReader.ReadAllGeometries(path).ToGeometryCollection(); + Assert.AreEqual(1, coll.Count); + + var geom = coll.GetGeometryN(0); + Assert.IsInstanceOf(typeof(MultiLineString), geom); + var str = geom.GetGeometryN(0); + Assert.IsInstanceOf(typeof(LineString), str); + return str; + } + + private void SaveGraphResult(Geometry path) + { + if (path == null) + throw new ArgumentNullException("path"); + + string shapepath = "graphresult"; + if (File.Exists(shapepath + shp)) + File.Delete(shapepath + shp); + Assert.IsFalse(File.Exists(shapepath + shp)); + if (File.Exists(shapepath + shx)) + File.Delete(shapepath + shx); + Assert.IsFalse(File.Exists(shapepath + shx)); + if (File.Exists(shapepath + dbf)) + File.Delete(shapepath + dbf); + Assert.IsFalse(File.Exists(shapepath + dbf)); + + string field1 = "OBJECTID"; + var feature = new Feature(path, new AttributesTable()); + feature.Attributes.Add(field1, 0); 
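+            // Wrap the computed path in a one-element feature list and write it out;
+            // SaveToShapefile should emit the .shp/.shx/.dbf triplet that the
+            // assertions below verify, with OBJECTID = 0 as the only attribute value.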
+ + var features = new List(new[] { feature, }); + features.SaveToShapefile(shapepath); + + Assert.IsTrue(File.Exists(shapepath + shp)); + Assert.IsTrue(File.Exists(shapepath + shx)); + Assert.IsTrue(File.Exists(shapepath + dbf)); + } + + [Test] + [Ignore("graph.shp is not present")] + public void BuildGraphFromCompleteGraphShapefile() + { + string shapepath = "graph.shp"; + int count = 1179; + + Assert.IsTrue(File.Exists(shapepath), string.Format("File not found: '{0}'", shapepath)); + var edges = ShapefileReader.ReadAllGeometries(shapepath).ToGeometryCollection(); + Assert.IsNotNull(edges); + Assert.IsInstanceOf(typeof(GeometryCollection), edges); + Assert.AreEqual(count, edges.NumGeometries); + + // Insert arbitrary userdata + for (int i = 0; i < count; i++) + { + var g = edges.GetGeometryN(i) as MultiLineString; + Assert.IsNotNull(g); + var ls = g.GetGeometryN(0) as LineString; + Assert.IsNotNull(ls); + + Assert.IsNull(ls.UserData); + ls.UserData = i; + Assert.IsNotNull(ls.UserData); + } + + var startls = edges.GetGeometryN(515).GetGeometryN(0) as LineString; + Assert.IsNotNull(startls); + var startPoint = startls.EndPoint; + Assert.AreEqual(2317300d, startPoint.X); + Assert.AreEqual(4843961d, startPoint.Y); + + var endls = edges.GetGeometryN(141).GetGeometryN(0) as LineString; ; + Assert.IsNotNull(endls); + var endPoint = endls.StartPoint; + Assert.AreEqual(2322739d, endPoint.X); + Assert.AreEqual(4844539d, endPoint.Y); + + var finder = new PathFinder(true); + foreach (MultiLineString mlstr in edges.Geometries) + { + Assert.AreEqual(1, mlstr.NumGeometries); + var str = mlstr.GetGeometryN(0) as LineString; + Assert.IsNotNull(str); + Assert.IsNotNull(str.UserData); + Assert.IsTrue(finder.Add(str)); + } + finder.Initialize(); + + int expectedResultCount = 8; + var path = finder.Find(startPoint, endPoint); + Assert.IsNotNull(path); + Assert.IsInstanceOf(typeof(MultiLineString), path); + var strings = (MultiLineString)path; + Assert.AreEqual(expectedResultCount, strings.NumGeometries); + foreach (var g in strings.Geometries) + { + Assert.IsNotNull(g.UserData); + Console.WriteLine("{0} : {1}", g.UserData, g); + } + + var reversedPath = finder.Find(endPoint, startPoint); + Assert.IsNotNull(reversedPath); + Assert.IsInstanceOf(typeof(MultiLineString), reversedPath); + + var reversedStrings = (MultiLineString)reversedPath; + Assert.AreEqual(expectedResultCount, reversedStrings.NumGeometries); + foreach (var g in reversedStrings.Geometries) + { + Assert.IsNotNull(g.UserData); + Console.WriteLine("{0} : {1}", g.UserData, g); + } + + for (int i = 0; i < expectedResultCount; i++) + { + var item = strings.GetGeometryN(i); + var itemReversed = strings.GetGeometryN(expectedResultCount - 1 - i); + Assert.AreNotEqual(item.UserData, itemReversed.UserData); + Assert.AreNotEqual(item, itemReversed); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/Various/UnionAggregateTest.cs b/test/NetTopologySuite.IO.Esri.Test/Various/UnionAggregateTest.cs new file mode 100644 index 0000000..0e34a98 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/Various/UnionAggregateTest.cs @@ -0,0 +1,179 @@ +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.Operation.Union; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; + +namespace NetTopologySuite.IO.ShapeFile.Test.Various +{ + /// + /// + /// + [TestFixture] + public class UnionAggregateTest + { + /// + /// + /// + [SetUp] + public void 
SetUp() + { + // Set current dir to shapefiles dir + Environment.CurrentDirectory = CommonHelpers.TestShapefilesDirectory; + } + + /// + /// + /// + [Test()] + [Ignore("sa_region.shp not present")] + public void PerformUnionAggregateTest1() + { + Assert.Catch(() => + { + Assert.IsNotNull(CheckShapefile("sa_region")); + }); + } + + /// + /// + /// + [Test] + public void PerformUnionAggregateTest2() + { + Assert.IsNotNull(CheckShapefile("CA_Cable_region")); + } + + /// + /// + /// + [Test] + public void PerformUnionAggregateTest3() + { + Assert.IsNotNull(CheckShapefile("US_DMA_region.shp")); + } + + /// + /// + /// + /// + /// + private static Geometry CheckShapefile(string fileName) + { + //int count = 0; + var geoms = new List(); + using (var shp = ShapefileReader.Open(fileName)) + { + foreach (var feature in shp) + { + var current = feature.Geometry; + if (!current.IsValid) + { + Debug.WriteLine("Imvalid geometry found: " + current); + continue; + } + geoms.Add(current); + //if (result == null) + // result = current; + //else result = result.Union(current); + //Debug.WriteLine("Iteration => " + ++count); + } + } + var result = UnaryUnionOp.Union(geoms); + var write = new WKTWriter { Formatted = true, MaxCoordinatesPerLine = 3, Tab = 2 }; + Debug.WriteLine("Operation result: " + write.Write(result)); + return result; + } + + /// + /// + /// + const string s1 = "MULTIPOLYGON(" + + "((267225 195936,267063 195904,266895 195872,266825 195858,266822 195891,266850 196044,266934 196203,267047 196249,267352 196374,267281 196302,267255 196275,267222 196244,267166 196191,267160 196153,267157 196139,267196 196136,267225 195936))," + + "((265704 193056,265703 193055,265692 193057,265678 193060,265663 193056,265659 193055,265655 193053,265653 193053,265651 193052,265648 193052,265646 193052,265631 193058,265616 193064,265612 193066,265609 193068,265605 193071,265603 193076,265595 193089,265596 193103,265597 193107,265623 193128,265631 193130,265630 193130,265609 193142,265608 193153,265608 193157,265607 193161,265607 193163,265606 193164,265603 193169,265619 193172,265623 193173,265619 193183,265618 193185,265618 193187,265613 193209,265618 193209,265622 193209,265620 193220,265626 193220,265626 193230,265617 193230,265616 193235,265616 193237,265615 193238,265604 193252,265606 193257,265607 193259,265607 193262,265607 193264,265607 193266,265607 193270,265607 193274,265607 193279,265606 193284,265605 193289,265602 193294,265595 193312,265598 193318,265600 193322,265601 193326,265602 193330,265602 193334,265602 193336,265602 193337,265599 193344,265610 193360,265611 193361,265612 193364,265613 193369,265615 193375,265615 193377,265615 193380,265613 193393,265609 193392,265607 193392,265605 193393,265587 193403,265588 193412,265589 193419,265610 193422,265629 193424,265631 193418,265633 193409,265637 193402,265642 193392,265659 193409,265661 193410,265662 193414,265662 193416,265663 193418,265664 193419,265664 193420,265665 193422,265665 193424,265666 193426,265666 193428,265665 193452,265657 193455,265652 193457,265640 193441,265636 193435,265623 193444,265642 193493,265645 193490,265649 193487,265665 193513,265673 193526,265681 193532,265683 193534,265684 193536,265685 193540,265686 193544,265687 193545,265687 193546,265690 193576,265682 193584,265655 193612,265667 193624,265668 193624,265670 193627,265679 193624,265683 193623,265693 193623,265692 193632,265692 193633,265693 193635,265695 193642,265693 193642,265692 193643,265691 193644,265690 193644,265689 193644,265679 193645,265679 193655,265678 
193655,265677 193673,265676 193682,265670 193684,265659 193688,265654 193700,265652 193704,265631 193701,265629 193701,265628 193703,265619 193718,265617 193718,265616 193709,265612 193683,265614 193678,265609 193678,265571 193681,265551 193657,265495 193716,265492 193726,265481 193765,265481 193774,265480 193846,265479 193902,265478 193936,265526 193966,265563 193990,265548 194006,265547 194006,265546 194005,265544 194004,265543 194003,265542 194003,265541 194002,265540 194002,265540 194003,265521 194040,265525 194046,265527 194049,265527 194050,265528 194059,265528 194061,265543 194059,265544 194059,265544 194060,265549 194064,265554 194068,265555 194070,265557 194073,265558 194073,265558 194074,265559 194077,265561 194079,265564 194084,265567 194088,265568 194090,265570 194090,265581 194093,265569 194117,265567 194120,265567 194124,265567 194125,265568 194126,265569 194127,265569 194128,265570 194129,265571 194130,265572 194131,265573 194132,265574 194133,265575 194134,265576 194134,265577 194135,265596 194153,265583 194158,265540 194205,265530 194216,265473 194277,265462 194289,265461 194290,265451 194299,265432 194314,265410 194332,265394 194348,265372 194370,265371 194369,265359 194355,265362 194353,265362 194352,265367 194342,265342 194356,265341 194357,265340 194358,265339 194358,265338 194359,265337 194360,265336 194361,265335 194362,265335 194363,265334 194363,265333 194364,265332 194365,265331 194366,265330 194367,265325 194347,265308 194363,265302 194369,265326 194391,265308 194398,265306 194400,265304 194401,265303 194402,265302 194402,265301 194402,265300 194402,265294 194402,265288 194404,265286 194404,265286 194405,265285 194406,265284 194407,265283 194408,265282 194409,265282 194410,265281 194411,265280 194412,265279 194413,265278 194414,265277 194415,265276 194416,265275 194418,265275 194419,265274 194420,265273 194421,265272 194422,265272 194423,265271 194424,265270 194425,265269 194426,265268 194428,265268 194429,265267 194430,265266 194431,265266 194432,265265 194433,265264 194434,265263 194435,265262 194435,265261 194436,265260 194438,265258 194438,265253 194438,265254 194450,265254 194452,265253 194453,265252 194454,265251 194455,265243 194464,265234 194454,265238 194471,265238 194473,265237 194473,265235 194475,265234 194476,265232 194477,265231 194478,265230 194478,265229 194478,265228 194477,265227 194477,265226 194476,265225 194475,265224 194474,265224 194473,265223 194472,265222 194472,265222 194471,265221 194470,265220 194469,265217 194466,265216 194465,265166 194437,265165 194447,265164 194476,265158 194508,265158 194511,265173 194549,265176 194555,265199 194612,265205 194606,265214 194603,265238 194592,265244 194601,265246 194605,265257 194601,265263 194598,265265 194598,265266 194599,265268 194601,265270 194602,265272 194604,265274 194604,265278 194605,265281 194605,265282 194605,265283 194605,265292 194603,265290 194595,265284 194577,265320 194599,265336 194580,265351 194590,265302 194548,265300 194546,265298 194545,265295 194542,265295 194541,265298 194536,265285 194522,265285 194521,265284 194520,265283 194519,265282 194518,265281 194517,265280 194516,265279 194514,265279 194512,265247 194500,265262 194484,265277 194470,265278 194469,265287 194460,265304 194444,265306 194442,265325 194424,265330 194429,265370 194431,265371 194430,265372 194429,265374 194428,265376 194427,265377 194426,265378 194425,265379 194425,265380 194424,265381 194423,265382 194423,265383 194422,265384 194421,265385 194420,265386 194419,265387 194419,265388 194418,265389 
194417,265390 194417,265390 194416,265391 194415,265392 194415,265393 194414,265395 194413,265396 194412,265398 194411,265399 194409,265401 194408,265402 194407,265403 194406,265404 194405,265405 194405,265406 194404,265407 194403,265408 194402,265409 194402,265410 194401,265411 194400,265412 194399,265413 194398,265414 194398,265414 194397,265416 194396,265417 194396,265418 194395,265419 194394,265420 194393,265421 194393,265422 194392,265423 194391,265424 194390,265425 194390,265426 194389,265427 194388,265428 194387,265429 194387,265430 194386,265431 194385,265432 194384,265433 194383,265434 194383,265435 194382,265436 194381,265437 194381,265438 194380,265439 194379,265440 194379,265441 194378,265442 194377,265443 194376,265444 194376,265445 194375,265446 194374,265447 194373,265448 194373,265448 194372,265449 194371,265450 194370,265451 194370,265451 194368,265453 194363,265466 194371,265467 194372,265467 194373,265468 194374,265469 194375,265469 194376,265470 194377,265471 194378,265471 194379,265471 194380,265469 194381,265469 194382,265468 194382,265466 194384,265462 194386,265425 194414,265430 194411,265430 194410,265428 194412,265433 194418,265448 194406,265463 194421,265464 194421,265465 194421,265466 194421,265467 194422,265487 194442,265507 194463,265548 194465,265548 194473,265578 194475,265578 194477,265579 194477,265602 194498,265595 194503,265594 194505,265593 194507,265592 194510,265590 194513,265590 194514,265589 194515,265589 194516,265589 194517,265588 194518,265587 194520,265587 194521,265586 194523,265585 194525,265585 194526,265582 194536,265579 194545,265578 194547,265577 194548,265576 194550,265575 194552,265574 194553,265573 194555,265572 194556,265571 194557,265570 194558,265570 194560,265569 194561,265569 194562,265568 194563,265568 194565,265567 194566,265566 194569,265565 194572,265565 194574,265564 194575,265562 194579,265560 194583,265559 194585,265557 194588,265557 194590,265556 194591,265555 194592,265555 194593,265554 194594,265553 194596,265553 194597,265552 194598,265552 194599,265551 194601,265551 194602,265550 194603,265549 194605,265548 194606,265548 194607,265548 194609,265547 194623,265538 194624,265508 194628,265508 194629,265501 194629,265461 194633,265456 194639,265458 194655,265462 194682,265463 194689,265491 194705,265527 194725,265534 194729,265561 194745,265575 194752,265589 194755,265593 194755,265598 194755,265603 194755,265607 194752,265630 194734,265651 194712,265652 194710,265654 194708,265656 194707,265657 194705,265658 194704,265658 194703,265659 194702,265659 194701,265659 194700,265659 194698,265660 194697,265660 194696,265663 194693,265666 194689,265666 194688,265666 194686,265666 194683,265666 194678,265666 194677,265667 194676,265671 194671,265675 194667,265678 194663,265682 194660,265689 194653,265698 194650,265714 194645,265700 194593,265699 194589,265701 194586,265704 194583,265706 194580,265711 194573,265716 194567,265724 194559,265730 194551,265734 194547,265737 194543,265739 194539,265742 194535,265747 194528,265749 194519,265752 194507,265754 194495,265755 194490,265757 194486,265769 194467,265779 194448,265780 194447,265780 194445,265780 194444,265780 194443,265780 194442,265780 194441,265779 194440,265779 194439,265779 194437,265779 194436,265780 194435,265780 194434,265779 194433,265780 194431,265780 194430,265779 194429,265779 194428,265779 194427,265779 194426,265779 194424,265779 194423,265779 194422,265778 194421,265778 194419,265777 194418,265777 194417,265777 194416,265777 194414,265777 194413,265777 
194411,265777 194410,265777 194409,265778 194408,265784 194399,265791 194390,265791 194389,265791 194388,265790 194387,265790 194386,265790 194385,265789 194384,265789 194383,265789 194381,265788 194380,265788 194379,265788 194378,265787 194376,265787 194375,265787 194374,265786 194373,265786 194372,265786 194371,265786 194370,265785 194364,265785 194358,265785 194357,265785 194356,265784 194355,265784 194353,265784 194352,265783 194351,265783 194350,265783 194348,265782 194347,265782 194346,265782 194341,265783 194336,265783 194335,265783 194334,265783 194332,265783 194331,265783 194330,265784 194328,265784 194327,265785 194326,265785 194325,265786 194323,265787 194321,265788 194319,265788 194318,265787 194318,265785 194317,265785 194318,265775 194318,265775 194313,265768 194310,265748 194302,265754 194293,265761 194282,265765 194272,265773 194252,265780 194235,265781 194231,265780 194230,265778 194225,265775 194219,265775 194218,265775 194216,265774 194215,265774 194213,265770 194202,265767 194190,265767 194189,265767 194187,265768 194181,265802 194173,265813 194153,265814 194148,265834 194139,265836 194138,265838 194136,265840 194135,265841 194135,265842 194134,265843 194133,265845 194132,265847 194130,265848 194130,265849 194129,265850 194129,265851 194128,265852 194127,265853 194127,265854 194126,265857 194121,265858 194115,265859 194113,265860 194110,265860 194108,265862 194107,265876 194093,265869 194090,265868 194089,265868 194088,265867 194087,265867 194085,265866 194084,265866 194083,265866 194082,265865 194081,265865 194079,265865 194078,265864 194077,265864 194076,265864 194075,265863 194073,265863 194071,265862 194069,265862 194068,265861 194068,265849 194059,265853 194017,265853 194016,265852 194014,265852 194013,265852 194011,265851 194010,265851 194009,265851 194008,265850 194007,265849 194004,265848 194002,265848 194000,265847 193997,265845 193992,265845 193991,265845 193989,265853 193957,265844 193964,265806 193997,265780 194074,265743 193975,265748 193972,265748 193971,265746 193968,265740 193965,265731 193960,265727 193957,265732 193952,265733 193950,265734 193948,265735 193947,265734 193946,265734 193944,265734 193943,265734 193942,265734 193940,265734 193939,265734 193938,265734 193936,265734 193935,265734 193934,265733 193932,265733 193930,265732 193928,265726 193922,265732 193902,265732 193900,265732 193897,265730 193879,265723 193879,265716 193879,265714 193873,265744 193875,265749 193874,265715 193833,265717 193832,265717 193824,265718 193817,265718 193816,265763 193761,265764 193762,265767 193773,265768 193780,265775 193785,265783 193791,265773 193814,265770 193821,265768 193829,265766 193839,265762 193848,265759 193856,265758 193862,265758 193872,265763 193881,265768 193889,265781 193866,265790 193850,265794 193845,265840 193774,265837 193806,265837 193809,265831 193827,265860 193838,265882 193774,265905 193718,266059 193881,266139 193966,266196 193861,266170 193795,266180 193724,266194 193619,266209 193557,266221 193508,266225 193492,266224 193475,266222 193325,266223 193318,266225 193297,266227 193280,266228 193266,266234 193210,266137 193221,266121 193247,266068 193233,266066 193191,266066 193188,266064 193152,266039 193057,266035 193043,266025 192991,266019 192991,266019 192981,266023 192981,266021 192972,265989 192940,266005 192938,266006 192953,266064 192989,266071 192986,266087 192979,266104 192977,266228 192963,266223 192995,266219 193016,266270 193017,266400 192817,266413 192797,266561 192570,266615 192536,266620 192553,266632 192599,266607 
192635,266547 192722,266661 192907,266693 192959,266872 192896,266899 192887,266926 192889,266972 192892,266968 192741,266967 192695,266871 192667,266737 192629,266764 192622,266727 192409,266717 192352,266808 192287,266885 192231,266873 192268,266868 192283,266894 192340,266987 192540,267003 192550,267304 192739,267285 192641,267270 192566,266933 192196,266928 192191,266906 192166,266889 192142,266873 192118,266813 192175,266797 192190,266763 192137,266358 192253,266369 192237,266370 192235,266367 192235,266296 192224,266281 192203,266023 191829,266016 191826,266005 191816,266002 191816,265998 191816,265991 191822,265991 191829,265991 191834,265989 191839,265987 191844,265983 191847,265980 191850,265976 191851,265974 191852,265969 191850,265969 191842,265967 191837,265966 191834,265964 191831,265960 191827,265958 191826,265950 191829,265949 191862,265932 191858,265932 191853,265931 191849,265929 191845,265925 191841,265922 191838,265899 191861,265888 191860,265886 191858,265883 191854,265881 191850,265880 191847,265879 191840,265866 191833,265859 191846,265852 191842,265846 191839,265840 191837,265838 191837,265837 191839,265835 191844,265834 191850,265835 191858,265834 191859,265831 191862,265828 191864,265825 191864,265821 191864,265818 191862,265802 191848,265800 191847,265796 191847,265790 191847,265785 191849,265768 191855,265765 191856,265761 191855,265755 191852,265742 191848,265739 191834,265725 191801,265720 191793,265715 191789,265709 191785,265705 191783,265696 191780,265684 191778,265681 191779,265680 191780,265679 191782,265679 191784,265683 191799,265681 191831,265677 191841,265663 191836,265652 191833,265648 191836,265643 191839,265638 191840,265632 191839,265631 191838,265629 191821,265626 191817,265624 191818,265623 191818,265620 191825,265618 191834,265615 191841,265612 191848,265608 191854,265603 191859,265597 191864,265592 191868,265589 191869,265586 191869,265579 191868,265575 191868,265567 191865,265559 191865,265556 191865,265552 191867,265548 191870,265543 191876,265537 191874,265533 191873,265528 191873,265523 191874,265515 191877,265491 191878,265479 191881,265468 191886,265456 191883,265445 191877,265413 191877,265399 191875,265389 191874,265372 191875,265360 191874,265334 191870,265329 191869,265398 192004,265565 192332,265564 192333,265557 192337,265556 192341,265555 192343,265555 192344,265556 192345,265559 192363,265563 192381,265564 192384,265565 192386,265580 192409,265596 192429,265599 192432,265603 192434,265610 192436,265616 192438,265621 192440,265626 192442,265648 192448,265670 192455,265673 192456,265676 192457,265679 192458,265682 192460,265694 192468,265708 192475,265717 192479,265726 192483,265729 192484,265731 192485,265734 192487,265738 192488,265748 192491,265759 192494,265772 192498,265784 192504,265807 192515,265824 192531,265847 192551,265855 192574,265865 192601,265864 192627,265864 192659,265802 192646,265842 192760,265835 192794,265823 192853,265779 192863,265804 192879,265823 192892,265812 192890,265808 192890,265807 192890,265806 192891,265805 192892,265803 192900,265801 192908,265800 192916,265800 192924,265800 192927,265772 192935,265767 192936,265765 192936,265764 192931,265761 192923,265760 192923,265757 192924,265756 192923,265729 192905,265724 192909,265715 192914,265704 192914,265698 192914,265698 192916,265695 192916,265687 192942,265704 192951,265719 192954,265715 192957,265710 192960,265706 192962,265706 192974,265707 192976,265705 192978,265704 192979,265704 192981,265701 192987,265698 192994,265695 192994,265691 
192993,265687 192994,265680 192994,265677 193008,265676 193010,265685 193012,265712 193020,265712 193017,265713 193015,265711 193012,265711 193010,265726 193011,265725 193038,265721 193041,265704 193056)," + + "(265681 194364,265681 194367,265671 194367,265671 194358,265671 194357,265681 194357,265681 194364)," + + "(265672 194259,265672 194252,265682 194252,265682 194261,265682 194262,265672 194262,265672 194259)," + + "(265660 194241,265669 194241,265669 194251,265659 194251,265659 194249,265659 194241,265660 194241)," + + "(265730 193180,265761 193187,265770 193167,265773 193196,265778 193238,265780 193284,265780 193290,265781 193296,265781 193300,265781 193305,265781 193310,265781 193315,265737 193357,265704 193365,265692 193357,265689 193334,265698 193344,265699 193335,265701 193319,265703 193305,265718 193219,265723 193191,265728 193192,265730 193180)," + + "(265829 193249,265833 193249,265833 193259,265828 193259,265823 193259,265823 193250,265823 193249,265829 193249)," + + "(265892 193510,265906 193514,266004 193467,265951 193602,265890 193576,265899 193549,265892 193510)," + + "(266286 192856,266219 192756,266168 192757,266168 192748,266169 192722,266220 192689,266281 192648,266286 192670,266318 192815,266290 192850,266286 192856)," + + "(266044 192606,266037 192606,266037 192596,266047 192596,266047 192603,266047 192606,266044 192606)," + + "(266039 192790,266039 192800,266029 192800,266029 192790,266037 192790,266039 192790)," + + "(265749 192996,265749 192986,265759 192986,265759 192996,265749 192996)," + + "(265848 192834,265848 192824,265858 192824,265858 192834,265848 192834)," + + "(265978 192980,265978 192970,265988 192970,265988 192980,265978 192980)," + + "(266088 192809,266088 192813,266080 192813,266078 192813,266078 192803,266088 192803,266088 192809)," + + "(265981 192800,265981 192790,265991 192790,265991 192800,265981 192800)," + + "(265951 192844,265951 192834,265961 192834,265961 192844,265951 192844)," + + "(265917 193026,265917 193016,265927 193016,265927 193026,265917 193026)," + + "(265926 193055,265926 193045,265936 193045,265936 193055,265926 193055)," + + "(265966 192746,265966 192736,265976 192736,265976 192746,265966 192746)," + + "(266046 192708,266046 192698,266056 192698,266056 192708,266046 192708)," + + "(265692 193744,265687 193751,265684 193755,265682 193756,265667 193763,265635 193732,265630 193728,265650 193718,265652 193717,265662 193716,265673 193718,265676 193718,265679 193718,265681 193718,265682 193719,265683 193723,265687 193723,265687 193733,265692 193744)," + + "(265611 193820,265611 193810,265621 193810,265621 193820,265611 193820)," + + "(265731 193735,265731 193725,265741 193725,265741 193735,265731 193735)," + + "(265708 193562,265708 193552,265718 193552,265718 193562,265708 193562)," + + "(265650 193477,265640 193477,265640 193467,265648 193467,265650 193467,265650 193477)," + + "(265680 193109,265680 193099,265690 193099,265690 193109,265680 193109)," + + "(265611 193335,265611 193325,265621 193325,265621 193335,265611 193335)," + + "(265610 194005,265610 193995,265620 193995,265620 194005,265610 194005)," + + "(265570 193944,265570 193934,265580 193934,265580 193944,265570 193944)," + + "(265643 193872,265643 193862,265653 193862,265653 193872,265643 193872)," + + "(265909 193141,265909 193131,265919 193131,265919 193141,265909 193141)," + + "(265800 193015,265800 193005,265810 193005,265810 193015,265800 193015)," + + "(265728 194212,265728 194202,265738 194202,265738 194212,265728 194212)," + + "(265691 194370,265691 
194360,265701 194360,265701 194370,265691 194370))," + + "((265057 193097,265050 193096,265044 193093,265043 193093,265041 193093,265038 193093,265037 193093,265035 193093,265029 193092,265031 193093,265035 193094,265060 193099,265076 193103,265058 193097,265057 193097))," + + "((265246 193239,265266 193230,265268 193230,265279 193209,265282 193211,265281 193210,265290 193199,265273 193188,265269 193196,265260 193213,265249 193234,265246 193239))," + + "((265525 192957,265525 192962,265524 192965,265526 192968,265530 192975,265533 192983,265536 192990,265540 192993,265562 193014,265575 193011,265609 193004,265606 192991,265602 192978,265607 192966,265613 192953,265573 192946,265533 192939,265527 192954,265525 192957))," + + "((265345 192929,265348 192937,265349 192941,265371 192945,265372 192945,265373 192938,265374 192935,265374 192934,265364 192919,265363 192920,265345 192929)))"; + const string s2 = "POLYGON((265433 194418,265428 194412,265430 194410,265430 194411,265425 194414,265462 194386,265466 194384,265468 194382,265469 194382,265469 194381,265471 194380,265471 194379,265471 194378,265470 194377,265469 194376,265469 194375,265468 194374,265467 194373,265467 194372,265466 194371,265453 194363,265451 194368,265451 194370,265450 194370,265449 194371,265448 194372,265448 194373,265447 194373,265446 194374,265445 194375,265444 194376,265443 194376,265442 194377,265441 194378,265440 194379,265439 194379,265438 194380,265437 194381,265436 194381,265435 194382,265434 194383,265433 194383,265432 194384,265431 194385,265430 194386,265429 194387,265428 194387,265427 194388,265426 194389,265425 194390,265424 194390,265423 194391,265422 194392,265421 194393,265420 194393,265419 194394,265418 194395,265417 194396,265416 194396,265414 194397,265414 194398,265413 194398,265412 194399,265411 194400,265410 194401,265409 194402,265408 194402,265407 194403,265406 194404,265405 194405,265404 194405,265403 194406,265402 194407,265402 194407,265401 194408,265399 194409,265398 194411,265396 194412,265395 194413,265393 194414,265392 194415,265391 194415,265390 194416,265390 194417,265389 194417,265388 194418,265387 194419,265386 194419,265385 194420,265384 194421,265383 194422,265382 194423,265381 194423,265380 194424,265379 194425,265378 194425,265377 194426,265376 194427,265374 194428,265372 194429,265371 194430,265370 194431,265330 194429,265369 194467,265379 194477,265422 194481,265427 194484,265433 194488,265467 194515,265469 194508,265487 194442,265467 194422,265466 194421,265465 194421,265464 194421,265463 194421,265448 194406,265433 194418))"; + + [Test] + //[ExpectedException(typeof(TopologyException))] + public void FailedUnionTest() + { + var reader = new WKTReader(GeometryFactory.Fixed); + Assert.IsNotNull(reader); + + var g1 = reader.Read(s1); + Assert.IsNotNull(g1); + Assert.IsFalse(g1.IsValid); + g1 = g1.Buffer(0); + + var g2 = reader.Read(s2).Buffer(0d); + Assert.IsNotNull(g2); + + var result = g1.Union(g2); + Assert.IsNotNull(result); + Debug.WriteLine(result); + } + + /// + /// + /// + [Test] + public void FixedUnionTest() + { + string s1 = "MULTIPOLYGON(((267225 195936,267063 195904,266895 195872,266825 195858,266822 195891,266850 196044,266934 196203,267047 196249,267352 196374,267281 196302,267255 196275,267222 196244,267166 196191,267160 196153,267157 196139,267196 196136,267225 195936)),((265704 193056,265703 193055,265692 193057,265678 193060,265663 193056,265659 193055,265655 193053,265653 193053,265651 193052,265648 193052,265646 193052,265631 193058,265616 193064,265612 
193066,265609 193068,265605 193071,265603 193076,265595 193089,265596 193103,265597 193107,265623 193128,265631 193130,265630 193130,265609 193142,265608 193153,265608 193157,265607 193161,265607 193163,265606 193164,265603 193169,265619 193172,265623 193173,265619 193183,265618 193185,265618 193187,265613 193209,265618 193209,265622 193209,265620 193220,265626 193220,265626 193230,265617 193230,265616 193235,265616 193237,265615 193238,265604 193252,265606 193257,265607 193259,265607 193262,265607 193264,265607 193266,265607 193270,265607 193274,265607 193279,265606 193284,265605 193289,265602 193294,265595 193312,265598 193318,265600 193322,265601 193326,265602 193330,265602 193334,265602 193336,265602 193337,265599 193344,265610 193360,265611 193361,265612 193364,265613 193369,265615 193375,265615 193377,265615 193380,265613 193393,265609 193392,265607 193392,265605 193393,265587 193403,265588 193412,265589 193419,265610 193422,265629 193424,265631 193418,265633 193409,265637 193402,265642 193392,265659 193409,265661 193410,265662 193414,265662 193416,265663 193418,265664 193419,265664 193420,265665 193422,265665 193424,265666 193426,265666 193428,265665 193452,265657 193455,265652 193457,265640 193441,265636 193435,265623 193444,265642 193493,265645 193490,265649 193487,265665 193513,265673 193526,265681 193532,265683 193534,265684 193536,265685 193540,265686 193544,265687 193545,265687 193546,265690 193576,265682 193584,265655 193612,265667 193624,265668 193624,265670 193627,265679 193624,265683 193623,265693 193623,265692 193632,265692 193633,265693 193635,265695 193642,265693 193642,265692 193643,265691 193644,265690 193644,265689 193644,265679 193645,265679 193655,265678 193655,265677 193673,265676 193682,265670 193684,265659 193688,265654 193700,265652 193704,265631 193701,265629 193701,265628 193703,265619 193718,265617 193718,265616 193709,265612 193683,265614 193678,265609 193678,265571 193681,265551 193657,265495 193716,265492 193726,265481 193765,265481 193774,265480 193846,265479 193902,265478 193936,265526 193966,265563 193990,265548 194006,265547 194006,265546 194005,265544 194004,265543 194003,265542 194003,265541 194002,265540 194002,265540 194003,265521 194040,265525 194046,265527 194049,265527 194050,265528 194059,265528 194061,265543 194059,265544 194059,265544 194060,265549 194064,265554 194068,265555 194070,265557 194073,265558 194073,265558 194074,265559 194077,265561 194079,265564 194084,265567 194088,265568 194090,265570 194090,265581 194093,265569 194117,265567 194120,265567 194124,265567 194125,265568 194126,265569 194127,265569 194128,265570 194129,265571 194130,265572 194131,265573 194132,265574 194133,265575 194134,265576 194134,265577 194135,265596 194153,265583 194158,265540 194205,265530 194216,265473 194277,265462 194289,265461 194290,265451 194299,265432 194314,265410 194332,265394 194348,265372 194370,265371 194369,265359 194355,265362 194353,265362 194352,265367 194342,265342 194356,265341 194357,265340 194358,265339 194358,265338 194359,265337 194360,265336 194361,265335 194362,265335 194363,265334 194363,265333 194364,265332 194365,265331 194366,265330 194367,265325 194347,265308 194363,265302 194369,265326 194391,265308 194398,265306 194400,265304 194401,265303 194402,265302 194402,265301 194402,265300 194402,265294 194402,265288 194404,265286 194404,265286 194405,265285 194406,265284 194407,265283 194408,265282 194409,265282 194410,265281 194411,265280 194412,265279 194413,265278 194414,265277 194415,265276 194416,265275 194418,265275 
194419,265274 194420,265273 194421,265272 194422,265272 194423,265271 194424,265270 194425,265269 194426,265268 194428,265268 194429,265267 194430,265266 194431,265266 194432,265265 194433,265264 194434,265263 194435,265262 194435,265261 194436,265260 194438,265258 194438,265253 194438,265254 194450,265254 194452,265253 194453,265252 194454,265251 194455,265243 194464,265234 194454,265238 194471,265238 194473,265237 194473,265235 194475,265234 194476,265232 194477,265231 194478,265230 194478,265229 194478,265228 194477,265227 194477,265226 194476,265225 194475,265224 194474,265224 194473,265223 194472,265222 194472,265222 194471,265221 194470,265220 194469,265217 194466,265216 194465,265166 194437,265165 194447,265164 194476,265158 194508,265158 194511,265173 194549,265176 194555,265199 194612,265205 194606,265214 194603,265238 194592,265244 194601,265246 194605,265257 194601,265263 194598,265265 194598,265266 194599,265268 194601,265270 194602,265272 194604,265274 194604,265278 194605,265281 194605,265282 194605,265283 194605,265292 194603,265290 194595,265284 194577,265320 194599,265336 194580,265351 194590,265302 194548,265300 194546,265298 194545,265295 194542,265295 194541,265298 194536,265285 194522,265285 194521,265284 194520,265283 194519,265282 194518,265281 194517,265280 194516,265279 194514,265279 194512,265247 194500,265262 194484,265277 194470,265278 194469,265287 194460,265304 194444,265306 194442,265325 194424,265330 194429,265370 194431,265371 194430,265372 194429,265374 194428,265376 194427,265377 194426,265378 194425,265379 194425,265380 194424,265381 194423,265382 194423,265383 194422,265384 194421,265385 194420,265386 194419,265387 194419,265388 194418,265389 194417,265390 194417,265390 194416,265391 194415,265392 194415,265393 194414,265395 194413,265396 194412,265398 194411,265399 194409,265401 194408,265402 194407,265403 194406,265404 194405,265405 194405,265406 194404,265407 194403,265408 194402,265409 194402,265410 194401,265411 194400,265412 194399,265413 194398,265414 194398,265414 194397,265416 194396,265417 194396,265418 194395,265419 194394,265420 194393,265421 194393,265422 194392,265423 194391,265424 194390,265425 194390,265426 194389,265427 194388,265428 194387,265429 194387,265430 194386,265431 194385,265432 194384,265433 194383,265434 194383,265435 194382,265436 194381,265437 194381,265438 194380,265439 194379,265440 194379,265441 194378,265442 194377,265443 194376,265444 194376,265445 194375,265446 194374,265447 194373,265448 194373,265448 194372,265449 194371,265450 194370,265451 194370,265451 194368,265453 194363,265466 194371,265467 194372,265467 194373,265468 194374,265469 194375,265469 194376,265470 194377,265471 194378,265471 194379,265471 194380,265469 194381,265469 194382,265468 194382,265466 194384,265462 194386,265425 194414,265430 194411,265430 194410,265428 194412,265433 194418,265448 194406,265463 194421,265464 194421,265465 194421,265466 194421,265467 194422,265487 194442,265507 194463,265548 194465,265548 194473,265578 194475,265578 194477,265579 194477,265602 194498,265595 194503,265594 194505,265593 194507,265592 194510,265590 194513,265590 194514,265589 194515,265589 194516,265589 194517,265588 194518,265587 194520,265587 194521,265586 194523,265585 194525,265585 194526,265582 194536,265579 194545,265578 194547,265577 194548,265576 194550,265575 194552,265574 194553,265573 194555,265572 194556,265571 194557,265570 194558,265570 194560,265569 194561,265569 194562,265568 194563,265568 194565,265567 194566,265566 194569,265565 
194572,265565 194574,265564 194575,265562 194579,265560 194583,265559 194585,265557 194588,265557 194590,265556 194591,265555 194592,265555 194593,265554 194594,265553 194596,265553 194597,265552 194598,265552 194599,265551 194601,265551 194602,265550 194603,265549 194605,265548 194606,265548 194607,265548 194609,265547 194623,265538 194624,265508 194628,265508 194629,265501 194629,265461 194633,265456 194639,265458 194655,265462 194682,265463 194689,265491 194705,265527 194725,265534 194729,265561 194745,265575 194752,265589 194755,265593 194755,265598 194755,265603 194755,265607 194752,265630 194734,265651 194712,265652 194710,265654 194708,265656 194707,265657 194705,265658 194704,265658 194703,265659 194702,265659 194701,265659 194700,265659 194698,265660 194697,265660 194696,265663 194693,265666 194689,265666 194688,265666 194686,265666 194683,265666 194678,265666 194677,265667 194676,265671 194671,265675 194667,265678 194663,265682 194660,265689 194653,265698 194650,265714 194645,265700 194593,265699 194589,265701 194586,265704 194583,265706 194580,265711 194573,265716 194567,265724 194559,265730 194551,265734 194547,265737 194543,265739 194539,265742 194535,265747 194528,265749 194519,265752 194507,265754 194495,265755 194490,265757 194486,265769 194467,265779 194448,265780 194447,265780 194445,265780 194444,265780 194443,265780 194442,265780 194441,265779 194440,265779 194439,265779 194437,265779 194436,265780 194435,265780 194434,265779 194433,265780 194431,265780 194430,265779 194429,265779 194428,265779 194427,265779 194426,265779 194424,265779 194423,265779 194422,265778 194421,265778 194419,265777 194418,265777 194417,265777 194416,265777 194414,265777 194413,265777 194411,265777 194410,265777 194409,265778 194408,265784 194399,265791 194390,265791 194389,265791 194388,265790 194387,265790 194386,265790 194385,265789 194384,265789 194383,265789 194381,265788 194380,265788 194379,265788 194378,265787 194376,265787 194375,265787 194374,265786 194373,265786 194372,265786 194371,265786 194370,265785 194364,265785 194358,265785 194357,265785 194356,265784 194355,265784 194353,265784 194352,265783 194351,265783 194350,265783 194348,265782 194347,265782 194346,265782 194341,265783 194336,265783 194335,265783 194334,265783 194332,265783 194331,265783 194330,265784 194328,265784 194327,265785 194326,265785 194325,265786 194323,265787 194321,265788 194319,265788 194318,265787 194318,265785 194317,265785 194318,265775 194318,265775 194313,265768 194310,265748 194302,265754 194293,265761 194282,265765 194272,265773 194252,265780 194235,265781 194231,265780 194230,265778 194225,265775 194219,265775 194218,265775 194216,265774 194215,265774 194213,265770 194202,265767 194190,265767 194189,265767 194187,265768 194181,265802 194173,265813 194153,265814 194148,265834 194139,265836 194138,265838 194136,265840 194135,265841 194135,265842 194134,265843 194133,265845 194132,265847 194130,265848 194130,265849 194129,265850 194129,265851 194128,265852 194127,265853 194127,265854 194126,265857 194121,265858 194115,265859 194113,265860 194110,265860 194108,265862 194107,265876 194093,265869 194090,265868 194089,265868 194088,265867 194087,265867 194085,265866 194084,265866 194083,265866 194082,265865 194081,265865 194079,265865 194078,265864 194077,265864 194076,265864 194075,265863 194073,265863 194071,265862 194069,265862 194068,265861 194068,265849 194059,265853 194017,265853 194016,265852 194014,265852 194013,265852 194011,265851 194010,265851 194009,265851 194008,265850 194007,265849 
194004,265848 194002,265848 194000,265847 193997,265845 193992,265845 193991,265845 193989,265853 193957,265844 193964,265806 193997,265780 194074,265743 193975,265748 193972,265748 193971,265746 193968,265740 193965,265731 193960,265727 193957,265732 193952,265733 193950,265734 193948,265735 193947,265734 193946,265734 193944,265734 193943,265734 193942,265734 193940,265734 193939,265734 193938,265734 193936,265734 193935,265734 193934,265733 193932,265733 193930,265732 193928,265726 193922,265732 193902,265732 193900,265732 193897,265730 193879,265723 193879,265716 193879,265714 193873,265744 193875,265749 193874,265715 193833,265717 193832,265717 193824,265718 193817,265718 193816,265763 193761,265764 193762,265767 193773,265768 193780,265775 193785,265783 193791,265773 193814,265770 193821,265768 193829,265766 193839,265762 193848,265759 193856,265758 193862,265758 193872,265763 193881,265768 193889,265781 193866,265790 193850,265794 193845,265840 193774,265837 193806,265837 193809,265831 193827,265860 193838,265882 193774,265905 193718,266059 193881,266139 193966,266196 193861,266170 193795,266180 193724,266194 193619,266209 193557,266221 193508,266225 193492,266224 193475,266222 193325,266223 193318,266225 193297,266227 193280,266228 193266,266234 193210,266137 193221,266121 193247,266068 193233,266066 193191,266066 193188,266064 193152,266039 193057,266035 193043,266025 192991,266019 192991,266019 192981,266023 192981,266021 192972,265989 192940,266005 192938,266006 192953,266064 192989,266071 192986,266087 192979,266104 192977,266228 192963,266223 192995,266219 193016,266270 193017,266400 192817,266413 192797,266561 192570,266615 192536,266620 192553,266632 192599,266607 192635,266547 192722,266661 192907,266693 192959,266872 192896,266899 192887,266926 192889,266972 192892,266968 192741,266967 192695,266871 192667,266737 192629,266764 192622,266727 192409,266717 192352,266808 192287,266885 192231,266873 192268,266868 192283,266894 192340,266987 192540,267003 192550,267304 192739,267285 192641,267270 192566,266933 192196,266928 192191,266906 192166,266889 192142,266873 192118,266813 192175,266797 192190,266763 192137,266358 192253,266369 192237,266370 192235,266367 192235,266296 192224,266281 192203,266023 191829,266016 191826,266005 191816,266002 191816,265998 191816,265991 191822,265991 191829,265991 191834,265989 191839,265987 191844,265983 191847,265980 191850,265976 191851,265974 191852,265969 191850,265969 191842,265967 191837,265966 191834,265964 191831,265960 191827,265958 191826,265950 191829,265949 191862,265932 191858,265932 191853,265931 191849,265929 191845,265925 191841,265922 191838,265899 191861,265888 191860,265886 191858,265883 191854,265881 191850,265880 191847,265879 191840,265866 191833,265859 191846,265852 191842,265846 191839,265840 191837,265838 191837,265837 191839,265835 191844,265834 191850,265835 191858,265834 191859,265831 191862,265828 191864,265825 191864,265821 191864,265818 191862,265802 191848,265800 191847,265796 191847,265790 191847,265785 191849,265768 191855,265765 191856,265761 191855,265755 191852,265742 191848,265739 191834,265725 191801,265720 191793,265715 191789,265709 191785,265705 191783,265696 191780,265684 191778,265681 191779,265680 191780,265679 191782,265679 191784,265683 191799,265681 191831,265677 191841,265663 191836,265652 191833,265648 191836,265643 191839,265638 191840,265632 191839,265631 191838,265629 191821,265626 191817,265624 191818,265623 191818,265620 191825,265618 191834,265615 191841,265612 191848,265608 
191854,265603 191859,265597 191864,265592 191868,265589 191869,265586 191869,265579 191868,265575 191868,265567 191865,265559 191865,265556 191865,265552 191867,265548 191870,265543 191876,265537 191874,265533 191873,265528 191873,265523 191874,265515 191877,265491 191878,265479 191881,265468 191886,265456 191883,265445 191877,265413 191877,265399 191875,265389 191874,265372 191875,265360 191874,265334 191870,265329 191869,265398 192004,265565 192332,265564 192333,265557 192337,265556 192341,265555 192343,265555 192344,265556 192345,265559 192363,265563 192381,265564 192384,265565 192386,265580 192409,265596 192429,265599 192432,265603 192434,265610 192436,265616 192438,265621 192440,265626 192442,265648 192448,265670 192455,265673 192456,265676 192457,265679 192458,265682 192460,265694 192468,265708 192475,265717 192479,265726 192483,265729 192484,265731 192485,265734 192487,265738 192488,265748 192491,265759 192494,265772 192498,265784 192504,265807 192515,265824 192531,265847 192551,265855 192574,265865 192601,265864 192627,265864 192659,265802 192646,265842 192760,265835 192794,265823 192853,265779 192863,265804 192879,265823 192892,265812 192890,265808 192890,265807 192890,265806 192891,265805 192892,265803 192900,265801 192908,265800 192916,265800 192924,265800 192927,265772 192935,265767 192936,265765 192936,265764 192931,265761 192923,265760 192923,265757 192924,265756 192923,265729 192905,265724 192909,265715 192914,265704 192914,265698 192914,265698 192916,265695 192916,265687 192942,265704 192951,265719 192954,265715 192957,265710 192960,265706 192962,265706 192974,265707 192976,265705 192978,265704 192979,265704 192981,265701 192987,265698 192994,265695 192994,265691 192993,265687 192994,265680 192994,265677 193008,265676 193010,265685 193012,265712 193020,265712 193017,265713 193015,265711 193012,265711 193010,265726 193011,265725 193038,265721 193041,265704 193056),(265681 194364,265681 194367,265671 194367,265671 194358,265671 194357,265681 194357,265681 194364),(265672 194259,265672 194252,265682 194252,265682 194261,265682 194262,265672 194262,265672 194259),(265660 194241,265669 194241,265669 194251,265659 194251,265659 194249,265659 194241,265660 194241),(265730 193180,265761 193187,265770 193167,265773 193196,265778 193238,265780 193284,265780 193290,265781 193296,265781 193300,265781 193305,265781 193310,265781 193315,265737 193357,265704 193365,265692 193357,265689 193334,265698 193344,265699 193335,265701 193319,265703 193305,265718 193219,265723 193191,265728 193192,265730 193180),(265829 193249,265833 193249,265833 193259,265828 193259,265823 193259,265823 193250,265823 193249,265829 193249),(265892 193510,265906 193514,266004 193467,265951 193602,265890 193576,265899 193549,265892 193510),(266286 192856,266219 192756,266168 192757,266168 192748,266169 192722,266220 192689,266281 192648,266286 192670,266318 192815,266290 192850,266286 192856),(266044 192606,266037 192606,266037 192596,266047 192596,266047 192603,266047 192606,266044 192606),(266039 192790,266039 192800,266029 192800,266029 192790,266037 192790,266039 192790),(265749 192996,265749 192986,265759 192986,265759 192996,265749 192996),(265848 192834,265848 192824,265858 192824,265858 192834,265848 192834),(265978 192980,265978 192970,265988 192970,265988 192980,265978 192980),(266088 192809,266088 192813,266080 192813,266078 192813,266078 192803,266088 192803,266088 192809),(265981 192800,265981 192790,265991 192790,265991 192800,265981 192800),(265951 192844,265951 192834,265961 192834,265961 
192844,265951 192844),(265917 193026,265917 193016,265927 193016,265927 193026,265917 193026),(265926 193055,265926 193045,265936 193045,265936 193055,265926 193055),(265966 192746,265966 192736,265976 192736,265976 192746,265966 192746),(266046 192708,266046 192698,266056 192698,266056 192708,266046 192708),(265692 193744,265687 193751,265684 193755,265682 193756,265667 193763,265635 193732,265630 193728,265650 193718,265652 193717,265662 193716,265673 193718,265676 193718,265679 193718,265681 193718,265682 193719,265683 193723,265687 193723,265687 193733,265692 193744),(265611 193820,265611 193810,265621 193810,265621 193820,265611 193820),(265731 193735,265731 193725,265741 193725,265741 193735,265731 193735),(265708 193562,265708 193552,265718 193552,265718 193562,265708 193562),(265650 193477,265640 193477,265640 193467,265648 193467,265650 193467,265650 193477),(265680 193109,265680 193099,265690 193099,265690 193109,265680 193109),(265611 193335,265611 193325,265621 193325,265621 193335,265611 193335),(265610 194005,265610 193995,265620 193995,265620 194005,265610 194005),(265570 193944,265570 193934,265580 193934,265580 193944,265570 193944),(265643 193872,265643 193862,265653 193862,265653 193872,265643 193872),(265909 193141,265909 193131,265919 193131,265919 193141,265909 193141),(265800 193015,265800 193005,265810 193005,265810 193015,265800 193015),(265728 194212,265728 194202,265738 194202,265738 194212,265728 194212),(265691 194370,265691 194360,265701 194360,265701 194370,265691 194370)),((265057 193097,265050 193096,265044 193093,265043 193093,265041 193093,265038 193093,265037 193093,265035 193093,265029 193092,265031 193093,265035 193094,265060 193099,265076 193103,265058 193097,265057 193097)),((265246 193239,265266 193230,265268 193230,265279 193209,265282 193211,265281 193210,265290 193199,265273 193188,265269 193196,265260 193213,265249 193234,265246 193239)),((265525 192957,265525 192962,265524 192965,265526 192968,265530 192975,265533 192983,265536 192990,265540 192993,265562 193014,265575 193011,265609 193004,265606 192991,265602 192978,265607 192966,265613 192953,265573 192946,265533 192939,265527 192954,265525 192957)),((265345 192929,265348 192937,265349 192941,265371 192945,265372 192945,265373 192938,265374 192935,265374 192934,265364 192919,265363 192920,265345 192929)))"; + string s2 = "POLYGON((265433 194418,265428 194412,265430 194410,265430 194411,265425 194414,265462 194386,265466 194384,265468 194382,265469 194382,265469 194381,265471 194380,265471 194379,265471 194378,265470 194377,265469 194376,265469 194375,265468 194374,265467 194373,265467 194372,265466 194371,265453 194363,265451 194368,265451 194370,265450 194370,265449 194371,265448 194372,265448 194373,265447 194373,265446 194374,265445 194375,265444 194376,265443 194376,265442 194377,265441 194378,265440 194379,265439 194379,265438 194380,265437 194381,265436 194381,265435 194382,265434 194383,265433 194383,265432 194384,265431 194385,265430 194386,265429 194387,265428 194387,265427 194388,265426 194389,265425 194390,265424 194390,265423 194391,265422 194392,265421 194393,265420 194393,265419 194394,265418 194395,265417 194396,265416 194396,265414 194397,265414 194398,265413 194398,265412 194399,265411 194400,265410 194401,265409 194402,265408 194402,265407 194403,265406 194404,265405 194405,265404 194405,265403 194406,265402 194407,265402 194407,265401 194408,265399 194409,265398 194411,265396 194412,265395 194413,265393 194414,265392 194415,265391 194415,265390 194416,265390 194417,265389 
194417,265388 194418,265387 194419,265386 194419,265385 194420,265384 194421,265383 194422,265382 194423,265381 194423,265380 194424,265379 194425,265378 194425,265377 194426,265376 194427,265374 194428,265372 194429,265371 194430,265370 194431,265330 194429,265369 194467,265379 194477,265422 194481,265427 194484,265433 194488,265467 194515,265469 194508,265487 194442,265467 194422,265466 194421,265465 194421,265464 194421,265463 194421,265448 194406,265433 194418))"; + + var reader = new WKTReader(GeometryFactory.Fixed); + Assert.IsNotNull(reader); + + var g1 = reader.Read(s1).Buffer(0); + Assert.IsNotNull(g1); + + var g2 = reader.Read(s2).Buffer(0); + Assert.IsNotNull(g2); + + var result = g1.Union(g2); + Assert.IsNotNull(result); + Debug.WriteLine(result); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.Test/WKTReaderTests.cs b/test/NetTopologySuite.IO.Esri.Test/WKTReaderTests.cs new file mode 100644 index 0000000..8ab6176 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.Test/WKTReaderTests.cs @@ -0,0 +1,26 @@ +using NetTopologySuite.Geometries; +using NUnit.Framework; +using NetTopologySuite.IO.Shapefile; + +namespace NetTopologySuite.IO.ShapeFile.Test +{ + public class WKTReaderTests + { + private readonly WKTReader _wktReader; + + public WKTReaderTests() + { + _wktReader = new WKTReader(new GeometryFactory(new PrecisionModel(), 4326)) { IsOldNtsCoordinateSyntaxAllowed = false }; + } + + [Test] + public void TestShapeType() + { + string wkt = "POLYGON ((-86.7605020509258 41.5101338613656, -86.7604972038273 41.5100611525915, -86.7604971708084 41.5100606308085, -86.7604611720717 41.5094596307695, -86.7604611426546 41.5094591103497, -86.7604291439208 41.5088571103154, -86.760429130715 41.508856853856, -86.7603991319814 41.5082548538241, -86.7603991259966 41.5082547317887, -86.7603701303631 41.5076537960468, -86.7603401446338 41.5070530565908, -86.7603071566895 41.5064532528163, -86.7603071500912 41.506453131098, -86.7602814240795 41.5059715533315, -86.7605549835241 41.5059607024218, -86.7605808466407 41.5064448078787, -86.760613844555 41.5070447469854, -86.7606138651484 41.5070451395365, -86.7606438664126 41.5076461395046, -86.7606438727239 41.5076462680791, -86.7606728710439 41.5082472070294, -86.7607028628788 41.5088490177453, -86.7607348434949 41.5094506292495, -86.7607708135428 41.5100511081057, -86.760776407335 41.5101350123382, -86.7605020509258 41.5101338613656))"; + var geometry = (Polygon)_wktReader.Read(wkt); + + Assert.IsTrue(geometry.Shell.CoordinateSequence.Ordinates == Ordinates.XY); + Assert.IsTrue(geometry.GetShapeType() == ShapeType.Polygon); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/App.config b/test/NetTopologySuite.IO.Esri.TestConsole/App.config new file mode 100644 index 0000000..56efbc7 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/App.config @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/NetTopologySuite.IO.Esri.TestConsole.csproj b/test/NetTopologySuite.IO.Esri.TestConsole/NetTopologySuite.IO.Esri.TestConsole.csproj new file mode 100644 index 0000000..09d8772 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/NetTopologySuite.IO.Esri.TestConsole.csproj @@ -0,0 +1,86 @@ + + + + + Debug + AnyCPU + {AA37EFC6-737F-4E8A-A79C-2D179C1B38EC} + Exe + NetTopologySuite.IO.Esri.TestConsole + NetTopologySuite.IO.Esri.TestConsole + v4.7.2 + 512 + true + true + + + AnyCPU + true + full + false + bin\Debug\ + TRACE;DEBUG;DEBUG_BINARY + prompt + 4 + + + AnyCPU 
+ pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {2be3cbae-cbf5-4b67-b1cb-ca64365a6157} + NetTopologySuite.IO.Esri.Core + + + {960d89db-f534-4207-931e-4bb8b4dca483} + NetTopologySuite.IO.Esri + + + + + 2.1.0 + + + 2.1.0 + + + + \ No newline at end of file diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Program.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Program.cs new file mode 100644 index 0000000..63ff707 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Program.cs @@ -0,0 +1,128 @@ +using NetTopologySuite.IO.Esri.TestConsole.Tests; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole +{ + class Program + { + + private static Test[] TestList = { + new DbaseRreaderTest("arcmap/shp/fields_utf8.dbf"), + new ReadArcMapFilesTest(), + new CopyArcMapFilesTest(), + new CreateShapefileTest(), + new CoreReadDbf(), + new CoreReadShp(), + new CoreReadShapefile1(), + new CoreReadShapefile2(), + new CoreWriteShapefile1(), + new CoreWriteShapefile2(), + new NtsReadShapefile(), + new NtsWriteShapefile1(), + }; + + + static void Main(string[] args) + { + Console.WriteLine("NetTopologySuite.IO.Esri.TestConsole"); + + if (string.IsNullOrEmpty(Test.TestDataDir)) + { + Console.WriteLine("ERROR: TestData folder not found."); + Console.WriteLine("Press any key to exit."); + Console.ReadKey(); + return; + } + + Console.WriteLine("TestData directory: " + Test.TestDataDir); + Console.WriteLine(); + + WriteTestList(); + + var testNumber = Console.ReadLine(); + while (!string.IsNullOrEmpty(testNumber)) + { + RunTest(testNumber); + WriteTestList(); + testNumber = Console.ReadLine(); + } + } + + private static void WriteTestList() + { + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("TEST LIST"); + Console.WriteLine("---------"); + Console.ForegroundColor = ConsoleColor.DarkCyan; + for (int i = 0; i < TestList.Length; i++) + { + var test = TestList[i]; + Console.WriteLine((i + 1).ToString() + ": " + test.ToString()); + } + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.Write("Type a test number or press ENTER to exit: "); + } + + private static void WriteError(Exception ex, bool exitApp = false) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine("ERROR: " + ex.Message); + ex = ex.InnerException; + while (ex != null) + { + Console.WriteLine("- " + ex.Message); + ex = ex.InnerException; + } + if (exitApp) + { + Console.WriteLine("Press any key to exit."); + Console.ReadKey(); + Environment.Exit(1); + } + } + + private static void RunTest(string testNumber) + { + Console.Clear(); + Console.ForegroundColor = ConsoleColor.Gray; + if (int.TryParse(testNumber, out int number) && number > 0 && number <= TestList.Length) + { + var test = TestList[number - 1]; + var testName = test.ToString(); + Console.WriteLine(testName); + Console.WriteLine(new string('=', testName.Length)); + Console.WriteLine(); + + try + { + test.Run(); + } + catch (Exception ex) + { + WriteError(ex); + throw; + } + finally + { + testName = test.Title + " finished."; + Console.WriteLine(testName); + Console.WriteLine(new string('=', testName.Length)); + Console.WriteLine(); + } + + } + else + { + Console.WriteLine("Invalid test number."); + } + } + } +} diff --git
a/test/NetTopologySuite.IO.Esri.TestConsole/Properties/AssemblyInfo.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Properties/AssemblyInfo.cs new file mode 100644 index 0000000..39e5960 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("NetTopologySuite.IO.Esri.TestConsole")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("NetTopologySuite.IO.Esri.TestConsole")] +[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("aa37efc6-737f-4e8a-a79c-2d179c1b38ec")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadDbf.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadDbf.cs new file mode 100644 index 0000000..430dfa9 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadDbf.cs @@ -0,0 +1,34 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CoreReadDbf : Test + { + public override void Run() + { + var dbfPath = GetTestFilePath("arcmap/shp/pt_utf8.dbf"); + + using (var dbf = new DbfReader(dbfPath)) + { + foreach (var field in dbf.Fields) + { + Console.WriteLine(field); + } + + foreach (var record in dbf) + { + Console.WriteLine("Record ID: " + record["Id"]); + foreach (var attr in record) + { + Console.WriteLine($" {attr.Key}: {attr.Value}"); + } + } + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShapefile1.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShapefile1.cs new file mode 100644 index 0000000..c72bcaa --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShapefile1.cs @@ -0,0 +1,38 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CoreReadShapefile1 : Test + { + public override void Run() + { + var shpPath = GetTestFilePath("arcmap/shp/pt_utf8.shp"); + + using (var shp = new ShapefilePointReader(shpPath)) + { + Console.WriteLine(shp.ShapeType); + foreach (var field in shp.Fields) + { + 
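+ // Print the definition of each attribute field exposed by the reader.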
Console.WriteLine(field); + } + + foreach (var feature in shp) + { + Console.WriteLine("Record ID: " + feature.Attributes["Id"]); + foreach (var attr in feature.Attributes) + { + Console.WriteLine($" {attr.Key}: {attr.Value}"); + } + Console.WriteLine($" SHAPE: {feature.Shape}"); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShapefile2.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShapefile2.cs new file mode 100644 index 0000000..ee773d6 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShapefile2.cs @@ -0,0 +1,41 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CoreReadShapefile2 : Test + { + public override void Run() + { + var shpPath = GetTestFilePath("arcmap/shp/pt_utf8.shp"); + + using (var shp = ShapefileReader.Open(shpPath)) + { + Console.WriteLine(shp.ShapeType); + foreach (var field in shp.Fields) + { + Console.WriteLine(field); + } + + while (shp.Read(out var deleted)) + { + if (deleted) + continue; + + Console.WriteLine("Record ID: " + shp.Fields["Id"].Value); + foreach (var field in shp.Fields) + { + Console.WriteLine($" {field.Name}: {field.Value}"); + } + Console.WriteLine($" SHAPE: {shp.Shape}"); + } + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShp.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShp.cs new file mode 100644 index 0000000..e2a445b --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreReadShp.cs @@ -0,0 +1,29 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CoreReadShp : Test + { + public override void Run() + { + var shpPath = GetTestFilePath("arcmap/shp/pt_utf8.shp"); + + using (var shpStream = File.OpenRead(shpPath)) + using (var shp = new ShpPointReader(shpStream)) + { + Console.WriteLine(shp.ShapeType); + while (shp.Read()) + { + Console.WriteLine(shp.Shape); + } + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreWriteShapefile1.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreWriteShapefile1.cs new file mode 100644 index 0000000..98879a0 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreWriteShapefile1.cs @@ -0,0 +1,59 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CoreWriteShapefile1 : Test + { + public override void Run() + { + var shpPath = GetTempFilePath("abcd1.shp"); + + var features = new List<ShapefileFeature>(); + for (int i = 1; i < 5; i++) + { + var attributes = new Dictionary<string, object>(); + attributes["date"] = new DateTime(2000, 1, i + 1); + attributes["float"] = i * 0.1; + attributes["int"] = i; + attributes["logical"] = i % 2 == 0; + attributes["text"] = i.ToString("0.00"); + + var line = new List<ShpCoordinates>(); + line.Add(new ShpCoordinates(i, i + 1, i)); + line.Add(new ShpCoordinates(i, i, i)); +
line.Add(new ShpCoordinates(i + 1, i, i)); + + var shapeParts = new List<List<ShpCoordinates>>(); + shapeParts.Add(line); + + var feature = new ShapefileFeature(shapeParts, attributes); + features.Add(feature); + } + + var dateField = DbfField.Create("date", typeof(DateTime)); + var floatField = DbfField.Create("float", typeof(double)); + var intField = DbfField.Create("int", typeof(int)); + var logicalField = DbfField.Create("logical", typeof(bool)); + var textField = DbfField.Create("text", typeof(string)); + + using (var shp = new ShapefileMultiPartWriter(shpPath, ShapeType.PolyLine, dateField, floatField, intField, logicalField, textField)) + { + shp.Write(features); + } + + foreach (var feature in Shapefile.Core.ShapefileReader.ReadAll(shpPath)) + { + Console.WriteLine(feature.Attributes); + Console.WriteLine(feature.Shape); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreWriteShapefile2.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreWriteShapefile2.cs new file mode 100644 index 0000000..726cb64 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/CoreWriteShapefile2.cs @@ -0,0 +1,49 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile; +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CoreWriteShapefile2 : Test + { + public override void Run() + { + var shpPath = GetTempFilePath("abcd2.shp"); + + var dateField = new DbfDateField("date"); + var floatField = new DbfFloatField("float"); + var intField = new DbfNumericField("int"); + var logicalField = new DbfLogicalField("logical"); + var textField = new DbfCharacterField("text"); + + using (var shp = new ShapefileMultiPartWriter(shpPath, ShapeType.PolyLine, dateField, floatField, intField, logicalField, textField)) + { + for (int i = 1; i < 5; i++) + { + // Avoid expensive boxing and unboxing of value types + dateField.DateValue = new DateTime(2000, 1, i + 1); + floatField.NumericValue = i * 0.1; + intField.NumericValue = i; + logicalField.LogicalValue = i % 2 == 0; + textField.StringValue = i.ToString("0.00"); + + // Avoid reallocating a new ShpCoordinates[] array over and over.
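+ // The writer exposes a single reusable Shape buffer: Clear() resets it, StartNewPart() opens a new part, and AddPoint(x, y, z) appends a vertex to that part.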
+ shp.Shape.Clear(); + shp.Shape.StartNewPart(); + shp.Shape.AddPoint(i, i + 1, i); + shp.Shape.AddPoint(i, i, i); + shp.Shape.AddPoint(i + 1, i, i); + + shp.Write(); + Console.WriteLine("Feature number " + i + " was written."); + } + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/NtsReadShapefile.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/NtsReadShapefile.cs new file mode 100644 index 0000000..d38db7e --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/NtsReadShapefile.cs @@ -0,0 +1,29 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class NtsReadShapefile : Test + { + public override void Run() + { + var shpPath = GetTestFilePath("arcmap/shp/pt_utf8.shp"); + + foreach (var feature in ShapefileReader.ReadAll(shpPath)) + { + Console.WriteLine("Record ID: " + feature.Attributes["Id"]); + foreach (var attrName in feature.Attributes.GetNames()) + { + Console.WriteLine($" {attrName}: {feature.Attributes[attrName]}"); + } + Console.WriteLine($" SHAPE: {feature.Geometry}"); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Samples/NtsWriteShapefile.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/NtsWriteShapefile.cs new file mode 100644 index 0000000..aea315b --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Samples/NtsWriteShapefile.cs @@ -0,0 +1,43 @@ +using NetTopologySuite.Features; +using NetTopologySuite.Geometries; +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class NtsWriteShapefile1 : Test + { + public override void Run() + { + var shpPath = GetTempFilePath("abcd3.shp"); + + var features = new List<Feature>(); + for (int i = 1; i < 5; i++) + { + var attributes = new AttributesTable(); + attributes.Add("date", new DateTime(2000, 1, i + 1)); + attributes.Add("float", i * 0.1); + attributes.Add("int", i); + attributes.Add("logical", i % 2 == 0); + attributes.Add("text", i.ToString("0.00")); + + var lineCoords = new List<Coordinate>(); + lineCoords.Add(new CoordinateZ(i, i + 1, i)); + lineCoords.Add(new CoordinateZ(i, i, i)); + lineCoords.Add(new CoordinateZ(i + 1, i, i)); + var line = new LineString(lineCoords.ToArray()); + + var feature = new Feature(line, attributes); + features.Add(feature); + } + + features.SaveToShapefile(shpPath); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ArcMapShapefilesTest.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ArcMapShapefilesTest.cs new file mode 100644 index 0000000..ae41345 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ArcMapShapefilesTest.cs @@ -0,0 +1,33 @@ +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public abstract class ArcMapShapefilesTest : Test + { + public override void Run() + { + var filePath = GetTestFilePath("arcmap/shp/point.shp"); + var shpDataDir = Path.GetDirectoryName(filePath); + + foreach (var shpFilePath in Directory.GetFiles(shpDataDir, "*.shp")) + {
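+ // Run the concrete test (the RunShapefile override) against every ArcMap sample shapefile found in the test data folder.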
+ var fileName = "arcmap/shp/" + Path.GetFileName(shpFilePath); + //if (!fileName.Contains("pt_utf8")) + // continue; + + using (var reader = ShapefileReader.Open(shpFilePath)) + { + RunShapefile(fileName, reader); + } + } + } + + protected abstract void RunShapefile(string fileName, ShapefileReader shp); + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/CopyArcMapFilesTest.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/CopyArcMapFilesTest.cs new file mode 100644 index 0000000..3c76b11 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/CopyArcMapFilesTest.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using NetTopologySuite.IO; +using NetTopologySuite.IO.Shapefile.Core; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CopyArcMapFilesTest : ArcMapShapefilesTest + { + protected override void RunShapefile(string srcFile, ShapefileReader src) + { + Console.WriteLine(srcFile); + var copyFile = CreateFileCopyDir(srcFile); + srcFile = GetTestFilePath(srcFile); + + + + using (var copy = ShapefileWriter.Open(copyFile, src.ShapeType, src.Fields, src.Encoding, src.Projection)) + { + copy.Write(src.ToArray()); + } + + Console.WriteLine("==="); + + using (var copyReader = ShapefileReader.Open(copyFile)) + { + copyReader.ToArray(); + } + + CompareFiles(srcFile, copyFile, ".shp"); + CompareFiles(srcFile, copyFile, ".shx"); + CompareFiles(srcFile, copyFile, ".dbf"); + CompareFiles(srcFile, copyFile, ".cpg"); + CompareFiles(srcFile, copyFile, ".prj"); + + Console.WriteLine(); + } + + public void CompareFiles(string file1, string file2, string ext) + { + ext = ext.ToLowerInvariant(); + file1 = Path.ChangeExtension(file1, ext); + file2 = Path.ChangeExtension(file2, ext); + + if (!File.Exists(file1)) + return; + + Console.Write(Path.GetFileName(file2).PadRight(20) + " "); + Console.ForegroundColor = ConsoleColor.Red; + + if (!File.Exists(file2)) + { + Console.WriteLine("file does not exists."); + Console.ResetColor(); + return; + } + + var bytes1 = File.ReadAllBytes(file1); + var bytes2 = File.ReadAllBytes(file2); + if (bytes1.Length != bytes2.Length) + { + Console.WriteLine($"files have different size: {bytes1.Length} | {bytes2.Length}" ); + Console.ResetColor(); + return; + } + + var hasErrors = false; + for (int i = 0; i < bytes1.Length; i++) + { + if (hasErrors && IsFileHeaderEnd(i, ext)) + { + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.WriteLine("--- File header end ---"); + Console.ForegroundColor = ConsoleColor.Red; + } + + if (i > 0 && i < 4 && ext == ".dbf") + continue; // DBF file date + + if (WriteDifferentBytes(i, bytes1[i], bytes2[i], !hasErrors)) + hasErrors = true; + } + + if (!hasErrors) + { + Console.ForegroundColor = ConsoleColor.Green; + Console.WriteLine("OK"); + } + Console.ResetColor(); + } + + public bool IsFileHeaderEnd(int pos, string ext) + { + if ((ext == ".shp" || ext == ".shx") && pos == 100) + return true; + + if (ext == ".dbf" && pos == 32) + return true; + + return false; + } + + public bool WriteDifferentBytes(int index, byte b1, byte b2, bool newLine) + { + if (b1 == b2) + return false; + + + var sb = new StringBuilder(); + sb.Append("- byte[" + index + "]: "); + + sb.Append(b1.ToString().PadLeft(3)); + sb.Append(" | "); + sb.Append(b2.ToString().PadLeft(3)); + + sb.Append(" '" + (char)b1); + sb.Append("' | '"); + sb.Append( (char)b2 + "'"); + sb.Replace(char.MinValue, '▬'); + + if 
(newLine) + Console.WriteLine(); + Console.WriteLine(sb.ToString()); + + return true; + } + + public void CompareShp(string shpFile1, string shpFile2) + { + + var differences = new List<string>(); + using (var shp1 = ShapefileReader.Open(shpFile1)) + using (var shp2 = ShapefileReader.Open(shpFile2)) + { + //shp1.GetBinaryDiff(shp2, differences); + } + + foreach (var diff in differences) + { + Console.WriteLine(diff); + } + + } + + + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/CreateShapefileTest.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/CreateShapefileTest.cs new file mode 100644 index 0000000..e7bc99a --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/CreateShapefileTest.cs @@ -0,0 +1,47 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class CreateShapefileTest : Test + { + public override void Run() + { + var shpPath = GetTempFilePath("point.shp"); + var dateField = new DbfDateField("date"); + var floatField = new DbfFloatField("float"); + var intField = new DbfNumericField("int"); + var logicalField = new DbfLogicalField("logical"); + var textField = new DbfCharacterField("text"); + + using (var shp = new ShapefilePointWriter(shpPath, Shapefile.ShapeType.Point, dateField, floatField, intField, logicalField, textField)) + { + for (int i = 1; i < 5; i++) + { + dateField.DateValue = new DateTime(2000, 1, i + 1); + floatField.NumericValue = i * 0.1; + intField.NumericValue = i; + logicalField.LogicalValue = i % 2 == 0; + textField.StringValue = i.ToString("0.00"); + + shp.Point = new ShpCoordinates(i, i + 1, i + 2); + + shp.Write(); + Console.WriteLine("Record number " + i + " was written."); + } + Console.WriteLine(); + } + + using (var shp = new Shapefile.Core.ShapefilePointReader(shpPath)) + { + WriteFeatures(shp); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/DbaseRreaderTest.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/DbaseRreaderTest.cs new file mode 100644 index 0000000..4f7b800 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/DbaseRreaderTest.cs @@ -0,0 +1,32 @@ +using NetTopologySuite.IO.Dbf; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class DbaseRreaderTest : FileTest + { + public DbaseRreaderTest(string dbfPath) : base(dbfPath) + { + Title = "Read DBF file"; + } + + public override void Run() + { + var fullPath = GetTestFilePath(Path); + + using (var dbfReader = new DbfReader(fullPath)) + { + WriteFields(dbfReader); + } + } + + + + + + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ReadArcMapFilesTest.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ReadArcMapFilesTest.cs new file mode 100644 index 0000000..cb11040 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ReadArcMapFilesTest.cs @@ -0,0 +1,18 @@ +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class ReadArcMapFilesTest : ArcMapShapefilesTest + { + protected override void RunShapefile(string fileName, ShapefileReader reader) + { +
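+ // Write a section header for the file, then dump its field definitions and records.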
WriteSectionTitle(fileName); + WriteFeatures(reader); + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ShapeFileReaderTest.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ShapeFileReaderTest.cs new file mode 100644 index 0000000..407929a --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/ShapeFileReaderTest.cs @@ -0,0 +1,28 @@ +using NetTopologySuite.IO.Shapefile.Core; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public class ShapeFileReaderTest : DbaseRreaderTest + { + + public ShapeFileReaderTest(string path) : base(path) + { + Title = "Read SHP file"; + } + + public override void Run() + { + var fullPath = GetTestFilePath(Path); + + using (var shp = new ShapefilePointReader(fullPath)) + { + WriteFeatures(shp); + } + } + } +} diff --git a/test/NetTopologySuite.IO.Esri.TestConsole/Tests/Test.cs b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/Test.cs new file mode 100644 index 0000000..c6e3ea8 --- /dev/null +++ b/test/NetTopologySuite.IO.Esri.TestConsole/Tests/Test.cs @@ -0,0 +1,211 @@ +using NetTopologySuite.IO.Dbf; +using NetTopologySuite.IO.Shapefile; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace NetTopologySuite.IO.Esri.TestConsole.Tests +{ + public abstract class Test + { + public string Title { get; protected set; } + + public Test() + { + Title = GetType().Name; + } + + public abstract void Run(); + + public static readonly string FieldSpace = " ".PadRight(12); + + public static string TestDataDir = GetTestDataDir(Assembly.GetExecutingAssembly().Location); + + private static string GetTestDataDir(string dir) + { + dir = Path.GetDirectoryName(dir); + var testDataDir = Path.Combine(dir, "TestData"); + + if (Directory.Exists(testDataDir)) + return testDataDir; + + if (dir.Length < 4) // "C:\" + { + return ""; + } + + return GetTestDataDir(dir); + } + + public static string GetTestFilePath(string filePath) + { + if (File.Exists(filePath)) + return filePath; + + var path = Path.Combine(TestDataDir, filePath); + path = Path.GetFullPath(path); + if (!File.Exists(path)) + { + throw new FileNotFoundException("Test file not found: " + filePath); + } + return path; + } + + public static string GetTempFilePath(string fileName) + { + var tempFilePath = Path.Combine(TestDataDir, "temp", fileName); + Directory.CreateDirectory(Path.GetDirectoryName(tempFilePath)); + return tempFilePath; + } + + public static string CreateFileCopyDir(string sourceFilePath) + { + var filePath = GetTestFilePath(sourceFilePath); // arcmap/shp/point.shp + var copyDirPath = Path.GetDirectoryName(filePath) + "-copy"; // arcmap/shp => arcmap/shp-copy + Directory.CreateDirectory(copyDirPath); + + return Path.Combine(copyDirPath, Path.GetFileName(sourceFilePath)); // arcmap/shp-copy/point.shp + } + + public override string ToString() + { + return Title; + } + + protected void WriteFieldNames(IReadOnlyList<DbfField> fields) + { + Console.WriteLine("FIELD LIST"); + Console.WriteLine("----------"); + foreach (var field in fields) + { + Console.WriteLine(" " + field.Name.PadRight(10) + " [" + field.FieldType.ToString().PadRight(9) + field.Length.ToString().PadLeft(4) + field.Precision.ToString().PadLeft(3) + "]"); + } + Console.WriteLine(); + } + + protected void WriteFieldValues(IReadOnlyList<DbfField> fields, IReadOnlyDictionary<string, object> values)
+ { + Console.WriteLine(); + foreach (var field in fields) + { + WriteFieldValue(field.Name, values[field.Name]); + } + } + + protected void WriteFieldValue(string name, object value) + { + name = name + ": "; + Console.WriteLine(name.PadRight(12) + ToText(value)); + } + + public void WriteFields(DbfReader dbf) + { + WriteFieldNames(dbf.Fields); + foreach (var values in dbf) + { + WriteFieldValues(dbf.Fields, values); + + } + } + + protected void WriteShape(ShapeType type, IReadOnlyList<IReadOnlyList<ShpCoordinates>> shape) + { + if (shape.Count < 1 || shape[0].Count < 1) + { + WriteFieldValue("SHAPE", "NullShape"); + return; + } + + WriteFieldValue("SHAPE", type); + for (int i = 0; i < shape.Count; i++) + { + if (shape.Count > 1) + Console.WriteLine(FieldSpace + "Part " + (i + 1) + ":"); + + var part = shape[i]; + foreach (var pt in part) + { + Console.WriteLine(FieldSpace + " " + pt); + } + } + if (shape.Count > 1) + Console.WriteLine(FieldSpace + "Parts end."); + + } + + public void WriteFeature(ShapeType type, IReadOnlyList<DbfField> fields, Shapefile.Core.ShapefileFeature feature) + { + WriteFieldValues(fields, feature.Attributes); + WriteShape(type, feature.Shape); + } + + public void WriteFeatures(Shapefile.Core.ShapefileReader shp) + { + WriteFieldNames(shp.Fields); + + WriteRecordListHeader(); + foreach (var feature in shp) + { + WriteFeature(shp.ShapeType, shp.Fields, feature); + Console.WriteLine(); + } + } + + + protected string ToText(object value) + { + if (value == null) + return ""; + + if (value is string s) + return "'" + s + "'"; + + return value.ToString(); + } + protected void WriteRecordListHeader() + { + Console.WriteLine("RECORD LIST"); + Console.WriteLine("-----------"); + Console.WriteLine(); + } + + public void WriteSectionTitle(string title) + { + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine(); + Console.WriteLine(new string('_', 80)); + Console.WriteLine(title); + Console.WriteLine(new string('-', 80)); + Console.WriteLine(); + Console.ResetColor(); + } + + public void WriteValidationResult(bool isValid, string message) + { + Console.ForegroundColor = isValid ?
ConsoleColor.Green : ConsoleColor.Red; + Console.WriteLine(message); + Console.ResetColor(); + } + } + + + + public abstract class FileTest : Test + { + protected readonly string Path; + + public FileTest(string dbfPath) + { + Path = dbfPath; + } + + public override string ToString() + { + return $"{Title}: {Path}"; + } + } +} diff --git a/test/NetTopologySuite.IO.ShapeFile.Test/NetTopologySuite.IO.ShapeFile.Test.csproj b/test/NetTopologySuite.IO.ShapeFile.Test/NetTopologySuite.IO.ShapeFile.Test.csproj index 4932dd2..1639594 100644 --- a/test/NetTopologySuite.IO.ShapeFile.Test/NetTopologySuite.IO.ShapeFile.Test.csproj +++ b/test/NetTopologySuite.IO.ShapeFile.Test/NetTopologySuite.IO.ShapeFile.Test.csproj @@ -13,8 +13,8 @@ - + diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/CommonHelpers.cs b/test/NetTopologySuite.IO.ShapefileNG.Test/CommonHelpers.cs new file mode 100644 index 0000000..acbe7cd --- /dev/null +++ b/test/NetTopologySuite.IO.ShapefileNG.Test/CommonHelpers.cs @@ -0,0 +1,7 @@ +using System; +using System.IO; + +internal sealed class CommonHelpers +{ + public static readonly string TestShapefilesDirectory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "TestShapefiles"); +} diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/NetTopologySuite.IO.ShapefileNG.Test.csproj b/test/NetTopologySuite.IO.ShapefileNG.Test/NetTopologySuite.IO.ShapefileNG.Test.csproj new file mode 100644 index 0000000..d49df2b --- /dev/null +++ b/test/NetTopologySuite.IO.ShapefileNG.Test/NetTopologySuite.IO.ShapefileNG.Test.csproj @@ -0,0 +1,19 @@ + + + + netcoreapp3.1 + + true + + NetTopologySuite.IO + + + + + + + + + + + diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/ShapefileSpanReaderNGTest.cs b/test/NetTopologySuite.IO.ShapefileNG.Test/ShapefileSpanReaderNGTest.cs new file mode 100644 index 0000000..b120c53 --- /dev/null +++ b/test/NetTopologySuite.IO.ShapefileNG.Test/ShapefileSpanReaderNGTest.cs @@ -0,0 +1,54 @@ +using System; +using System.IO; +using NetTopologySuite.IO.ShapeRecords; +using NUnit.Framework; + +namespace NetTopologySuite.IO +{ + public sealed class ShapefileSpanReaderNGTest + { + [Test] + public void BasicPointsTest() + { + byte[] mainFile = File.ReadAllBytes(Path.Combine(CommonHelpers.TestShapefilesDirectory, "points.shp")); + byte[] indexFile = File.ReadAllBytes(Path.Combine(CommonHelpers.TestShapefilesDirectory, "points.shx")); + var reader = new ShapefileSpanReaderNG(mainFile, indexFile); + Assert.That(reader.RecordCount, Is.EqualTo(2)); + + var dodgePark = reader.GetPointXYRecord(0); + Assert.That(dodgePark.X, Is.EqualTo(-83.01107)); + Assert.That(dodgePark.Y, Is.EqualTo(42.59286)); + + var joeLouis = reader.GetPointXYRecord(1); + Assert.That(joeLouis.X, Is.EqualTo(-83.05270)); + Assert.That(joeLouis.Y, Is.EqualTo(42.32529)); + } + + [Test] + public void BasicMultiPointsTest() + { + byte[] mainFile = File.ReadAllBytes(Path.Combine(CommonHelpers.TestShapefilesDirectory, "multipoints.shp")); + byte[] indexFile = File.ReadAllBytes(Path.Combine(CommonHelpers.TestShapefilesDirectory, "multipoints.shx")); + var reader = new ShapefileSpanReaderNG(mainFile, indexFile); + Assert.That(reader.RecordCount, Is.EqualTo(2)); + + var mp1 = reader.GetMultiPointXYRecord(0); + Assert.That(mp1.Points.Length, Is.EqualTo(6)); + Assert.That(mp1.Points[0], Is.EqualTo(new PointXYRecordNG(-83.163443, 42.462961))); + Assert.That(mp1.Points[1], Is.EqualTo(new PointXYRecordNG(-83.149998, 42.465838))); + Assert.That(mp1.Points[2], Is.EqualTo(new PointXYRecordNG(-83.152076, 42.457263))); + 
Assert.That(mp1.Points[3], Is.EqualTo(new PointXYRecordNG(-83.168934, 42.465194))); + Assert.That(mp1.Points[4], Is.EqualTo(new PointXYRecordNG(-83.170986, 42.457793))); + Assert.That(mp1.Points[5], Is.EqualTo(new PointXYRecordNG(-83.180223, 42.465100))); + + var mp2 = reader.GetMultiPointXYRecord(1); + Assert.That(mp2.Points.Length, Is.EqualTo(6)); + Assert.That(mp2.Points[0], Is.EqualTo(new PointXYRecordNG(-83.155848, 42.466008))); + Assert.That(mp2.Points[1], Is.EqualTo(new PointXYRecordNG(-83.162570, 42.459781))); + Assert.That(mp2.Points[2], Is.EqualTo(new PointXYRecordNG(-83.163186, 42.466671))); + Assert.That(mp2.Points[3], Is.EqualTo(new PointXYRecordNG(-83.168061, 42.460689))); + Assert.That(mp2.Points[4], Is.EqualTo(new PointXYRecordNG(-83.151307, 42.462658))); + Assert.That(mp2.Points[5], Is.EqualTo(new PointXYRecordNG(-83.142532, 42.460462))); + } + } +} diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.cpg b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.dbf b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.dbf new file mode 100644 index 0000000..210939e Binary files /dev/null and b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.dbf differ diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.prj b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.prj new file mode 100644 index 0000000..f45cbad --- /dev/null +++ b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.shp b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.shp new file mode 100644 index 0000000..735545b Binary files /dev/null and b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.shp differ diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.shx b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.shx new file mode 100644 index 0000000..5fe6aff Binary files /dev/null and b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/multipoints.shx differ diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.cpg b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.dbf b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.dbf new file mode 100644 index 0000000..d553573 Binary files /dev/null and b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.dbf differ diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.prj b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.prj new file mode 100644 index 0000000..f45cbad --- /dev/null +++ 
b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.shp b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.shp new file mode 100644 index 0000000..843832c Binary files /dev/null and b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.shp differ diff --git a/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.shx b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.shx new file mode 100644 index 0000000..fec209c Binary files /dev/null and b/test/NetTopologySuite.IO.ShapefileNG.Test/TestShapefiles/points.shx differ diff --git a/test/TestData/TestData.mxd b/test/TestData/TestData.mxd new file mode 100644 index 0000000..c12c8ac Binary files /dev/null and b/test/TestData/TestData.mxd differ diff --git a/test/TestData/arcmap/shp-copy/fields_utf8.cpg b/test/TestData/arcmap/shp-copy/fields_utf8.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/fields_utf8.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/fields_utf8.dbf b/test/TestData/arcmap/shp-copy/fields_utf8.dbf new file mode 100644 index 0000000..8c09049 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/fields_utf8.dbf differ diff --git a/test/TestData/arcmap/shp-copy/fields_utf8.shp b/test/TestData/arcmap/shp-copy/fields_utf8.shp new file mode 100644 index 0000000..d1a1019 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/fields_utf8.shp differ diff --git a/test/TestData/arcmap/shp-copy/fields_utf8.shx b/test/TestData/arcmap/shp-copy/fields_utf8.shx new file mode 100644 index 0000000..c93da43 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/fields_utf8.shx differ diff --git a/test/TestData/arcmap/shp-copy/multipoint.cpg b/test/TestData/arcmap/shp-copy/multipoint.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/multipoint.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/multipoint.dbf b/test/TestData/arcmap/shp-copy/multipoint.dbf new file mode 100644 index 0000000..4f523b3 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint.dbf differ diff --git a/test/TestData/arcmap/shp-copy/multipoint.shp b/test/TestData/arcmap/shp-copy/multipoint.shp new file mode 100644 index 0000000..30a2079 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint.shp differ diff --git a/test/TestData/arcmap/shp-copy/multipoint.shx b/test/TestData/arcmap/shp-copy/multipoint.shx new file mode 100644 index 0000000..39c884b Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint.shx differ diff --git a/test/TestData/arcmap/shp-copy/multipoint_m.cpg b/test/TestData/arcmap/shp-copy/multipoint_m.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/multipoint_m.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/multipoint_m.dbf b/test/TestData/arcmap/shp-copy/multipoint_m.dbf new file mode 100644 index 0000000..4f523b3 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint_m.dbf differ diff --git a/test/TestData/arcmap/shp-copy/multipoint_m.shp 
b/test/TestData/arcmap/shp-copy/multipoint_m.shp new file mode 100644 index 0000000..7fdc49d Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint_m.shp differ diff --git a/test/TestData/arcmap/shp-copy/multipoint_m.shx b/test/TestData/arcmap/shp-copy/multipoint_m.shx new file mode 100644 index 0000000..f236289 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint_m.shx differ diff --git a/test/TestData/arcmap/shp-copy/multipoint_zm.cpg b/test/TestData/arcmap/shp-copy/multipoint_zm.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/multipoint_zm.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/multipoint_zm.dbf b/test/TestData/arcmap/shp-copy/multipoint_zm.dbf new file mode 100644 index 0000000..4f523b3 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint_zm.dbf differ diff --git a/test/TestData/arcmap/shp-copy/multipoint_zm.shp b/test/TestData/arcmap/shp-copy/multipoint_zm.shp new file mode 100644 index 0000000..844f829 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint_zm.shp differ diff --git a/test/TestData/arcmap/shp-copy/multipoint_zm.shx b/test/TestData/arcmap/shp-copy/multipoint_zm.shx new file mode 100644 index 0000000..1d1d20f Binary files /dev/null and b/test/TestData/arcmap/shp-copy/multipoint_zm.shx differ diff --git a/test/TestData/arcmap/shp-copy/point.cpg b/test/TestData/arcmap/shp-copy/point.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/point.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/point.dbf b/test/TestData/arcmap/shp-copy/point.dbf new file mode 100644 index 0000000..93fe9b2 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point.dbf differ diff --git a/test/TestData/arcmap/shp-copy/point.shp b/test/TestData/arcmap/shp-copy/point.shp new file mode 100644 index 0000000..b718c0f Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point.shp differ diff --git a/test/TestData/arcmap/shp-copy/point.shx b/test/TestData/arcmap/shp-copy/point.shx new file mode 100644 index 0000000..a5cec23 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point.shx differ diff --git a/test/TestData/arcmap/shp-copy/point_m.cpg b/test/TestData/arcmap/shp-copy/point_m.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/point_m.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/point_m.dbf b/test/TestData/arcmap/shp-copy/point_m.dbf new file mode 100644 index 0000000..93fe9b2 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point_m.dbf differ diff --git a/test/TestData/arcmap/shp-copy/point_m.shp b/test/TestData/arcmap/shp-copy/point_m.shp new file mode 100644 index 0000000..961e42f Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point_m.shp differ diff --git a/test/TestData/arcmap/shp-copy/point_m.shx b/test/TestData/arcmap/shp-copy/point_m.shx new file mode 100644 index 0000000..74a03d8 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point_m.shx differ diff --git a/test/TestData/arcmap/shp-copy/point_zm.cpg b/test/TestData/arcmap/shp-copy/point_zm.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/point_zm.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/point_zm.dbf 
b/test/TestData/arcmap/shp-copy/point_zm.dbf new file mode 100644 index 0000000..93fe9b2 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point_zm.dbf differ diff --git a/test/TestData/arcmap/shp-copy/point_zm.shp b/test/TestData/arcmap/shp-copy/point_zm.shp new file mode 100644 index 0000000..5ef4fd8 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point_zm.shp differ diff --git a/test/TestData/arcmap/shp-copy/point_zm.shx b/test/TestData/arcmap/shp-copy/point_zm.shx new file mode 100644 index 0000000..3a2199a Binary files /dev/null and b/test/TestData/arcmap/shp-copy/point_zm.shx differ diff --git a/test/TestData/arcmap/shp-copy/polygon.cpg b/test/TestData/arcmap/shp-copy/polygon.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polygon.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polygon.dbf b/test/TestData/arcmap/shp-copy/polygon.dbf new file mode 100644 index 0000000..1c60a0e Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon.dbf differ diff --git a/test/TestData/arcmap/shp-copy/polygon.prj b/test/TestData/arcmap/shp-copy/polygon.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polygon.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polygon.shp b/test/TestData/arcmap/shp-copy/polygon.shp new file mode 100644 index 0000000..8405ba3 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon.shp differ diff --git a/test/TestData/arcmap/shp-copy/polygon.shx b/test/TestData/arcmap/shp-copy/polygon.shx new file mode 100644 index 0000000..18c8318 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon.shx differ diff --git a/test/TestData/arcmap/shp-copy/polygon_m.cpg b/test/TestData/arcmap/shp-copy/polygon_m.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polygon_m.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polygon_m.dbf b/test/TestData/arcmap/shp-copy/polygon_m.dbf new file mode 100644 index 0000000..1c60a0e Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon_m.dbf differ diff --git a/test/TestData/arcmap/shp-copy/polygon_m.prj b/test/TestData/arcmap/shp-copy/polygon_m.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polygon_m.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polygon_m.shp b/test/TestData/arcmap/shp-copy/polygon_m.shp new file mode 100644 index 0000000..2c75a4f Binary files /dev/null and 
b/test/TestData/arcmap/shp-copy/polygon_m.shp differ diff --git a/test/TestData/arcmap/shp-copy/polygon_m.shx b/test/TestData/arcmap/shp-copy/polygon_m.shx new file mode 100644 index 0000000..49932b1 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon_m.shx differ diff --git a/test/TestData/arcmap/shp-copy/polygon_zm.cpg b/test/TestData/arcmap/shp-copy/polygon_zm.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polygon_zm.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polygon_zm.dbf b/test/TestData/arcmap/shp-copy/polygon_zm.dbf new file mode 100644 index 0000000..2a0a382 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon_zm.dbf differ diff --git a/test/TestData/arcmap/shp-copy/polygon_zm.prj b/test/TestData/arcmap/shp-copy/polygon_zm.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polygon_zm.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polygon_zm.shp b/test/TestData/arcmap/shp-copy/polygon_zm.shp new file mode 100644 index 0000000..801c0b2 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon_zm.shp differ diff --git a/test/TestData/arcmap/shp-copy/polygon_zm.shx b/test/TestData/arcmap/shp-copy/polygon_zm.shx new file mode 100644 index 0000000..7d71e16 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polygon_zm.shx differ diff --git a/test/TestData/arcmap/shp-copy/polyline.cpg b/test/TestData/arcmap/shp-copy/polyline.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polyline.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polyline.dbf b/test/TestData/arcmap/shp-copy/polyline.dbf new file mode 100644 index 0000000..1c60a0e Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline.dbf differ diff --git a/test/TestData/arcmap/shp-copy/polyline.prj b/test/TestData/arcmap/shp-copy/polyline.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polyline.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polyline.shp b/test/TestData/arcmap/shp-copy/polyline.shp new file mode 100644 index 0000000..f90fb44 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline.shp differ diff --git a/test/TestData/arcmap/shp-copy/polyline.shx b/test/TestData/arcmap/shp-copy/polyline.shx new file mode 100644 index 0000000..7ff6dcf Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline.shx differ diff --git 
a/test/TestData/arcmap/shp-copy/polyline_m.cpg b/test/TestData/arcmap/shp-copy/polyline_m.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polyline_m.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polyline_m.dbf b/test/TestData/arcmap/shp-copy/polyline_m.dbf new file mode 100644 index 0000000..1c60a0e Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline_m.dbf differ diff --git a/test/TestData/arcmap/shp-copy/polyline_m.prj b/test/TestData/arcmap/shp-copy/polyline_m.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polyline_m.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polyline_m.shp b/test/TestData/arcmap/shp-copy/polyline_m.shp new file mode 100644 index 0000000..6fa4410 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline_m.shp differ diff --git a/test/TestData/arcmap/shp-copy/polyline_m.shx b/test/TestData/arcmap/shp-copy/polyline_m.shx new file mode 100644 index 0000000..3e24db4 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline_m.shx differ diff --git a/test/TestData/arcmap/shp-copy/polyline_zm.cpg b/test/TestData/arcmap/shp-copy/polyline_zm.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polyline_zm.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polyline_zm.dbf b/test/TestData/arcmap/shp-copy/polyline_zm.dbf new file mode 100644 index 0000000..1c60a0e Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline_zm.dbf differ diff --git a/test/TestData/arcmap/shp-copy/polyline_zm.prj b/test/TestData/arcmap/shp-copy/polyline_zm.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/polyline_zm.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/polyline_zm.shp b/test/TestData/arcmap/shp-copy/polyline_zm.shp new file mode 100644 index 0000000..534e53c Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline_zm.shp differ diff --git a/test/TestData/arcmap/shp-copy/polyline_zm.shx b/test/TestData/arcmap/shp-copy/polyline_zm.shx new file mode 100644 index 0000000..95b2d18 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/polyline_zm.shx differ diff --git a/test/TestData/arcmap/shp-copy/pt_date.cpg b/test/TestData/arcmap/shp-copy/pt_date.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/pt_date.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git 
a/test/TestData/arcmap/shp-copy/pt_date.dbf b/test/TestData/arcmap/shp-copy/pt_date.dbf new file mode 100644 index 0000000..b8ae18f Binary files /dev/null and b/test/TestData/arcmap/shp-copy/pt_date.dbf differ diff --git a/test/TestData/arcmap/shp-copy/pt_date.shp b/test/TestData/arcmap/shp-copy/pt_date.shp new file mode 100644 index 0000000..66b0b76 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/pt_date.shp differ diff --git a/test/TestData/arcmap/shp-copy/pt_date.shx b/test/TestData/arcmap/shp-copy/pt_date.shx new file mode 100644 index 0000000..1647788 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/pt_date.shx differ diff --git a/test/TestData/arcmap/shp-copy/pt_utf8.cpg b/test/TestData/arcmap/shp-copy/pt_utf8.cpg new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp-copy/pt_utf8.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/pt_utf8.dbf b/test/TestData/arcmap/shp-copy/pt_utf8.dbf new file mode 100644 index 0000000..688eb65 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/pt_utf8.dbf differ diff --git a/test/TestData/arcmap/shp-copy/pt_utf8.prj b/test/TestData/arcmap/shp-copy/pt_utf8.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp-copy/pt_utf8.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp-copy/pt_utf8.shp b/test/TestData/arcmap/shp-copy/pt_utf8.shp new file mode 100644 index 0000000..b43a96c Binary files /dev/null and b/test/TestData/arcmap/shp-copy/pt_utf8.shp differ diff --git a/test/TestData/arcmap/shp-copy/pt_utf8.shx b/test/TestData/arcmap/shp-copy/pt_utf8.shx new file mode 100644 index 0000000..76a0c76 Binary files /dev/null and b/test/TestData/arcmap/shp-copy/pt_utf8.shx differ diff --git a/test/TestData/arcmap/shp/fields_utf8.CPG b/test/TestData/arcmap/shp/fields_utf8.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/fields_utf8.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/fields_utf8.dbf b/test/TestData/arcmap/shp/fields_utf8.dbf new file mode 100644 index 0000000..7ed225e Binary files /dev/null and b/test/TestData/arcmap/shp/fields_utf8.dbf differ diff --git a/test/TestData/arcmap/shp/fields_utf8.sbn b/test/TestData/arcmap/shp/fields_utf8.sbn new file mode 100644 index 0000000..a3b358a Binary files /dev/null and b/test/TestData/arcmap/shp/fields_utf8.sbn differ diff --git a/test/TestData/arcmap/shp/fields_utf8.sbx b/test/TestData/arcmap/shp/fields_utf8.sbx new file mode 100644 index 0000000..dd1c950 Binary files /dev/null and b/test/TestData/arcmap/shp/fields_utf8.sbx differ diff --git a/test/TestData/arcmap/shp/fields_utf8.shp b/test/TestData/arcmap/shp/fields_utf8.shp new file mode 100644 index 0000000..d1a1019 Binary files /dev/null and b/test/TestData/arcmap/shp/fields_utf8.shp differ diff --git a/test/TestData/arcmap/shp/fields_utf8.shp.xml b/test/TestData/arcmap/shp/fields_utf8.shp.xml new file mode 100644 index 0000000..fb4b3f6 --- /dev/null +++ 
b/test/TestData/arcmap/shp/fields_utf8.shp.xml @@ -0,0 +1,2 @@ + +20210212131426001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\dbf\fields_utf8Local Area Network diff --git a/test/TestData/arcmap/shp/fields_utf8.shx b/test/TestData/arcmap/shp/fields_utf8.shx new file mode 100644 index 0000000..c93da43 Binary files /dev/null and b/test/TestData/arcmap/shp/fields_utf8.shx differ diff --git a/test/TestData/arcmap/shp/multipoint.CPG b/test/TestData/arcmap/shp/multipoint.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/multipoint.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/multipoint.dbf b/test/TestData/arcmap/shp/multipoint.dbf new file mode 100644 index 0000000..0e3324c Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint.dbf differ diff --git a/test/TestData/arcmap/shp/multipoint.sbn b/test/TestData/arcmap/shp/multipoint.sbn new file mode 100644 index 0000000..275ac8a Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint.sbn differ diff --git a/test/TestData/arcmap/shp/multipoint.sbx b/test/TestData/arcmap/shp/multipoint.sbx new file mode 100644 index 0000000..b8ccaff Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint.sbx differ diff --git a/test/TestData/arcmap/shp/multipoint.shp b/test/TestData/arcmap/shp/multipoint.shp new file mode 100644 index 0000000..30a2079 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint.shp differ diff --git a/test/TestData/arcmap/shp/multipoint.shp.xml b/test/TestData/arcmap/shp/multipoint.shp.xml new file mode 100644 index 0000000..4cd6148 --- /dev/null +++ b/test/TestData/arcmap/shp/multipoint.shp.xml @@ -0,0 +1,2 @@ + +20210204144206001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\multipointLocal Area Network diff --git a/test/TestData/arcmap/shp/multipoint.shx b/test/TestData/arcmap/shp/multipoint.shx new file mode 100644 index 0000000..39c884b Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint.shx differ diff --git a/test/TestData/arcmap/shp/multipoint_m.CPG b/test/TestData/arcmap/shp/multipoint_m.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/multipoint_m.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/multipoint_m.dbf b/test/TestData/arcmap/shp/multipoint_m.dbf new file mode 100644 index 0000000..bd50830 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_m.dbf differ diff --git a/test/TestData/arcmap/shp/multipoint_m.sbn b/test/TestData/arcmap/shp/multipoint_m.sbn new file mode 100644 index 0000000..a3dccc1 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_m.sbn differ diff --git a/test/TestData/arcmap/shp/multipoint_m.sbx b/test/TestData/arcmap/shp/multipoint_m.sbx new file mode 100644 index 0000000..fd3aac1 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_m.sbx differ diff --git a/test/TestData/arcmap/shp/multipoint_m.shp b/test/TestData/arcmap/shp/multipoint_m.shp new file mode 100644 index 0000000..7fdc49d Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_m.shp differ diff --git a/test/TestData/arcmap/shp/multipoint_m.shp.xml b/test/TestData/arcmap/shp/multipoint_m.shp.xml new file mode 100644 index 0000000..b86d47f --- /dev/null +++ b/test/TestData/arcmap/shp/multipoint_m.shp.xml @@ -0,0 +1,2 @@ + 
+20210204144209001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\multipoint_mLocal Area Network diff --git a/test/TestData/arcmap/shp/multipoint_m.shx b/test/TestData/arcmap/shp/multipoint_m.shx new file mode 100644 index 0000000..f236289 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_m.shx differ diff --git a/test/TestData/arcmap/shp/multipoint_zm.CPG b/test/TestData/arcmap/shp/multipoint_zm.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/multipoint_zm.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/multipoint_zm.dbf b/test/TestData/arcmap/shp/multipoint_zm.dbf new file mode 100644 index 0000000..bd50830 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_zm.dbf differ diff --git a/test/TestData/arcmap/shp/multipoint_zm.sbn b/test/TestData/arcmap/shp/multipoint_zm.sbn new file mode 100644 index 0000000..a3dccc1 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_zm.sbn differ diff --git a/test/TestData/arcmap/shp/multipoint_zm.sbx b/test/TestData/arcmap/shp/multipoint_zm.sbx new file mode 100644 index 0000000..fd3aac1 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_zm.sbx differ diff --git a/test/TestData/arcmap/shp/multipoint_zm.shp b/test/TestData/arcmap/shp/multipoint_zm.shp new file mode 100644 index 0000000..844f829 Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_zm.shp differ diff --git a/test/TestData/arcmap/shp/multipoint_zm.shp.xml b/test/TestData/arcmap/shp/multipoint_zm.shp.xml new file mode 100644 index 0000000..2a732bd --- /dev/null +++ b/test/TestData/arcmap/shp/multipoint_zm.shp.xml @@ -0,0 +1,2 @@ + +20210203175105001.0FALSECalculateField multipoint_zm Id "101" VB #CalculateField multipoint_zm Id 102 VB #CalculateField multipoint_zm Id 202 VB #Dissolve multipoint_zm D:\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\multipoint\multipoint_zm2.shp Id # MULTI_PART DISSOLVE_LINESmultipoint_zm20020.000file://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\multipoint_zmLocal Area Network20210203175249002021020317524900 Version 6.2 (Build 9200) ; Esri ArcGIS 10.6.1.9328multipoint_zm2Shapefile0.000dataset0SimpleFALSE0TRUETRUEmultipoint_zm2Feature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.IdIdInteger66020210203 diff --git a/test/TestData/arcmap/shp/multipoint_zm.shx b/test/TestData/arcmap/shp/multipoint_zm.shx new file mode 100644 index 0000000..1d1d20f Binary files /dev/null and b/test/TestData/arcmap/shp/multipoint_zm.shx differ diff --git a/test/TestData/arcmap/shp/point.CPG b/test/TestData/arcmap/shp/point.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/point.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/point.dbf b/test/TestData/arcmap/shp/point.dbf new file mode 100644 index 0000000..1ea2589 Binary files /dev/null and b/test/TestData/arcmap/shp/point.dbf differ diff --git a/test/TestData/arcmap/shp/point.sbn b/test/TestData/arcmap/shp/point.sbn new file mode 100644 index 0000000..0fb3ec0 Binary files /dev/null and b/test/TestData/arcmap/shp/point.sbn differ diff --git a/test/TestData/arcmap/shp/point.sbx b/test/TestData/arcmap/shp/point.sbx new file mode 100644 
index 0000000..2b2fb30 Binary files /dev/null and b/test/TestData/arcmap/shp/point.sbx differ diff --git a/test/TestData/arcmap/shp/point.shp b/test/TestData/arcmap/shp/point.shp new file mode 100644 index 0000000..b718c0f Binary files /dev/null and b/test/TestData/arcmap/shp/point.shp differ diff --git a/test/TestData/arcmap/shp/point.shp.xml b/test/TestData/arcmap/shp/point.shp.xml new file mode 100644 index 0000000..541c5f2 --- /dev/null +++ b/test/TestData/arcmap/shp/point.shp.xml @@ -0,0 +1,2 @@ + +20210204144149001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\pointLocal Area Network diff --git a/test/TestData/arcmap/shp/point.shx b/test/TestData/arcmap/shp/point.shx new file mode 100644 index 0000000..a5cec23 Binary files /dev/null and b/test/TestData/arcmap/shp/point.shx differ diff --git a/test/TestData/arcmap/shp/point_m.CPG b/test/TestData/arcmap/shp/point_m.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/point_m.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/point_m.dbf b/test/TestData/arcmap/shp/point_m.dbf new file mode 100644 index 0000000..1ea2589 Binary files /dev/null and b/test/TestData/arcmap/shp/point_m.dbf differ diff --git a/test/TestData/arcmap/shp/point_m.sbn b/test/TestData/arcmap/shp/point_m.sbn new file mode 100644 index 0000000..0fb3ec0 Binary files /dev/null and b/test/TestData/arcmap/shp/point_m.sbn differ diff --git a/test/TestData/arcmap/shp/point_m.sbx b/test/TestData/arcmap/shp/point_m.sbx new file mode 100644 index 0000000..2b2fb30 Binary files /dev/null and b/test/TestData/arcmap/shp/point_m.sbx differ diff --git a/test/TestData/arcmap/shp/point_m.shp b/test/TestData/arcmap/shp/point_m.shp new file mode 100644 index 0000000..961e42f Binary files /dev/null and b/test/TestData/arcmap/shp/point_m.shp differ diff --git a/test/TestData/arcmap/shp/point_m.shp.xml b/test/TestData/arcmap/shp/point_m.shp.xml new file mode 100644 index 0000000..9762952 --- /dev/null +++ b/test/TestData/arcmap/shp/point_m.shp.xml @@ -0,0 +1,2 @@ + +20210204144201001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\point_mLocal Area Network diff --git a/test/TestData/arcmap/shp/point_m.shx b/test/TestData/arcmap/shp/point_m.shx new file mode 100644 index 0000000..74a03d8 Binary files /dev/null and b/test/TestData/arcmap/shp/point_m.shx differ diff --git a/test/TestData/arcmap/shp/point_zm.CPG b/test/TestData/arcmap/shp/point_zm.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/point_zm.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/point_zm.dbf b/test/TestData/arcmap/shp/point_zm.dbf new file mode 100644 index 0000000..1ea2589 Binary files /dev/null and b/test/TestData/arcmap/shp/point_zm.dbf differ diff --git a/test/TestData/arcmap/shp/point_zm.sbn b/test/TestData/arcmap/shp/point_zm.sbn new file mode 100644 index 0000000..0fb3ec0 Binary files /dev/null and b/test/TestData/arcmap/shp/point_zm.sbn differ diff --git a/test/TestData/arcmap/shp/point_zm.sbx b/test/TestData/arcmap/shp/point_zm.sbx new file mode 100644 index 0000000..2b2fb30 Binary files /dev/null and b/test/TestData/arcmap/shp/point_zm.sbx differ diff --git a/test/TestData/arcmap/shp/point_zm.shp b/test/TestData/arcmap/shp/point_zm.shp new file mode 100644 index 0000000..5ef4fd8 Binary files /dev/null and 
b/test/TestData/arcmap/shp/point_zm.shp differ diff --git a/test/TestData/arcmap/shp/point_zm.shp.xml b/test/TestData/arcmap/shp/point_zm.shp.xml new file mode 100644 index 0000000..2304810 --- /dev/null +++ b/test/TestData/arcmap/shp/point_zm.shp.xml @@ -0,0 +1,2 @@ + +20210203173426001.0TRUECalculateField point_zm Id "[FID] * 100" VB #CalculateField point_zm Id "[FID] * 100" VB #CalculateField point_zm Id "[FID] + 100" VB #file://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\point_zmLocal Area Network diff --git a/test/TestData/arcmap/shp/point_zm.shx b/test/TestData/arcmap/shp/point_zm.shx new file mode 100644 index 0000000..3a2199a Binary files /dev/null and b/test/TestData/arcmap/shp/point_zm.shx differ diff --git a/test/TestData/arcmap/shp/polygon.CPG b/test/TestData/arcmap/shp/polygon.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/polygon.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polygon.dbf b/test/TestData/arcmap/shp/polygon.dbf new file mode 100644 index 0000000..25dd80c Binary files /dev/null and b/test/TestData/arcmap/shp/polygon.dbf differ diff --git a/test/TestData/arcmap/shp/polygon.prj b/test/TestData/arcmap/shp/polygon.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/polygon.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polygon.sbn b/test/TestData/arcmap/shp/polygon.sbn new file mode 100644 index 0000000..5337cd1 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon.sbn differ diff --git a/test/TestData/arcmap/shp/polygon.sbx b/test/TestData/arcmap/shp/polygon.sbx new file mode 100644 index 0000000..a7264c4 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon.sbx differ diff --git a/test/TestData/arcmap/shp/polygon.shp b/test/TestData/arcmap/shp/polygon.shp new file mode 100644 index 0000000..8405ba3 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon.shp differ diff --git a/test/TestData/arcmap/shp/polygon.shp.xml b/test/TestData/arcmap/shp/polygon.shp.xml new file mode 100644 index 0000000..f55e53a --- /dev/null +++ b/test/TestData/arcmap/shp/polygon.shp.xml @@ -0,0 +1,2 @@ + +20210204144138001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\polygonLocal Area Network diff --git a/test/TestData/arcmap/shp/polygon.shx b/test/TestData/arcmap/shp/polygon.shx new file mode 100644 index 0000000..18c8318 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon.shx differ diff --git a/test/TestData/arcmap/shp/polygon_m.CPG b/test/TestData/arcmap/shp/polygon_m.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/polygon_m.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polygon_m.dbf b/test/TestData/arcmap/shp/polygon_m.dbf new file mode 100644 index 0000000..25dd80c Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_m.dbf differ diff --git 
a/test/TestData/arcmap/shp/polygon_m.prj b/test/TestData/arcmap/shp/polygon_m.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/polygon_m.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polygon_m.sbn b/test/TestData/arcmap/shp/polygon_m.sbn new file mode 100644 index 0000000..5337cd1 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_m.sbn differ diff --git a/test/TestData/arcmap/shp/polygon_m.sbx b/test/TestData/arcmap/shp/polygon_m.sbx new file mode 100644 index 0000000..a7264c4 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_m.sbx differ diff --git a/test/TestData/arcmap/shp/polygon_m.shp b/test/TestData/arcmap/shp/polygon_m.shp new file mode 100644 index 0000000..2c75a4f Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_m.shp differ diff --git a/test/TestData/arcmap/shp/polygon_m.shp.xml b/test/TestData/arcmap/shp/polygon_m.shp.xml new file mode 100644 index 0000000..863d3fa --- /dev/null +++ b/test/TestData/arcmap/shp/polygon_m.shp.xml @@ -0,0 +1,2 @@ + +20210204144141001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\polygon_mLocal Area Network diff --git a/test/TestData/arcmap/shp/polygon_m.shx b/test/TestData/arcmap/shp/polygon_m.shx new file mode 100644 index 0000000..49932b1 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_m.shx differ diff --git a/test/TestData/arcmap/shp/polygon_zm.CPG b/test/TestData/arcmap/shp/polygon_zm.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/polygon_zm.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polygon_zm.dbf b/test/TestData/arcmap/shp/polygon_zm.dbf new file mode 100644 index 0000000..7fda6c7 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_zm.dbf differ diff --git a/test/TestData/arcmap/shp/polygon_zm.prj b/test/TestData/arcmap/shp/polygon_zm.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/polygon_zm.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polygon_zm.sbn b/test/TestData/arcmap/shp/polygon_zm.sbn new file mode 100644 index 0000000..5337cd1 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_zm.sbn differ diff --git a/test/TestData/arcmap/shp/polygon_zm.sbx b/test/TestData/arcmap/shp/polygon_zm.sbx new file mode 100644 index 0000000..a7264c4 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_zm.sbx differ diff --git a/test/TestData/arcmap/shp/polygon_zm.shp b/test/TestData/arcmap/shp/polygon_zm.shp new file mode 100644 
index 0000000..801c0b2 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_zm.shp differ diff --git a/test/TestData/arcmap/shp/polygon_zm.shx b/test/TestData/arcmap/shp/polygon_zm.shx new file mode 100644 index 0000000..7d71e16 Binary files /dev/null and b/test/TestData/arcmap/shp/polygon_zm.shx differ diff --git a/test/TestData/arcmap/shp/polyline.CPG b/test/TestData/arcmap/shp/polyline.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/polyline.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polyline.dbf b/test/TestData/arcmap/shp/polyline.dbf new file mode 100644 index 0000000..25dd80c Binary files /dev/null and b/test/TestData/arcmap/shp/polyline.dbf differ diff --git a/test/TestData/arcmap/shp/polyline.prj b/test/TestData/arcmap/shp/polyline.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/polyline.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polyline.sbn b/test/TestData/arcmap/shp/polyline.sbn new file mode 100644 index 0000000..5337cd1 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline.sbn differ diff --git a/test/TestData/arcmap/shp/polyline.sbx b/test/TestData/arcmap/shp/polyline.sbx new file mode 100644 index 0000000..a7264c4 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline.sbx differ diff --git a/test/TestData/arcmap/shp/polyline.shp b/test/TestData/arcmap/shp/polyline.shp new file mode 100644 index 0000000..f90fb44 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline.shp differ diff --git a/test/TestData/arcmap/shp/polyline.shp.xml b/test/TestData/arcmap/shp/polyline.shp.xml new file mode 100644 index 0000000..54b71b5 --- /dev/null +++ b/test/TestData/arcmap/shp/polyline.shp.xml @@ -0,0 +1,2 @@ + +20210204144124001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\polylineLocal Area Network diff --git a/test/TestData/arcmap/shp/polyline.shx b/test/TestData/arcmap/shp/polyline.shx new file mode 100644 index 0000000..7ff6dcf Binary files /dev/null and b/test/TestData/arcmap/shp/polyline.shx differ diff --git a/test/TestData/arcmap/shp/polyline_m.CPG b/test/TestData/arcmap/shp/polyline_m.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/polyline_m.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polyline_m.dbf b/test/TestData/arcmap/shp/polyline_m.dbf new file mode 100644 index 0000000..0b8d06f Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_m.dbf differ diff --git a/test/TestData/arcmap/shp/polyline_m.prj b/test/TestData/arcmap/shp/polyline_m.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/polyline_m.prj @@ -0,0 +1 @@ 
+PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polyline_m.sbn b/test/TestData/arcmap/shp/polyline_m.sbn new file mode 100644 index 0000000..5337cd1 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_m.sbn differ diff --git a/test/TestData/arcmap/shp/polyline_m.sbx b/test/TestData/arcmap/shp/polyline_m.sbx new file mode 100644 index 0000000..a7264c4 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_m.sbx differ diff --git a/test/TestData/arcmap/shp/polyline_m.shp b/test/TestData/arcmap/shp/polyline_m.shp new file mode 100644 index 0000000..6fa4410 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_m.shp differ diff --git a/test/TestData/arcmap/shp/polyline_m.shp.xml b/test/TestData/arcmap/shp/polyline_m.shp.xml new file mode 100644 index 0000000..9cb72b9 --- /dev/null +++ b/test/TestData/arcmap/shp/polyline_m.shp.xml @@ -0,0 +1,2 @@ + +20210204144129001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\polyline_mLocal Area Network diff --git a/test/TestData/arcmap/shp/polyline_m.shx b/test/TestData/arcmap/shp/polyline_m.shx new file mode 100644 index 0000000..3e24db4 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_m.shx differ diff --git a/test/TestData/arcmap/shp/polyline_zm.CPG b/test/TestData/arcmap/shp/polyline_zm.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/polyline_zm.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polyline_zm.dbf b/test/TestData/arcmap/shp/polyline_zm.dbf new file mode 100644 index 0000000..0b8d06f Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_zm.dbf differ diff --git a/test/TestData/arcmap/shp/polyline_zm.prj b/test/TestData/arcmap/shp/polyline_zm.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/polyline_zm.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/polyline_zm.sbn b/test/TestData/arcmap/shp/polyline_zm.sbn new file mode 100644 index 0000000..5337cd1 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_zm.sbn differ diff --git a/test/TestData/arcmap/shp/polyline_zm.sbx b/test/TestData/arcmap/shp/polyline_zm.sbx new file mode 100644 index 0000000..a7264c4 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_zm.sbx differ diff --git a/test/TestData/arcmap/shp/polyline_zm.shp b/test/TestData/arcmap/shp/polyline_zm.shp new file mode 100644 index 0000000..534e53c Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_zm.shp differ diff --git a/test/TestData/arcmap/shp/polyline_zm.shp.xml 
b/test/TestData/arcmap/shp/polyline_zm.shp.xml new file mode 100644 index 0000000..564c29a --- /dev/null +++ b/test/TestData/arcmap/shp/polyline_zm.shp.xml @@ -0,0 +1,2 @@ + +20210204144133001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\shp\polyline_zmLocal Area Network diff --git a/test/TestData/arcmap/shp/polyline_zm.shx b/test/TestData/arcmap/shp/polyline_zm.shx new file mode 100644 index 0000000..95b2d18 Binary files /dev/null and b/test/TestData/arcmap/shp/polyline_zm.shx differ diff --git a/test/TestData/arcmap/shp/pt_date.CPG b/test/TestData/arcmap/shp/pt_date.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/pt_date.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/pt_date.dbf b/test/TestData/arcmap/shp/pt_date.dbf new file mode 100644 index 0000000..7bf6f49 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_date.dbf differ diff --git a/test/TestData/arcmap/shp/pt_date.sbn b/test/TestData/arcmap/shp/pt_date.sbn new file mode 100644 index 0000000..195e53f Binary files /dev/null and b/test/TestData/arcmap/shp/pt_date.sbn differ diff --git a/test/TestData/arcmap/shp/pt_date.sbx b/test/TestData/arcmap/shp/pt_date.sbx new file mode 100644 index 0000000..14d1c56 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_date.sbx differ diff --git a/test/TestData/arcmap/shp/pt_date.shp b/test/TestData/arcmap/shp/pt_date.shp new file mode 100644 index 0000000..66b0b76 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_date.shp differ diff --git a/test/TestData/arcmap/shp/pt_date.shp.xml b/test/TestData/arcmap/shp/pt_date.shp.xml new file mode 100644 index 0000000..a168cd2 --- /dev/null +++ b/test/TestData/arcmap/shp/pt_date.shp.xml @@ -0,0 +1,2 @@ + +20210212131444001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\dbf\pt_dateLocal Area Network diff --git a/test/TestData/arcmap/shp/pt_date.shx b/test/TestData/arcmap/shp/pt_date.shx new file mode 100644 index 0000000..1647788 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_date.shx differ diff --git a/test/TestData/arcmap/shp/pt_utf8.CPG b/test/TestData/arcmap/shp/pt_utf8.CPG new file mode 100644 index 0000000..3ad133c --- /dev/null +++ b/test/TestData/arcmap/shp/pt_utf8.CPG @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/test/TestData/arcmap/shp/pt_utf8.dbf b/test/TestData/arcmap/shp/pt_utf8.dbf new file mode 100644 index 0000000..4263ce4 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_utf8.dbf differ diff --git a/test/TestData/arcmap/shp/pt_utf8.prj b/test/TestData/arcmap/shp/pt_utf8.prj new file mode 100644 index 0000000..5c6f76d --- /dev/null +++ b/test/TestData/arcmap/shp/pt_utf8.prj @@ -0,0 +1 @@ +PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/test/TestData/arcmap/shp/pt_utf8.sbn b/test/TestData/arcmap/shp/pt_utf8.sbn new file mode 100644 index 0000000..40dd961 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_utf8.sbn differ diff --git a/test/TestData/arcmap/shp/pt_utf8.sbx 
b/test/TestData/arcmap/shp/pt_utf8.sbx new file mode 100644 index 0000000..aff1b37 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_utf8.sbx differ diff --git a/test/TestData/arcmap/shp/pt_utf8.shp b/test/TestData/arcmap/shp/pt_utf8.shp new file mode 100644 index 0000000..b43a96c Binary files /dev/null and b/test/TestData/arcmap/shp/pt_utf8.shp differ diff --git a/test/TestData/arcmap/shp/pt_utf8.shp.xml b/test/TestData/arcmap/shp/pt_utf8.shp.xml new file mode 100644 index 0000000..16e868b --- /dev/null +++ b/test/TestData/arcmap/shp/pt_utf8.shp.xml @@ -0,0 +1,2 @@ + +20210204144233001.0TRUEfile://\\DELL5490\Work.Dev\VS\Coordinatus.NET\NetTopologySuite.IO.Esri\test\TestData\arcmap\dbf\pt_utf8Local Area Network diff --git a/test/TestData/arcmap/shp/pt_utf8.shx b/test/TestData/arcmap/shp/pt_utf8.shx new file mode 100644 index 0000000..76a0c76 Binary files /dev/null and b/test/TestData/arcmap/shp/pt_utf8.shx differ diff --git a/test/TestData/temp/abcd.dbf b/test/TestData/temp/abcd.dbf new file mode 100644 index 0000000..630e07c Binary files /dev/null and b/test/TestData/temp/abcd.dbf differ diff --git a/test/TestData/temp/abcd.shp b/test/TestData/temp/abcd.shp new file mode 100644 index 0000000..ab6ae8c Binary files /dev/null and b/test/TestData/temp/abcd.shp differ diff --git a/test/TestData/temp/abcd.shx b/test/TestData/temp/abcd.shx new file mode 100644 index 0000000..6c6b2ad Binary files /dev/null and b/test/TestData/temp/abcd.shx differ diff --git a/test/TestData/temp/abcd1.dbf b/test/TestData/temp/abcd1.dbf new file mode 100644 index 0000000..cb27a2e Binary files /dev/null and b/test/TestData/temp/abcd1.dbf differ diff --git a/test/TestData/temp/abcd1.shp b/test/TestData/temp/abcd1.shp new file mode 100644 index 0000000..8883da9 Binary files /dev/null and b/test/TestData/temp/abcd1.shp differ diff --git a/test/TestData/temp/abcd1.shx b/test/TestData/temp/abcd1.shx new file mode 100644 index 0000000..23fc121 Binary files /dev/null and b/test/TestData/temp/abcd1.shx differ diff --git a/test/TestData/temp/abcd2.dbf b/test/TestData/temp/abcd2.dbf new file mode 100644 index 0000000..630e07c Binary files /dev/null and b/test/TestData/temp/abcd2.dbf differ diff --git a/test/TestData/temp/abcd2.shp b/test/TestData/temp/abcd2.shp new file mode 100644 index 0000000..8883da9 Binary files /dev/null and b/test/TestData/temp/abcd2.shp differ diff --git a/test/TestData/temp/abcd2.shx b/test/TestData/temp/abcd2.shx new file mode 100644 index 0000000..23fc121 Binary files /dev/null and b/test/TestData/temp/abcd2.shx differ diff --git a/test/TestData/temp/abcd3.dbf b/test/TestData/temp/abcd3.dbf new file mode 100644 index 0000000..cb27a2e Binary files /dev/null and b/test/TestData/temp/abcd3.dbf differ diff --git a/test/TestData/temp/abcd3.shp b/test/TestData/temp/abcd3.shp new file mode 100644 index 0000000..6fe6cf4 Binary files /dev/null and b/test/TestData/temp/abcd3.shp differ diff --git a/test/TestData/temp/abcd3.shx b/test/TestData/temp/abcd3.shx new file mode 100644 index 0000000..3326b6b Binary files /dev/null and b/test/TestData/temp/abcd3.shx differ diff --git a/test/TestData/temp/point.dbf b/test/TestData/temp/point.dbf new file mode 100644 index 0000000..74bdbed Binary files /dev/null and b/test/TestData/temp/point.dbf differ diff --git a/test/TestData/temp/point.shp b/test/TestData/temp/point.shp new file mode 100644 index 0000000..5fa11a6 Binary files /dev/null and b/test/TestData/temp/point.shp differ diff --git a/test/TestData/temp/point.shx b/test/TestData/temp/point.shx new file 
mode 100644 index 0000000..a6c3427 Binary files /dev/null and b/test/TestData/temp/point.shx differ