Skip to content
Permalink
Browse files

construct shapefiles in memory (#890)

- added functions to export shapefiles to MemoryStream
- changed code so that NullShapes are recognized not only for polygon and line shapes in IndexMode, but also when IndexMode is disabled, and for point and multipoint shapes as well
- made Shapefile class abstract, because we already have FeatureSet for creating unspecified Shapefiles
  • Loading branch information...
donogst authored and jany-tenaj committed Apr 3, 2017
1 parent 0fc3ee4 commit 56770cb0ff2df9f10963c320b05e7269b7eab5a1
@@ -189,4 +189,3 @@ FakesAssemblies/
# LightSwitch generated files
GeneratedArtifacts/
_Pvt_Extensions/
ModelManifest.xml
@@ -16,6 +16,7 @@ Be aware that code written for 1.9 will not work out of the box because DotSpati
- CopySubset overloads with withAttributes parameters
- Jenks Natural Breaks support in categories binning.
- Test that checks correct creation of GpggkSentence objects from string
- Constructing Shapefiles in memory as single zip archives (#885)

### Changed
- Switched to VS2015 and C#6
@@ -22,6 +22,7 @@ Martin Karing <karing.martin@gmail.com>
Florian Fuchs <florian.fuchs@k.roteskreuz.at>
Peder Wikstrom <peder.wikstrom@treesys.se>
Jan Paolo Go <go.janpaolo@gmail.com>
Steve Donoghue <steve@donoghue.it>
Jesus Fernandez <jfevia@gmail.com>
Bart de Groot <bdegroot@gmail.com>
Hugo Dejaune <hugo.dejaune@gmail.com>
@@ -1142,7 +1142,7 @@ public void GenerateDefaultOutput(Parameter par)
switch (par.ParamType)
{
case "DotSpatial FeatureSet Param":
addedFeatureSet = new Shapefile
addedFeatureSet = new FeatureSet
{
Filename =
Path.GetTempPath() +
@@ -105,7 +105,9 @@
<Compile Include="ShapeTests.cs" />
</ItemGroup>
<ItemGroup>
<None Include="packages.config" />
<None Include="packages.config">
<SubType>Designer</SubType>
</None>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\DotSpatial.Data.Rasters.GdalExtension\DotSpatial.Data.Rasters.GdalExtension.csproj">
@@ -13,7 +13,8 @@ public void CanReadLineShapeWithNullShapes()
const string path = @"Data\Shapefiles\Archi\ARCHI_13-01-01.shp";
var target = new LineShapefile(path);
Assert.IsNotNull(target);
Assert.IsTrue(target.ShapeIndices.Any(d => d.ShapeType == ShapeType.NullShape));
Assert.AreEqual(11, target.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape));
Assert.AreEqual(11, target.Features.Count(d => d.Geometry.IsEmpty));
}

[Test]
@@ -33,12 +34,11 @@ public void CanExportLineShapeWithNullShapes(bool indexMode)
{
var actual = new LineShapefile(exportPath);
Assert.IsNotNull(actual);
Assert.AreEqual(target.Extent,actual.Extent);
Assert.AreEqual(target.ShapeIndices.Count, actual.ShapeIndices.Count);
if (indexMode)
{
Assert.AreEqual(target.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape), actual.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape));
}
Assert.AreEqual(target.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape), actual.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape));
Assert.AreEqual(target.Features.Count, actual.Features.Count);
Assert.AreEqual(target.Features.Count(d => d.Geometry.IsEmpty), actual.Features.Count(d => d.Geometry.IsEmpty));
}
finally
{
@@ -1,11 +1,15 @@
using System.Linq;
using DotSpatial.Tests.Common;
using NUnit.Framework;

namespace DotSpatial.Data.Tests
{
[TestFixture]
class PointShapefileTests
{
/// <summary>
/// Checks whether point shapefiles that have a z but no m value can be loaded.
/// </summary>
[Test]
public void CanReadPointZWithoutM()
{
@@ -17,12 +21,50 @@ public void CanReadPointZWithoutM()
Assert.IsTrue(target.M.All(d => d < -1e38));
}

/// <summary>
/// Checks whether point shapefiles that contain NullShapes can be loaded without loosing the NullShapes.
/// </summary>
[Test]
public void CanLoadShapePointWithNullShapes()
{
const string path = @"Data\Shapefiles\Yield\Yield 2012.shp";
var target = new PointShapefile(path);
Assert.IsNotNull(target);
Assert.AreEqual(target.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape), 1792);
Assert.AreEqual(target.Features.Count(d => d.Geometry.IsEmpty), 1792);
}

/// <summary>
/// Checks whether point shapefiles that contain NullShapes can be exported without loosing the NullShapes.
/// </summary>
/// <param name="indexMode"></param>
[Test]
[TestCase(false)]
[TestCase(true)]
public void CanExportPointShapeWithNullShapes(bool indexMode)
{
const string path = @"Data\Shapefiles\Yield\Yield 2012.shp";
var target = new PointShapefile(path);
Assert.IsTrue(target.Features.Count > 0);
target.IndexMode = indexMode;

var exportPath = FileTools.GetTempFileName(".shp");
target.SaveAs(exportPath, true);

try
{
var actual = new PointShapefile(exportPath);
Assert.IsNotNull(actual);
Assert.AreEqual(target.ShapeIndices.Count, actual.ShapeIndices.Count);
Assert.AreEqual(target.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape), actual.ShapeIndices.Count(d => d.ShapeType == ShapeType.NullShape));
Assert.AreEqual(target.Features.Count, actual.Features.Count);
Assert.AreEqual(target.Features.Count(d => d.Geometry.IsEmpty), actual.Features.Count(d => d.Geometry.IsEmpty));
}
finally
{
FileTools.DeleteShapeFile(exportPath);
}
}

}
}
@@ -1,4 +1,8 @@
using System.IO;
using System;
using System.IO;

using DotSpatial.Tests.Common;

using NUnit.Framework;

namespace DotSpatial.Data.Tests
@@ -37,5 +41,77 @@ public void NumericColumnAsDoubleTest()
var shapeFile = Shapefile.OpenFile(Path.Combine(_shapefiles, @"OGR-numeric\ogr-numeric.shp"));
Assert.AreEqual("System.Double", shapeFile.DataTable.Columns[2].DataType.FullName);
}

/// <summary>
/// This test checks whether the exported shapesfiles equal the original shapefiles.
/// </summary>
/// <param name="filename">Name of the original shapefile used for exporting.</param>
/// <param name="indexMode">Indicates whether the IndexMode export routine should be used.</param>
[Test]
[TestCase("counties.shp", true)]
[TestCase("cities.shp", true)]
[TestCase("rivers.shp", true)]
[TestCase("counties.shp", false)]
[TestCase("cities.shp", false)]
[TestCase("rivers.shp", false)]
public void ShapeFileExport(string filename, bool indexMode)
{
//TODO needs test cases for multipoints
string originalFileName = Path.Combine(new[] { _shapefiles, filename });

var original = (Shapefile)DataManager.DefaultDataManager.OpenFile(originalFileName);
original.IndexMode = indexMode;
var package = original.ExportShapefilePackage();

// check archive has correct number of contained files
// shp, shx, dbf & prj
Assert.IsNotNull(package.ShpFile);
Assert.IsNotNull(package.ShxFile);
Assert.IsNotNull(package.DbfFile);
Assert.IsNotNull(package.PrjFile);

string tempFileBase = Path.GetRandomFileName();
string shpName = Path.Combine(Path.GetTempPath(), $"{tempFileBase}.shp");
string shxName = Path.Combine(Path.GetTempPath(), $"{tempFileBase}.shx");
string dbfName = Path.Combine(Path.GetTempPath(), $"{tempFileBase}.dbf");

SaveStream(shpName, package.ShpFile);
SaveStream(shxName, package.ShxFile);
SaveStream(dbfName, package.DbfFile);

// open the shape file from the archive
var newExport = (Shapefile)DataManager.DefaultDataManager.OpenFile(shpName);

//compare the in memory representations of the original and the extract
try
{
Assert.AreEqual(original.Features.Count, newExport.Features.Count);
for (var j = 0; j < original.Features.Count; j += 100)
{
Assert.AreEqual(original.Features[j].DataRow, original.Features[j].DataRow);
Assert.AreEqual(original.Features[j].Geometry.Coordinates, newExport.Features[j].Geometry.Coordinates);
}
}
finally
{
// this method deletes the other files too
FileTools.DeleteShapeFile(shpName);
}
}

/// <summary>
/// Saves a stream to disk.
/// </summary>
/// <param name="path">Path of the destination file.</param>
/// <param name="content">Stream with the content that should be saved to the file.</param>
private static void SaveStream(string path, Stream content)
{
using (var fs = new FileStream(path, FileMode.Create))
{
content.CopyTo(fs);
fs.Flush();
}
}

}
}
@@ -184,7 +184,7 @@ public object[] SupplyPageOfData(int lowerPageBoundary, int rowsPerPage, string
while (current < fieldLength)
{
current += myStream.Read(byteContent, current, fieldLength - current);
}
}

result[outRow++] = ParseColumn(field, row, byteContent, 0, fieldLength, null);
}
@@ -291,6 +291,10 @@ public void SetAttributes(int startRow, DataTable dataValues)
}
}

/// <summary>
/// Gets a BinaryWriter that can be used to write to the file inside _fileName.
/// </summary>
/// <returns>Returns a BinaryWriter that can be used to write to the file inside _fileName.</returns>
private BinaryWriter GetBinaryWriter()
{
return new BinaryWriter(new FileStream(_fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None, 1000000));
@@ -887,6 +891,34 @@ public void Fill(int numRows)
_isFilling = false;
}

        /// <summary>
        /// Exports the dbf file content to a stream.
        /// </summary>
        /// <returns>A stream that contains the dbf file content, positioned at the beginning.</returns>
        public Stream ExportDbfToStream()
        {
            MemoryStream dbfStream = new MemoryStream();
            // Bring the field definitions in sync with the current DataTable layout before writing.
            UpdateSchema();

            try
            {
                // _writer is the instance-level writer used by WriteHeader/WriteTable;
                // it is pointed at a temporary in-memory stream for the duration of the export.
                using (var inMemoryStream = new MemoryStream())
                using (_writer = new BinaryWriter(inMemoryStream))
                {
                    WriteHeader(_writer);
                    WriteTable();
                    // Rewind the scratch stream so the copy starts from the first byte.
                    inMemoryStream.Seek(0, SeekOrigin.Begin);
                    inMemoryStream.CopyTo(dbfStream);
                    _writer.Close(); // NOTE(review): redundant — the using block disposes _writer anyway.
                }
            }
            finally
            {
                // Rewind so callers can read the content immediately.
                // NOTE(review): this also runs when writing throws, in which case a
                // partially filled (or empty) stream escapes via the exception path — confirm intended.
                dbfStream.Seek(0, SeekOrigin.Begin);
            }
            return dbfStream;
        }

/// <summary>
/// Attempts to save the file to the path specified by the Filename property.
/// This should be the .shp extension.
@@ -948,7 +980,6 @@ private void UpdateSchema()
}

// Add new columns that exist in the data Table, but don't have a matching field yet.

tempColumns.AddRange(from DataColumn dc in _dataTable.Columns
where !ColumnNameExists(dc.ColumnName)
select dc as Field ?? new Field(dc));
@@ -1372,10 +1403,10 @@ private object ParseCharacterColumn(Field field, int currentRow, char[] cBuffer,
break;

case 'C': // character record.
// Symbol | Data Type | Description
// -------+-----------+----------------------------------------------------------------------------
// C | Character | All OEM code page characters - padded with blanks to the width of the field.
// Symbol | Data Type | Description
// -------+-----------+----------------------------------------------------------------------------
// C | Character | All OEM code page characters - padded with blanks to the width of the field.

for (var i = cBuffer.Length - 1; i >= 0; --i)
{
if (cBuffer[i] != ' ')
@@ -1468,7 +1499,7 @@ private object ParseNumericColumn(Field field, int currentRow, char[] cBuffer, D
{
return DBNull.Value;
}

object tempObject = DBNull.Value;
Type t = field.DataType;
var errorMessage = new Lazy<string>(() => string.Format(parseErrString, tempStr, currentRow, field.Ordinal, field.ColumnName, _fileName, t));
@@ -1617,6 +1648,7 @@ private object ParseNumericColumn(Field field, int currentRow, char[] cBuffer, D
}
return tempObject;
}

/// <summary>
/// Read the header data from the DBF file.
/// </summary>
@@ -33,7 +33,7 @@ public class BufferedBinaryWriter
private long _bufferOffset; // Position of the start of the buffer relative to the start of the file
private int _bufferSize;
private long _fileOffset; // position in the entire file
private FileStream _fileStream;
private Stream _fileStream;
private bool _isBufferLoaded;
private int _maxBufferSize = 9600000; // Approximately around ten megs, divisible by 8
private IProgressHandler _progressHandler;
@@ -56,6 +56,26 @@ public BufferedBinaryWriter(string fileName)
_progressMeter = new ProgressMeter(_progressHandler);
}

/// <summary>
/// create buffered binary writer on arbitrary stream
/// </summary>
/// <param name="s"></param>
public BufferedBinaryWriter(Stream s)
{
long expectedByteCount = 100000;

_fileStream = s;
_fileLength = 0;
_fileOffset = 0;

_buffer = new byte[expectedByteCount];
_bufferSize = Convert.ToInt32(expectedByteCount);
_maxBufferSize = _bufferSize;
_writeOffset = -1; // There is no buffer loaded.
_bufferOffset = -1; // -1 means no buffer is loaded.

}

/// <summary>
/// Creates a new instance of BufferedBinaryWriter, and specifies where to send progress messages.
/// </summary>
Oops, something went wrong.

0 comments on commit 56770cb

Please sign in to comment.
You can’t perform that action at this time.