Skip to content

Commit

Permalink
feat(core): support shapefile in reearth/core (#420)
Browse files Browse the repository at this point in the history
Co-authored-by: rot1024 <aayhrot@gmail.com>
  • Loading branch information
pyshx and rot1024 committed Feb 7, 2023
1 parent b4b04bc commit 5085022
Show file tree
Hide file tree
Showing 6 changed files with 253 additions and 3 deletions.
3 changes: 2 additions & 1 deletion package.json
Expand Up @@ -97,8 +97,8 @@
"@auth0/auth0-react": "1.12.0",
"@emotion/react": "11.10.5",
"@emotion/styled": "11.10.5",
"@mapbox/vector-tile": "1.3.1",
"@floating-ui/react-dom": "1.1.2",
"@mapbox/vector-tile": "1.3.1",
"@monaco-editor/react": "4.4.6",
"@popperjs/core": "2.11.6",
"@rot1024/use-transition": "1.0.0",
Expand Down Expand Up @@ -130,6 +130,7 @@
"js-file-download": "0.4.12",
"jsep": "1.3.8",
"jsonpath-plus": "7.2.0",
"jszip": "3.10.1",
"leaflet": "1.9.3",
"localforage": "1.10.0",
"lodash-es": "4.17.21",
Expand Down
1 change: 1 addition & 0 deletions src/core/engines/Cesium/Feature/index.tsx
Expand Up @@ -43,6 +43,7 @@ const displayConfig: Record<DataType, (keyof typeof components)[] | "auto"> = {
"3dtiles": ["3dtiles"],
"osm-buildings": ["3dtiles"],
gpx: "auto",
shapefile: "auto",
};

// Some layer that is delegated data is not computed when layer is updated.
Expand Down
2 changes: 2 additions & 0 deletions src/core/mantle/data/index.ts
Expand Up @@ -5,12 +5,14 @@ import type { Data, DataRange, Feature } from "../types";
import { fetchCSV } from "./csv";
import { fetchGeoJSON } from "./geojson";
import { fetchGPXfile } from "./gpx";
import { fetchShapefile } from "./shapefile";

export type DataFetcher = (data: Data, range?: DataRange) => Promise<Feature[] | void>;

// Maps each supported data type to the fetcher that loads and parses it.
const registry: Record<string, DataFetcher> = {
  geojson: fetchGeoJSON,
  csv: fetchCSV,
  shapefile: fetchShapefile,
  gpx: fetchGPXfile,
};

Expand Down
228 changes: 228 additions & 0 deletions src/core/mantle/data/shapefile.ts
@@ -0,0 +1,228 @@
import type { GeoJSON } from "geojson";
import JSZip from "jszip";

import type { Data, DataRange, Feature } from "../types";

import { processGeoJSON } from "./geojson";
import { f } from "./utils";

export async function fetchShapefile(data: Data, range?: DataRange): Promise<Feature[] | void> {
const arrayBuffer = data.url ? await (await f(data.url)).arrayBuffer() : data.value;
const zip = await JSZip.loadAsync(new Uint8Array(arrayBuffer));

let shpFileArrayBuffer: ArrayBuffer | undefined;
let dbfFileArrayBuffer: ArrayBuffer | undefined;

// Access the files inside the ZIP archive
const zipEntries = Object.values(zip.files);
for (const entry of zipEntries) {
const filename = entry.name;
if (filename.endsWith(".shp")) {
shpFileArrayBuffer = await entry.async("arraybuffer");
} else if (filename.endsWith(".dbf")) {
dbfFileArrayBuffer = await entry.async("arraybuffer");
}
}

if (shpFileArrayBuffer && dbfFileArrayBuffer) {
return processGeoJSON(await parseShapefiles(shpFileArrayBuffer, dbfFileArrayBuffer), range);
} else {
throw new Error(`Zip archive does not contain .shp and .dbf files`);
}
}

/**
 * Parses the binary contents of a shapefile pair — the .shp geometry file and
 * the .dbf attribute table — into a single GeoJSON FeatureCollection.
 *
 * @param shpFile - raw bytes of the .shp file
 * @param dbfFile - raw bytes of the .dbf file
 * @param configuration - optional parsing options (e.g. attribute trimming)
 */
export async function parseShapefiles(
  shpFile: ArrayBuffer,
  dbfFile: ArrayBuffer,
  configuration?: Configuration,
): Promise<GeoJSON> {
  const parser = new ShapefileParser(shpFile, dbfFile, configuration);
  return parser.parse();
}

// Options controlling how shapefile attributes are parsed.
interface Configuration {
  // When false, .dbf string values keep their fixed-width padding;
  // by default (undefined/true) values are trimmed.
  trim?: boolean;
}

/**
 * Minimal ESRI shapefile reader.
 *
 * Reads geometry records from the .shp binary and attribute rows from the
 * .dbf binary, then pairs them index-by-index into a GeoJSON
 * FeatureCollection. Supported shape types: Point (1/11/21),
 * PolyLine (3/13/23) → MultiLineString, Polygon (5/15/25) and
 * MultiPoint (8/18/28). Z and M values are ignored.
 */
class ShapefileParser {
  #shp: ArrayBuffer;
  #dbf: ArrayBuffer;
  #configuration?: Configuration;
  #features: any[] = [];
  #propertiesArray: any[] = [];

  constructor(shp: ArrayBuffer, dbf: ArrayBuffer, configuration?: Configuration) {
    this.#shp = shp;
    this.#dbf = dbf;
    this.#configuration = configuration;
  }

  // Parse the .shp file: a 100-byte header followed by variable-length
  // geometry records. Header integers are big-endian; record payloads are
  // little-endian (per the ESRI spec).
  #parseShp() {
    const dataView = new DataView(this.#shp);
    let idx = 0;
    // Total file length (in 16-bit words) lives at byte 24 of the header.
    const wordLength = dataView.getInt32((idx += 6 * 4), false);
    const byteLength = wordLength * 2;
    idx += 4; // version
    idx += 4; // shape type
    idx += 4 + 8 * 8; // bounding box + Z/M ranges — header is 100 bytes total

    const features: any[] = [];
    while (idx < byteLength) {
      const feature: any = {};
      // Record header: record number (skipped), then content length in words.
      const length: number = dataView.getInt32((idx += 4), false);
      // The first 4 bytes of record content hold the shape type.
      const type: number = dataView.getInt32((idx += 4), true);
      let idxFeature: number = idx + 4; // start of the geometry payload
      let numberOfParts: number, nbpoints: number, numberOfPoints: number, nbpartsPoint: number[];
      switch (type) {
        case 1:
        case 11:
        case 21:
          // Point / PointZ / PointM: X and Y doubles (Z/M ignored).
          feature.type = "Point";
          feature.coordinates = [
            dataView.getFloat64(idxFeature, true),
            dataView.getFloat64(idxFeature + 8, true),
          ];
          break;
        case 3:
        case 13:
        case 23:
        case 5:
        case 15:
        case 25:
          // PolyLine(Z/M) and Polygon(Z/M) share one layout:
          // bbox (32 bytes), part count, point count, part start indices,
          // then the flat point array.
          if (type === 3 || type === 13 || type === 23) {
            feature.type = "MultiLineString";
          } else {
            feature.type = "Polygon";
          }
          numberOfParts = dataView.getInt32(idxFeature + 32, true);
          nbpoints = dataView.getInt32(idxFeature + 36, true);
          idxFeature += 40;
          nbpartsPoint = new Array(numberOfParts).fill(0).map(() => {
            const result = dataView.getInt32(idxFeature, true);
            idxFeature += 4;
            return result;
          });
          // Slice the point array into parts (line strings / polygon rings).
          feature.coordinates = new Array(numberOfParts).fill(0).map((_, i) => {
            const idstart = nbpartsPoint[i];
            const idend = (i < numberOfParts - 1 ? nbpartsPoint[i + 1] : nbpoints) - 1;
            const part = [];
            for (let j = idstart; j <= idend; j++) {
              part.push([
                dataView.getFloat64(idxFeature, true),
                dataView.getFloat64(idxFeature + 8, true),
              ]);
              idxFeature += 16;
            }
            return part;
          });
          break;
        case 8:
        case 18:
        case 28:
          // MultiPoint(Z/M): bbox (32 bytes), point count, then points.
          feature.type = "MultiPoint";
          numberOfPoints = dataView.getInt32(idxFeature + 32, true);
          idxFeature += 36;
          feature.coordinates = new Array(numberOfPoints).fill(0).map(() => {
            const result = [
              dataView.getFloat64(idxFeature, true),
              dataView.getFloat64(idxFeature + 8, true),
            ];
            idxFeature += 16;
            return result;
          });
          break;
        default:
          // Null shapes (type 0) and unsupported types produce a geometry
          // object without a `type`.
          // TODO(review): consider emitting `geometry: null` instead to stay
          // valid GeoJSON — confirm downstream consumers first.
          break;
      }

      // Advance past the record content; `length` is in 16-bit words and
      // includes the 4-byte shape type already consumed.
      idx += length * 2;
      features.push(feature);
    }
    this.#features = features;
  }

  // Parse the .dbf (dBASE) file: a 32-byte header, a sequence of 32-byte
  // field descriptors terminated by 0x0D, then fixed-width text records.
  #parseDbf() {
    const dataView = new DataView(this.#dbf);
    let idx = 4;
    // Record count at byte 4, little-endian.
    const numberOfRecords: number = dataView.getInt32(idx, true);
    idx += 28; // skip the remainder of the 32-byte header
    let end = false;
    const fields = [];
    // Read field descriptors until the 0x0D terminator. The bounds check
    // guards against malformed files lacking a terminator (the unguarded
    // loop could read past the buffer and throw uncaught).
    while (idx + 32 <= dataView.byteLength && dataView.getUint8(idx) !== 0x0d) {
      const field: any = {};
      const nameArray: string[] = [];
      // Field name: first 10 of the 11 name bytes, NUL padding skipped.
      for (let i = 0; i < 10; i++) {
        const letter = dataView.getUint8(idx);
        if (letter !== 0) {
          nameArray.push(String.fromCharCode(letter));
        }
        idx += 1;
      }
      field.name = nameArray.join("");
      idx += 1; // 11th (last) name byte
      field.type = String.fromCharCode(dataView.getUint8(idx));
      idx += 5; // type byte + 4 reserved bytes
      field.fieldLength = dataView.getUint8(idx);
      idx += 16; // remainder of the 32-byte descriptor
      fields.push(field);
    }
    idx += 1; // skip the 0x0D terminator

    const propertiesArray = [];
    for (let i = 0; i < numberOfRecords; i++) {
      const properties: any = {};
      if (!end) {
        try {
          idx += 1; // record deletion flag (deleted records are not filtered)
          for (let j = 0; j < fields.length; j++) {
            // Field values are fixed-width text; decode byte by byte.
            const charString = [];
            for (let h = 0; h < fields[j].fieldLength; h++) {
              charString.push(String.fromCharCode(dataView.getUint8(idx)));
              idx += 1;
            }
            let str = charString.join("");
            if (this.#configuration?.trim !== false) {
              str = str.trim();
            }
            // Numeric-looking values become numbers, everything else strings.
            const number = parseFloat(str);
            properties[fields[j].name] = isNaN(number) ? str : number;
          }
        } catch (err) {
          // Ran past the end of the buffer: stop decoding and leave the
          // remaining records with empty properties.
          end = true;
        }
      }
      propertiesArray.push(properties);
    }
    this.#propertiesArray = propertiesArray;
  }

  // Pair each geometry with its attribute row by index. If the .shp and
  // .dbf record counts disagree, the surplus entries are dropped.
  #geoJSON() {
    const geojson: any = {
      type: "FeatureCollection",
      features: [],
    };
    for (let i = 0; i < Math.min(this.#features.length, this.#propertiesArray.length); i++) {
      geojson.features.push({
        type: "Feature",
        geometry: this.#features[i],
        properties: this.#propertiesArray[i],
      });
    }
    return geojson;
  }

  // Runs both parses and returns the combined FeatureCollection.
  parse(): GeoJSON {
    this.#parseShp();
    this.#parseDbf();

    return this.#geoJSON();
  }
}
3 changes: 2 additions & 1 deletion src/core/mantle/types/index.ts
Expand Up @@ -87,7 +87,8 @@ export type DataType =
| "wms"
| "mvt"
| "kml"
| "gpx";
| "gpx"
| "shapefile";

// Feature
export type CommonFeature<T extends "feature" | "computedFeature"> = {
Expand Down
19 changes: 18 additions & 1 deletion yarn.lock
Expand Up @@ -12909,6 +12909,16 @@ jsonwebtoken@^9.0.0:
array-includes "^3.1.5"
object.assign "^4.1.3"

jszip@3.10.1:
version "3.10.1"
resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.10.1.tgz#34aee70eb18ea1faec2f589208a157d1feb091c2"
integrity sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==
dependencies:
lie "~3.3.0"
pako "~1.0.2"
readable-stream "~2.3.6"
setimmediate "^1.0.5"

junk@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/junk/-/junk-3.1.0.tgz#31499098d902b7e98c5d9b9c80f43457a88abfa1"
Expand Down Expand Up @@ -13038,6 +13048,13 @@ lie@3.1.1:
dependencies:
immediate "~3.0.5"

lie@~3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a"
integrity sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==
dependencies:
immediate "~3.0.5"

lilconfig@2.0.6, lilconfig@^2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4"
Expand Down Expand Up @@ -14872,7 +14889,7 @@ pako@^2.0.4:
resolved "https://registry.yarnpkg.com/pako/-/pako-2.0.4.tgz#6cebc4bbb0b6c73b0d5b8d7e8476e2b2fbea576d"
integrity sha512-v8tweI900AUkZN6heMU/4Uy4cXRc2AYNRggVmTR+dEncawDJgCdLMximOVA2p4qO57WMynangsfGRb5WD6L1Bg==

pako@~1.0.5:
pako@~1.0.2, pako@~1.0.5:
version "1.0.11"
resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf"
integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==
Expand Down

0 comments on commit 5085022

Please sign in to comment.