From 4e8197d544876be5ce96738761dbc8953e009e99 Mon Sep 17 00:00:00 2001 From: dariaterekhovaae <98411986+dariaterekhovaae@users.noreply.github.com> Date: Tue, 17 May 2022 13:44:10 +0300 Subject: [PATCH] [Chore]: Technical: add types for processors (#1798) * add types for processors Signed-off-by: Daria Terekhova * add changes due to review Signed-off-by: Evgeny Zhgulev * add fixes after code review Signed-off-by: Evgeny Zhgulev Co-authored-by: Evgeny Zhgulev --- src/actions/vis-state-actions.ts | 2 +- src/components/geocoder-panel.tsx | 4 +- src/layers/base-layer.ts | 1 - src/processors/data-processor.d.ts | 56 ------ .../{data-processor.js => data-processor.ts} | 171 +++++++++++------- src/processors/file-handler.d.ts | 46 ----- .../{file-handler.js => file-handler.ts} | 76 +++++--- src/processors/index.d.ts | 2 - src/processors/{index.js => index.ts} | 25 +-- src/processors/types.ts | 22 +++ src/reducers/types.ts | 2 +- src/utils/utils.ts | 14 +- 12 files changed, 195 insertions(+), 226 deletions(-) delete mode 100644 src/processors/data-processor.d.ts rename src/processors/{data-processor.js => data-processor.ts} (80%) delete mode 100644 src/processors/file-handler.d.ts rename src/processors/{file-handler.js => file-handler.ts} (74%) delete mode 100644 src/processors/index.d.ts rename src/processors/{index.js => index.ts} (71%) create mode 100644 src/processors/types.ts diff --git a/src/actions/vis-state-actions.ts b/src/actions/vis-state-actions.ts index 62381bca1f..aad1904d18 100644 --- a/src/actions/vis-state-actions.ts +++ b/src/actions/vis-state-actions.ts @@ -21,7 +21,7 @@ // vis-state-reducer import ActionTypes from 'constants/action-types'; import {AddDataToMapPayload} from '../actions/actions'; -import {FileCacheItem} from '../processors/file-handler'; +import {FileCacheItem} from '../processors/types'; import {Layer, LayerBaseConfig, LayerVisConfig} from 'layers'; import {Feature, InteractionConfig} from 'reducers/vis-state-updaters'; import {ValueOf, Merge, RGBColor} from '../reducers/types'; diff --git a/src/components/geocoder-panel.tsx b/src/components/geocoder-panel.tsx index 3edb68db6c..6d1b8be701 100644 --- a/src/components/geocoder-panel.tsx +++ b/src/components/geocoder-panel.tsx @@ -20,7 +20,7 @@ import React, {Component, ComponentType} from 'react'; import styled from 'styled-components'; -import Processors from 'processors'; +import {processRowObject} from 'processors'; import {FlyToInterpolator} from '@deck.gl/core'; import KeplerGlSchema from 'schemas'; import {getCenterAndZoomFromBounds} from 'utils/projection-utils'; @@ -80,7 +80,7 @@ const StyledGeocoderPanel = styled.div` function generateGeocoderDataset(lat, lon, text) { return { - data: Processors.processRowObject([ + data: processRowObject([ { lt: lat, ln: lon, diff --git a/src/layers/base-layer.ts b/src/layers/base-layer.ts index d166e65c4e..90fafd135c 100644 --- a/src/layers/base-layer.ts +++ b/src/layers/base-layer.ts @@ -721,7 +721,6 @@ class Layer { const colorUIProp = Object.entries(newConfig).reduce((accu, [key, value]) => { return { ...accu, - // @ts-expect-error TODO: better type guard for isPlainObject [key]: isPlainObject(accu[key]) && isPlainObject(value) ? 
{...accu[key], ...value} : value }; }, previous[prop] || DEFAULT_COLOR_UI); diff --git a/src/processors/data-processor.d.ts b/src/processors/data-processor.d.ts deleted file mode 100644 index 46fd3a8240..0000000000 --- a/src/processors/data-processor.d.ts +++ /dev/null @@ -1,56 +0,0 @@ -import {ProtoDataset} from '../actions'; -import {Field} from 'reducers/vis-state-updaters'; -import {SavedMap, ParsedDataset} from 'schemas'; -import {DataContainerInterface} from 'utils/table-utils/data-container-interface'; - -type RowData = { - [key: string]: any; -}[]; -type ProcessorResult = {fields: Field[]; rows: any[][]} | null; -export function processGeojson(rawData: object): ProcessorResult; -export function processCsvData(rawData: string | any[][], header?: string[]): ProcessorResult; -export function processKeplerglJSON(rawData: SavedMap): ProcessorResult; -export function processRowObject(rawData: object[]): ProcessorResult; -export function processKeplerglDataset( - rawData: object | object[] -): ParsedDataset | ParsedDataset[] | null; - -export function validateInputData(data: any): ProcessorResult; - -export function getSampleForTypeAnalyze(p: { - fields: string[]; - rows: any[][]; - sampleCount?: number; -}): object[]; - -export function getFieldsFromData(data: RowData, fieldOrder: string[]): Field[]; - -export function parseCsvRowsByFieldType( - rows: any[][], - geoFieldIdx: number, - field: Field, - i: number -): void; - -export function formatCsv(data: DataContainerInterface, fields: Field[]): string; - -export function analyzerTypeToFieldType(aType: string): string; - -export const DATASET_HANDLERS: { - row: typeof processRowObject; - geojson: typeof processGeojson; - csv: typeof processCsvData; - keplergl: typeof processKeplerglDataset; -}; - -export const Processors: { - processGeojson: typeof processGeojson; - processCsvData: typeof processCsvData; - processRowObject: typeof processRowObject; - processKeplerglJSON: typeof processKeplerglJSON; - processKeplerglDataset: typeof processKeplerglDataset; - analyzerTypeToFieldType: typeof analyzerTypeToFieldType; - getFieldsFromData: typeof getFieldsFromData; - parseCsvRowsByFieldType: typeof parseCsvRowsByFieldType; - formatCsv: typeof formatCsv; -}; diff --git a/src/processors/data-processor.js b/src/processors/data-processor.ts similarity index 80% rename from src/processors/data-processor.js rename to src/processors/data-processor.ts index 2499ff4450..9a23dbd4c8 100644 --- a/src/processors/data-processor.js +++ b/src/processors/data-processor.ts @@ -26,9 +26,14 @@ import {Analyzer, DATA_TYPES as AnalyzerDATA_TYPES} from 'type-analyzer'; import normalize from '@mapbox/geojson-normalize'; import {ALL_FIELD_TYPES, DATASET_FORMATS} from 'constants/default-settings'; import {notNullorUndefined, parseFieldValue} from 'utils/data-utils'; -import KeplerGlSchema from 'schemas'; +import KeplerGlSchema, {SavedMap, ParsedDataset} from 'schemas'; +import {LoadedMap} from 'schemas/schema-manager'; import {GUIDES_FILE_FORMAT_DOC} from 'constants/user-guides'; -import {isPlainObject, toArray} from 'utils/utils'; +import {hasOwnProperty, isPlainObject, toArray} from 'utils/utils'; +import {Field} from 'utils/table-utils/kepler-table'; +import {DataContainerInterface} from 'utils/table-utils/data-container-interface'; +import {ProcessorResult, RowData} from './types'; +import {Feature} from '@nebula.gl/edit-modes'; export const ACCEPTED_ANALYZER_TYPES = [ AnalyzerDATA_TYPES.DATE, @@ -58,20 +63,23 @@ const IGNORE_DATA_TYPES = 
Object.keys(AnalyzerDATA_TYPES).filter(
 
 export const PARSE_FIELD_VALUE_FROM_STRING = {
   [ALL_FIELD_TYPES.boolean]: {
-    valid: d => typeof d === 'boolean',
-    parse: d => d === 'true' || d === 'True' || d === 'TRUE' || d === '1'
+    valid: (d: unknown): boolean => typeof d === 'boolean',
+    parse: (d: unknown): boolean => d === 'true' || d === 'True' || d === 'TRUE' || d === '1'
   },
   [ALL_FIELD_TYPES.integer]: {
-    valid: d => parseInt(d, 10) === d,
-    parse: d => parseInt(d, 10)
+    // @ts-ignore
+    valid: (d: unknown): boolean => parseInt(d, 10) === d,
+    // @ts-ignore
+    parse: (d: unknown): number => parseInt(d, 10)
   },
   [ALL_FIELD_TYPES.timestamp]: {
-    valid: (d, field) =>
+    valid: (d: unknown, field: Field): boolean =>
       ['x', 'X'].includes(field.format) ? typeof d === 'number' : typeof d === 'string',
-    parse: (d, field) => (['x', 'X'].includes(field.format) ? Number(d) : d)
+    parse: (d: any, field: Field) => (['x', 'X'].includes(field.format) ? Number(d) : d)
   },
   [ALL_FIELD_TYPES.real]: {
-    valid: d => parseFloat(d) === d,
+    // @ts-ignore
+    valid: (d: unknown): boolean => parseFloat(d) === d,
     // Note this will result in NaN for some strings
     parse: parseFloat
   }
@@ -81,8 +89,7 @@ export const PARSE_FIELD_VALUE_FROM_STRING = {
  * Process csv data, output a data object with `{fields: [], rows: []}`.
  * The data object can be wrapped in a `dataset` and passed to [`addDataToMap`](../actions/actions.md#adddatatomap)
  * @param rawData raw csv string
- * @returns data object `{fields: [], rows: []}` can be passed to addDataToMaps
- * @type {typeof import('./data-processor').processCsvData}
+ * @returns data object `{fields: [], rows: []}` can be passed to addDataToMap
  * @public
  * @example
  * import {processCsvData} from 'kepler.gl/processors';
 * const dataObject = processCsvData(csvString);
 *
 * dispatch(addDataToMap({
 *  datasets: {
 *    info: {label: 'My Data', id: 'my_data'},
 *    data: dataObject
 *  },
 *  options: {centerMap: true, readOnly: true}
 * }));
 */
-export function processCsvData(rawData, header) {
-  let rows;
-  let headerRow;
+export function processCsvData(rawData: unknown[][], header?: string[]): ProcessorResult {
+  let rows: unknown[][] | undefined;
+  let headerRow: string[] | undefined;
 
   if (typeof rawData === 'string') {
-    const parsedRows = csvParseRows(rawData);
+    const parsedRows: string[][] = csvParseRows(rawData);
 
     if (!Array.isArray(parsedRows) || parsedRows.length < 2) {
       // looks like an empty file, throw error to be caught
@@ -123,6 +130,7 @@ export function processCsvData(rawData, header) {
     if (!Array.isArray(headerRow)) {
       // if data is passed in as array of rows and missing header
       // assume first row is header
+      // @ts-ignore
       headerRow = rawData[0];
       rows = rawData.slice(1);
     }
@@ -147,10 +155,10 @@ export function processCsvData(rawData, header) {
 
 /**
  * Parse rows of csv by analyzed field types. So that `'1'` -> `1`, `'True'` -> `true`
- * @param {Array} rows
- * @param {Array} fields
+ * @param rows
+ * @param fields
  */
-export function parseRowsByFields(rows, fields) {
+export function parseRowsByFields(rows: any[][], fields: Field[]) {
   // Edit rows in place
   const geojsonFieldIdx = fields.findIndex(f => f.name === '_geojson');
   fields.forEach(parseCsvRowsByFieldType.bind(null, rows, geojsonFieldIdx));
@@ -159,10 +167,16 @@ export function parseRowsByFields(rows, fields) {
 }
 /**
  * Getting sample data for analyzing field type.
- *
- * @type {typeof import('./data-processor').getSampleForTypeAnalyze}
  */
-export function getSampleForTypeAnalyze({fields, rows, sampleCount = 50}) {
+export function getSampleForTypeAnalyze({
+  fields,
+  rows,
+  sampleCount = 50
+}: {
+  fields: string[];
+  rows: unknown[][];
+  sampleCount?: number;
+}): RowData {
   const total = Math.min(sampleCount, rows.length);
   // const fieldOrder = fields.map(f => f.name);
   const sample = range(0, total, 1).map(d => ({}));
@@ -197,9 +211,9 @@ export function getSampleForTypeAnalyze({fields, rows, sampleCount = 50}) {
  * Convert falsy value in csv including `'', 'null', 'NULL', 'Null', 'NaN'` to `null`,
  * so that type-analyzer won't detect it as string
  *
- * @param {Array} rows
+ * @param rows
  */
-function cleanUpFalsyCsvValue(rows) {
+function cleanUpFalsyCsvValue(rows: unknown[][]): void {
   const re = new RegExp(CSV_NULLS, 'g');
   for (let i = 0; i < rows.length; i++) {
     for (let j = 0; j < rows[i].length; j++) {
@@ -207,7 +221,7 @@ function cleanUpFalsyCsvValue(rows) {
       // which will be parsed as '' by d3.csv
       // here we parse empty data as null
       // TODO: create warning when `CSV_NULLS` is detected in the data
-      if (typeof rows[i][j] === 'string' && rows[i][j].match(re)) {
+      if (typeof rows[i][j] === 'string' && (rows[i][j] as string).match(re)) {
         rows[i][j] = null;
       }
     }
@@ -221,9 +235,13 @@
  * @param geoFieldIdx field index
  * @param field
  * @param i
- * @type {typeof import('./data-processor').parseCsvRowsByFieldType}
  */
-export function parseCsvRowsByFieldType(rows, geoFieldIdx, field, i) {
+export function parseCsvRowsByFieldType(
+  rows: unknown[][],
+  geoFieldIdx: number,
+  field: Field,
+  i: number
+): void {
   const parser = PARSE_FIELD_VALUE_FROM_STRING[field.type];
   if (parser) {
     // check the first non-null value to see if it's already parsed
@@ -235,7 +253,13 @@
       // parse string value based on field type
       if (row[i] !== null) {
         row[i] = parser.parse(row[i], field);
-        if (geoFieldIdx > -1 && row[geoFieldIdx] && row[geoFieldIdx].properties) {
+        if (
+          geoFieldIdx > -1 &&
+          isPlainObject(row[geoFieldIdx]) &&
+          // @ts-ignore
+          hasOwnProperty(row[geoFieldIdx], 'properties')
+        ) {
+          // @ts-ignore
           row[geoFieldIdx].properties[field.name] = row[i];
         }
       }
@@ -250,7 +274,6 @@
  * @param data array of row object
  * @param fieldOrder array of field names as string
  * @returns formatted fields
- * @type {typeof import('./data-processor').getFieldsFromData}
  * @public
  * @example
 *
 * // {name: 'zeroOnes', format: '', fieldIdx: 7, type: 'integer'}];
 *
 */
-export function getFieldsFromData(data, fieldOrder) {
+export function getFieldsFromData(data: RowData, fieldOrder: string[]): Field[] {
   // add a check for epoch timestamp
   const metadata = Analyzer.computeColMeta(
     data,
@@ -318,7 +341,6 @@ export function getFieldsFromData(data, fieldOrder) {
     };
   });
 
-  // @ts-ignore
   return result;
 }
 
 /**
  * pass in an array of field names, rename duplicated ones
  * and return a map from old field index to new name
  *
- * @param {Array} fieldOrder
- * @returns {Object} new field name by index
+ * @param fieldOrder
+ * @returns new field name by index
  */
-export function renameDuplicateFields(fieldOrder) {
-  return fieldOrder.reduce(
+export function renameDuplicateFields(
+  fieldOrder: string[]
+): {allNames: string[]; fieldByIndex: string[]} {
+  return fieldOrder.reduce<{allNames: string[]; fieldByIndex: string[]}>(
     (accu, field, i) => {
       const {allNames} = accu;
       let fieldName = field;
@@ -349,7 +373,7 @@ export function renameDuplicateFields(fieldOrder) {
 
       return accu;
     },
-    {allNames: [], fieldByIndex: {}}
+    {allNames: [], fieldByIndex: []}
   );
 }
 
 /**
  * Convert type-analyzer output to kepler.gl field types
  *
  * @param aType
  * @returns corresponding type in `ALL_FIELD_TYPES`
- * @type {typeof import('./data-processor').analyzerTypeToFieldType}}
  */
 /* eslint-disable complexity */
-export function analyzerTypeToFieldType(aType) {
+export function analyzerTypeToFieldType(aType: string): string {
   const {
     DATE,
     TIME,
@@ -416,7 +439,6 @@ export function analyzerTypeToFieldType(aType) {
  * NOTE: This function may mutate input.
  * @param rawData an array of row object, each object should have the same number of keys
  * @returns dataset containing `fields` and `rows`
- * @type {typeof import('./data-processor').processRowObject}
  * @public
  * @example
 * import {addDataToMap} from 'kepler.gl/actions';
 *
 * dispatch(addDataToMap({
 *  datasets: {
 *    info: {label: 'My Data', id: 'my_data'},
 *    data: {
 *  }
 * }));
 */
-export function processRowObject(rawData) {
+export function processRowObject(rawData: unknown[]): ProcessorResult {
   if (!Array.isArray(rawData)) {
     return null;
   } else if (!rawData.length) {
     return {
       fields: [],
       rows: []
     };
   }
 
-  const keys = Object.keys(rawData[0]);
-  const rows = rawData.map(d => keys.map(key => d[key]));
+  const keys = Object.keys(rawData[0]); // [lat, lng, value]
+  const rows = rawData.map(d => keys.map(key => d[key])); // [[31.27, 127.56, 3]]
 
   // row objects can still contain values like `Null` or `N/A`
   cleanUpFalsyCsvValue(rows);
 
@@ -460,9 +482,8 @@ export function processRowObject(rawData) {
  * The data object can be wrapped in a `dataset` and passed to [`addDataToMap`](../actions/actions.md#adddatatomap)
 * NOTE: This function may mutate input.
 *
- * @param rawData raw geojson feature collection
- * @returns dataset containing `fields` and `rows`
- * @type {typeof import('./data-processor').processGeojson}
+ * @param rawData raw geojson feature collection
+ * @returns dataset containing `fields` and `rows`
 * @public
 * @example
 * import {addDataToMap} from 'kepler.gl/actions';
 * import {processGeojson} from 'kepler.gl/processors';
 *
 * const geojson = {
@@ -493,7 +514,7 @@ export function processRowObject(rawData) {
 * }
 * }));
 */
-export function processGeojson(rawData) {
+export function processGeojson(rawData: unknown): ProcessorResult {
   const normalizedGeojson = normalize(rawData);
 
   if (!normalizedGeojson || !Array.isArray(normalizedGeojson.features)) {
@@ -505,7 +526,7 @@ export function processGeojson(rawData) {
   }
 
   // getting all feature fields
-  const allDataRows = [];
+  const allDataRows: Array<{_geojson: Feature} & keyof Feature> = [];
   for (let i = 0; i < normalizedGeojson.features.length; i++) {
     const f = normalizedGeojson.features[i];
     if (f.geometry) {
@@ -517,13 +538,13 @@ export function processGeojson(rawData) {
     }
   }
   // get all the field
-  const fields = allDataRows.reduce((prev, curr) => {
+  const fields = allDataRows.reduce((accu, curr) => {
     Object.keys(curr).forEach(key => {
-      if (!prev.includes(key)) {
-        prev.push(key);
+      if (!accu.includes(key)) {
+        accu.push(key);
       }
     });
-    return prev;
+    return accu;
   }, []);
 
   // make sure each feature has exact same fields
@@ -541,16 +562,16 @@ export function processGeojson(rawData) {
 
 /**
  * On export data to csv
- * @param {import('utils/table-utils/data-container-interface').DataContainerInterface} dataContainer
- * @param {Array} fields `dataset.fields`
- * @returns {string} csv string
+ * @param data
+ * @param fields `dataset.fields`
+ * @returns csv string
  */
-export function formatCsv(dataContainer, fields) {
+export function formatCsv(data: DataContainerInterface, fields: Field[]): string {
   const columns = fields.map(f => f.displayName || f.name);
   const formattedData = [columns];
 
   // parse geojson object as string
-  for (const row of dataContainer.rows(true)) {
+  for (const row of data.rows(true)) {
     formattedData.push(row.map((d, i) => parseFieldValue(d, fields[i].type)));
   }
 
@@ -559,9 +580,8 @@ export function formatCsv(dataContainer, fields) {
 
 /**
  * Validate input data, adding missing field types and renaming duplicate columns
- * @type {typeof import('./data-processor').validateInputData}
 */
-export function validateInputData(data) {
+export function validateInputData(data: Record<string, unknown>): ProcessorResult {
   if (!isPlainObject(data)) {
     assert('addDataToMap Error: dataset.data cannot be null');
     return null;
@@ -630,11 +650,11 @@ export function validateInputData(data) {
   return {fields: updatedFields, rows};
 }
 
-function findNonEmptyRowsAtField(rows, fieldIdx, total) {
-  const sample = [];
+function findNonEmptyRowsAtField(rows: unknown[][], fieldIdx: number, total: number): any[] {
+  const sample: any[] = [];
   let i = 0;
   while (sample.length < total && i < rows.length) {
-    if (notNullorUndefined(rows[i][fieldIdx])) {
+    if (notNullorUndefined(rows[i]?.[fieldIdx])) {
       sample.push(rows[i]);
     }
     i++;
@@ -645,10 +665,8 @@ function findNonEmptyRowsAtField(rows, fieldIdx, total) {
 /**
  * Process saved kepler.gl json to be passed to [`addDataToMap`](../actions/actions.md#adddatatomap).
 * The json object should contain `datasets` and `config`.
- * @param {Object} rawData - * @param {Array} rawData.datasets - * @param {Object} rawData.config - * @returns {Object} datasets and config `{datasets: {}, config: {}}` + * @param rawData + * @returns datasets and config `{datasets: {}, config: {}}` * @public * @example * import {addDataToMap} from 'kepler.gl/actions'; @@ -656,15 +674,15 @@ function findNonEmptyRowsAtField(rows, fieldIdx, total) { * * dispatch(addDataToMap(processKeplerglJSON(keplerGlJson))); */ -export function processKeplerglJSON(rawData) { +export function processKeplerglJSON(rawData: SavedMap): LoadedMap | null { return rawData ? KeplerGlSchema.load(rawData.datasets, rawData.config) : null; } /** * Parse a single or an array of datasets saved using kepler.gl schema - * @param {Array | Array} rawData + * @param rawData */ -export function processKeplerglDataset(rawData) { +export function processKeplerglDataset(rawData: unknown): ParsedDataset | ParsedDataset[] | null { if (!rawData) { return null; } @@ -676,14 +694,29 @@ export function processKeplerglDataset(rawData) { return Array.isArray(rawData) ? results : results[0]; } -export const DATASET_HANDLERS = { +export const DATASET_HANDLERS: { + row: typeof processRowObject; + geojson: typeof processGeojson; + csv: typeof processCsvData; + keplergl: typeof processKeplerglDataset; +} = { [DATASET_FORMATS.row]: processRowObject, [DATASET_FORMATS.geojson]: processGeojson, [DATASET_FORMATS.csv]: processCsvData, [DATASET_FORMATS.keplergl]: processKeplerglDataset }; -export const Processors = { +export const Processors: { + processGeojson: typeof processGeojson; + processCsvData: typeof processCsvData; + processRowObject: typeof processRowObject; + processKeplerglJSON: typeof processKeplerglJSON; + processKeplerglDataset: typeof processKeplerglDataset; + analyzerTypeToFieldType: typeof analyzerTypeToFieldType; + getFieldsFromData: typeof getFieldsFromData; + parseCsvRowsByFieldType: typeof parseCsvRowsByFieldType; + formatCsv: typeof formatCsv; +} = { processGeojson, processCsvData, processRowObject, diff --git a/src/processors/file-handler.d.ts b/src/processors/file-handler.d.ts deleted file mode 100644 index 751aade45b..0000000000 --- a/src/processors/file-handler.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) 2020 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. 
-
-import {AddDataToMapPayload} from 'actions/actions';
-
-export type FileCacheItem = {
-  data: any;
-  info: {
-    label: string;
-    format: string;
-  };
-};
-
-export function readFileInBatches(payload: {
-  file: File;
-  fileCache: FileCacheItem[];
-  loaders: LoaderObject;
-  loadOptions: object;
-}): AsyncGenerator;
-
-export function processFileData(payload: {
-  content: any;
-  fileCache: FileCacheItem[];
-}): Promise;
-
-export function filesToDataPayload(fileCache: FileCacheItem[]): AddDataToMapPayload[];
-
-export function readBatch(asyncIterator, fileName): AsyncIterable;
-export function isKeplerGlMap(obj: any): Boolean;
diff --git a/src/processors/file-handler.js b/src/processors/file-handler.ts
similarity index 74%
rename from src/processors/file-handler.js
rename to src/processors/file-handler.ts
index 1a0e1c745f..009058cbec 100644
--- a/src/processors/file-handler.js
+++ b/src/processors/file-handler.ts
@@ -25,6 +25,11 @@ import {CSVLoader} from '@loaders.gl/csv';
 import {processGeojson, processKeplerglJSON, processRowObject} from './data-processor';
 import {isPlainObject, generateHashId} from 'utils/utils';
 import {DATASET_FORMATS} from 'constants/default-settings';
+import {LoaderObject} from '@loaders.gl/loader-utils';
+import {AddDataToMapPayload} from 'actions/actions';
+import {FileCacheItem, ValidKeplerGlMap} from './types';
+import {Feature} from 'reducers';
+import {FeatureCollection} from '@turf/helpers';
 
 const BATCH_TYPE = {
   METADATA: 'metadata',
@@ -47,35 +52,39 @@ const JSON_LOADER_OPTIONS = {
   ]
 };
 
-export function isGeoJson(json) {
+export function isGeoJson(json: unknown): json is Feature | FeatureCollection {
   // json can be feature collection
   // or single feature
   return isPlainObject(json) && (isFeature(json) || isFeatureCollection(json));
 }
 
-export function isFeature(json) {
-  return json.type === 'Feature' && json.geometry;
+export function isFeature(json: unknown): json is Feature {
+  return isPlainObject(json) && json.type === 'Feature' && !!json.geometry;
 }
 
-export function isFeatureCollection(json) {
-  return json.type === 'FeatureCollection' && json.features;
+export function isFeatureCollection(json: unknown): json is FeatureCollection {
+  return isPlainObject(json) && json.type === 'FeatureCollection' && !!json.features;
 }
 
-export function isRowObject(json) {
+export function isRowObject(json: any): boolean {
   return Array.isArray(json) && isPlainObject(json[0]);
 }
 
-export function isKeplerGlMap(json) {
+export function isKeplerGlMap(json: unknown): json is ValidKeplerGlMap {
   return Boolean(
     isPlainObject(json) &&
       json.datasets &&
       json.config &&
       json.info &&
+      isPlainObject(json.info) &&
       json.info.app === 'kepler.gl'
   );
 }
 
-export async function* makeProgressIterator(asyncIterator, info) {
+export async function* makeProgressIterator(
+  asyncIterator: AsyncIterable<any>,
+  info: {size: number}
+): AsyncGenerator {
   let rowCount = 0;
 
   for await (const batch of asyncIterator) {
@@ -87,7 +96,6 @@ export async function* makeProgressIterator(asyncIterator, info) {
     const progress = {
       rowCount,
       rowCountInBatch,
-      // @ts-ignore
      ...(Number.isFinite(percent) ?
{percent} : {})
     };
 
     yield {...batch, progress};
   }
 }
 
 // eslint-disable-next-line complexity
-export async function* readBatch(asyncIterator, fileName) {
+export async function* readBatch(
+  asyncIterator: AsyncIterable<any>,
+  fileName: string
+): AsyncGenerator {
   let result = null;
-  const batches = [];
+  const batches = <any>[];
 
   for await (const batch of asyncIterator) {
     // Last batch will have this special type and will provide all the root
@@ -134,7 +145,16 @@ export async function* readBatch(asyncIterator, fileName) {
   }
 }
 
-export async function readFileInBatches({file, fileCache = [], loaders = [], loadOptions = {}}) {
+export async function readFileInBatches({
+  file,
+  loaders = [],
+  loadOptions = {}
+}: {
+  file: File;
+  fileCache: FileCacheItem[];
+  loaders: LoaderObject[];
+  loadOptions: any;
+}): Promise<AsyncGenerator> {
   loaders = [JSONLoader, CSVLoader, ...loaders];
   loadOptions = {
     csv: CSV_LOADER_OPTIONS,
@@ -149,12 +169,18 @@ export async function readFileInBatches({file, fileCache = [], loaders = [], loadOptions = {}}) {
   return readBatch(progressIterator, file.name);
 }
 
-export function processFileData({content, fileCache}) {
+export function processFileData({
+  content,
+  fileCache
+}: {
+  content: {data: unknown; fileName: string};
+  fileCache: FileCacheItem[];
+}): Promise<FileCacheItem[]> {
   return new Promise((resolve, reject) => {
     const {data} = content;
 
-    let format;
-    let processor;
+    let format: string | undefined;
+    let processor: Function | undefined;
     if (isKeplerGlMap(data)) {
       format = DATASET_FORMATS.keplergl;
       processor = processKeplerglJSON;
@@ -181,17 +207,19 @@ export function processFileData({content, fileCache}) {
       ]);
     }
 
-    reject('Unknow File Format');
+    reject('Unknown File Format');
   });
 }
 
-export function filesToDataPayload(fileCache) {
+export function filesToDataPayload(fileCache: FileCacheItem[]): AddDataToMapPayload[] {
   // separate out files which could be single datasets or a keplergl map json
-  const collection = fileCache.reduce(
+  const collection = fileCache.reduce<{
+    datasets: FileCacheItem[];
+    keplerMaps: AddDataToMapPayload[];
+  }>(
     (accu, file) => {
-      const {data, info = {}} = file;
-      const {format} = info;
-      if (format === DATASET_FORMATS.keplergl) {
+      const {data, info} = file;
+      if (info?.format === DATASET_FORMATS.keplergl) {
        // if file contains a single kepler map dataset & config
        accu.keplerMaps.push({
          ...data,
          options: {
            centerMap: !(data.config && data.config.mapState)
          }
        });
-      } else if (DATASET_FORMATS[format]) {
+      } else if (DATASET_FORMATS[info?.format]) {
        // if file contains only data
        const newDataset = {
          data,
          info: {
-            id: info.id || generateHashId(4),
-            ...info
+            id: info?.id || generateHashId(4),
+            ...(info || {})
          }
        };
        accu.datasets.push(newDataset);
diff --git a/src/processors/index.d.ts b/src/processors/index.d.ts
deleted file mode 100644
index cf01a97de1..0000000000
--- a/src/processors/index.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export * from './data-processor';
-export * from './file-handler';
diff --git a/src/processors/index.js b/src/processors/index.ts
similarity index 71%
rename from src/processors/index.js
rename to src/processors/index.ts
index 21ad4ff8b5..00377e2f66 100644
--- a/src/processors/index.js
+++ b/src/processors/index.ts
@@ -18,26 +18,5 @@
 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 // THE SOFTWARE.
-// Data Processor
-export {
-  formatCsv,
-  processGeojson,
-  processCsvData,
-  processRowObject,
-  processKeplerglJSON,
-  processKeplerglDataset,
-  analyzerTypeToFieldType,
-  getFieldsFromData,
-  parseCsvRowsByFieldType
-} from './data-processor';
-
-// File Handlers
-export {
-  readFileInBatches,
-  processFileData,
-  filesToDataPayload,
-  isKeplerGlMap,
-  readBatch
-} from './file-handler';
-
-export {Processors as default} from './data-processor';
+export * from './data-processor';
+export * from './file-handler';
diff --git a/src/processors/types.ts b/src/processors/types.ts
new file mode 100644
index 0000000000..84e52a37b5
--- /dev/null
+++ b/src/processors/types.ts
@@ -0,0 +1,22 @@
+import {Field} from 'utils/table-utils/kepler-table';
+
+export type FileCacheItem = {
+  data: any;
+  info: {
+    id?: string;
+    label: string;
+    format: string;
+  };
+};
+
+export type RowData = {
+  [key: string]: string | null;
+}[];
+
+export type ProcessorResult = {fields: Field[]; rows: any[][]} | null;
+
+export type ValidKeplerGlMap = {
+  datasets: unknown;
+  config: unknown;
+  info: Record<string, unknown>;
+};
diff --git a/src/reducers/types.ts b/src/reducers/types.ts
index 046e831e4d..b0aca90944 100644
--- a/src/reducers/types.ts
+++ b/src/reducers/types.ts
@@ -1,6 +1,6 @@
 export type RGBColor = [number, number, number];
 export type RGBAColor = [number, number, number, number];
-export type HexColor = string; // this is the best tpescript can do at the moment
+export type HexColor = string; // this is the best typescript can do at the moment
 export type Millisecond = number;
 
 export type ValueOf<T> = T[keyof T];
diff --git a/src/utils/utils.ts b/src/utils/utils.ts
index fadabc5abe..a4e7709773 100644
--- a/src/utils/utils.ts
+++ b/src/utils/utils.ts
@@ -44,10 +44,22 @@ export function isChrome() {
  * whether the input is a plain object
 * @returns {boolean} - yes or no
 */
-export function isPlainObject(obj) {
+export function isPlainObject(obj: unknown): obj is Record<string, unknown> {
   return obj === Object(obj) && typeof obj !== 'function' && !Array.isArray(obj);
 }
 
+/**
+ * whether object has property
+ * @param obj
+ * @param prop
+ * @returns {boolean} - yes or no
+ */
+export function hasOwnProperty<X extends {}, Y extends PropertyKey>(
+  obj: X,
+  prop: Y
+): obj is X & Record<Y, unknown> {
+  return obj.hasOwnProperty(prop);
+}
+
 /**
  * Capitalize first letter of a string
 * @param {string} str
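
A minimal usage sketch of the API after this change (illustrative only: the sample rows, the dataset id/label, and the bare `dispatch` follow the JSDoc examples above and are not part of this patch). Because `ProcessorResult` is `{fields: Field[]; rows: any[][]} | null`, callers now have to narrow away the `null` case before wrapping the result in a dataset:

    import {processCsvData} from 'kepler.gl/processors';
    import {addDataToMap} from 'kepler.gl/actions';

    // With no explicit `header` argument, processCsvData treats the first row
    // as the header; parseCsvRowsByFieldType then coerces '3' -> 3, etc.
    const result = processCsvData([
      ['lat', 'lng', 'value'],
      ['31.27', '127.56', '3'],
      ['31.22', '127.42', '4']
    ]);

    if (result !== null) {
      const {fields, rows} = result; // fields: Field[], rows: any[][]
      dispatch(
        addDataToMap({
          datasets: {
            info: {label: 'My Data', id: 'my_data'},
            data: {fields, rows}
          },
          options: {centerMap: true, readOnly: true}
        })
      );
    }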