diff --git a/README.md b/README.md index c4a6ba6..e4d0272 100644 --- a/README.md +++ b/README.md @@ -28,19 +28,25 @@ See files in the `./test` directory. ## TODO/Ideas +- [ ] Import: Preview Table: Use highlight.js, mongodb-ace-mode, or something so the text style of the value within a cell matches its destination type +- [ ] Export: Use electron add to destination file to [recent documents](https://electronjs.org/docs/tutorial/recent-documents) +- [ ] Import and Export: Show system notification when operation completes. like dropbox screenshot message. toast "XX/XX documents successfully" +- [ ] Import and Export: New Option: If you need to [specify extended-json legacy spec](https://github.com/mongodb/js-bson/pull/339) - [ ] Refactor src/modules/ so import and export reuse a common base -- [ ] Import and Export: Show system notification when operation completes. like dropbox screenshot message. -- [ ] Import csv: dynamicTyping of values like papaparse -- [ ] Import csv: mapHeaders option to support existing .() caster like [mongoimport does today][mongoimport] +- [ ] Import: bson-csv: support existing .() caster like [mongoimport does today][mongoimport] - [ ] Import: expose finer-grained bulk op results in progress -- [ ] Import: define import mode: insert, upsert, merge -- [ ] Import: option to specify a different path for `_id` such as `business_id` in the yelp dataset -- [ ] Import: Paste URL to fetch from -- [ ] Import: multi file import via archive (supports gzip/zip/bzip2/etc.) -- [ ] Import: option for path to pass to JSONStream for nested docs (e.g. `results` array when fetching JSON from a rest api) -- [ ] Import: Option to drop target collection before import -- [ ] Import: Drop file target in modal -- [ ] Export: use electron add to destination file to [recent documents](https://electronjs.org/docs/tutorial/recent-documents) +- [ ] Import: New Option: drop target collection before import +- [ ] Import: New Option: define import mode: insert, upsert, merge +- [ ] Import: New Option: specify a different path for `_id` such as `business_id` in the yelp dataset +- [ ] Import: Option for path to pass to JSONStream for nested docs (e.g. `results` array when fetching JSON from a rest api) +- [ ] Import: New Option: Paste URL to fetch from +- [ ] Import: Preview Table: use `react-table` and [`react-window`](https://www.npmjs.com/package/react-window-infinite-loader) for fixed headers and more # of documents to preview +- [ ] Import: Preview Table: Allow transpose on fields/values so all type selection and projection is in a single left aligned list +- [ ] Import: Multi file import via archive (supports gzip/zip/bzip2/etc.) +- [ ] Import: Use schema parser or something later to handle complete tabular renderings of sparse/polymorphic +- [ ] Import: Improve import-size-guesstimator +- [ ] Import: guess delimiter in `src/utils/detect-import-file.js` +- [ ] Import and Export: Extract anything from `./src/utils` that could live as standalone modules so other things like say a cli or a different platform could reuse compass' import/export business logic and perf. 
## License diff --git a/config/webpack.dev.config.js b/config/webpack.dev.config.js index 2487f5e..baf2e2e 100644 --- a/config/webpack.dev.config.js +++ b/config/webpack.dev.config.js @@ -11,6 +11,9 @@ const config = { mode: 'development', target: 'electron-renderer', devtool: 'eval-source-map', + optimization: { + minimize: false + }, entry: { index: [ // activate HMR for React diff --git a/config/webpack.storybook.config.js b/config/webpack.storybook.config.js index fdbb008..79fc56d 100644 --- a/config/webpack.storybook.config.js +++ b/config/webpack.storybook.config.js @@ -64,6 +64,9 @@ const config = { process: false, Buffer: false }, + optimization: { + minimize: false + }, devtool: 'eval-source-map', entry: { index: path.resolve(project.path.src, 'index.js') diff --git a/electron/renderer/index.js b/electron/renderer/index.js index 608227d..96da467 100644 --- a/electron/renderer/index.js +++ b/electron/renderer/index.js @@ -21,12 +21,15 @@ import 'less/global.less'; /** * Customize data service for your sandbox. */ -const NS = 'test.people_imported'; +const NS = 'lucas_apple_health_data.sleep'; import Connection from 'mongodb-connection-model'; const connection = new Connection({ hostname: '127.0.0.1', - port: 27017 + port: 27017, + options: { + explicitlyIgnoreSession: true + } }); /** @@ -51,8 +54,8 @@ var QUERY_BAR = { console.group('Compass Plugin Sandbox'); console.log('db.collection', NS); -console.log('connect', connection.driver_url, { - options: connection.driver_options +console.log('connect', connection.driverUrl, { + options: connection.driverOptions }); console.groupEnd(); @@ -82,7 +85,7 @@ const store = configureStore({ }); // Create a HMR enabled render function -const render = Component => { +const render = (Component) => { ReactDOM.render( diff --git a/examples/import-options.stories.js b/examples/import-options.stories.js new file mode 100644 index 0000000..c193f9f --- /dev/null +++ b/examples/import-options.stories.js @@ -0,0 +1,49 @@ +const DEFAULT_PROPS = { + delimiter: ',', + setDelimiter: () => console.log('setDelimiter:'), + fileType: '', + selectImportFileType: () => console.log('selectImportFileType:'), + fileName: '', + selectImportFileName: () => console.log('selectImportFileName:'), + stopOnErrors: false, + setStopOnErrors: () => console.log('setStopOnErrors:'), + ignoreBlanks: true, + setIgnoreBlanks: () => console.log('setIgnoreBlanks:'), + fileOpenDialog: () => console.log('fileOpenDialog:') +}; + +/* eslint-disable no-alert */ +import React from 'react'; +import { storiesOf } from '@storybook/react'; +import ImportOptions from 'components/import-options'; + +storiesOf('Examples/ImportOptions', module) + .add('csv', () => { + const props = { + ...DEFAULT_PROPS, + fileType: 'csv', + fileName: '~/my-csv-data.csv' + }; + return ; + }) + .add('tsv', () => { + const props = { + ...DEFAULT_PROPS, + fileType: 'csv', + fileName: '~/my-tsv-data.tsv', + delimiter: '\\t' + }; + return ; + }) + .add('json', () => { + const props = { + ...DEFAULT_PROPS, + fileType: 'json', + fileName: '~/compass-github-api-releases.json' + }; + + return ; + }) + .add('default', () => { + return ; + }); diff --git a/examples/import-preview.stories.js b/examples/import-preview.stories.js new file mode 100644 index 0000000..0e08e8e --- /dev/null +++ b/examples/import-preview.stories.js @@ -0,0 +1,71 @@ +/* eslint-disable no-alert */ +import React from 'react'; +import { storiesOf } from '@storybook/react'; +import ImportPreview from 'components/import-preview'; + +// const docs = [ 
+// { +// _id: 'arlo', +// name: 'Arlo', +// stats: { +// age: 5, +// fluffiness: '' +// } +// }, +// { +// _id: 'basilbazel', +// name: 'Basil', +// stats: { +// age: 8, +// fluffiness: '100' +// } +// }, +// { +// _id: 'hellbeast', +// name: 'Kochka', +// stats: { +// age: '14', +// fluffiness: 50 +// } +// } +// ]; + +storiesOf('Examples/ImportPreview', module).add('simple', () => { + return ( + { + console.log('onFieldCheckedChanged: %s is now', path, checked); + }} + setFieldType={(path, bsonType) => { + console.log('setFieldType: %s to %s', path, bsonType); + }} + fields={[ + { + path: '_id', + type: 'string', + checked: true + }, + { + path: 'name', + type: 'string', + checked: true + }, + { + path: 'stats.age', + type: 'string', + checked: true + }, + { + path: 'stats.fluffiness', + type: 'string', + checked: false + } + ]} + values={[ + ['arlo', 'Arlo', '5', ''], + ['basilbazel', 'Basil', '8', '100'], + ['hellbeast', 'Kochka', '14', '50'] + ]} + /> + ); +}); diff --git a/examples/select-field-type.stories.js b/examples/select-field-type.stories.js new file mode 100644 index 0000000..a3115db --- /dev/null +++ b/examples/select-field-type.stories.js @@ -0,0 +1,21 @@ +/* eslint-disable no-alert */ +import React from 'react'; +import { storiesOf } from '@storybook/react'; +import SelectFieldType from 'components/select-field-type'; + +storiesOf('Examples/SelectFieldType', module) + .add('default', () => { + return ( + window.alert(`Selected type changed ${t}`)} + /> + ); + }) + .add('number selected', () => { + return ( + window.alert(`Selected type changed ${t}`)} + /> + ); + }); diff --git a/package-lock.json b/package-lock.json index 3190e77..bed2b10 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16192,11 +16192,15 @@ "integrity": "sha1-PI+41bW/S/kK4G4U8qUwpO2TXh0=", "dev": true }, + "lodash.isobjectlike": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/lodash.isobjectlike/-/lodash.isobjectlike-4.0.0.tgz", + "integrity": "sha1-dCxfxlrdJ5JNPSQZFoGqmheytg0=" + }, "lodash.isplainobject": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=", - "dev": true + "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" }, "lodash.isstring": { "version": "4.0.1", diff --git a/package.json b/package.json index 3a546d6..634d461 100644 --- a/package.json +++ b/package.json @@ -154,6 +154,8 @@ "fast-csv": "^3.4.0", "flat": "^4.1.0", "javascript-stringify": "^1.6.0", + "lodash.isobjectlike": "^4.0.0", + "lodash.isplainobject": "^4.0.6", "lodash.throttle": "^4.1.1", "marky": "^1.2.1", "mime-types": "^2.1.24", diff --git a/src/components/error-box/error-box.jsx b/src/components/error-box/error-box.jsx index 30b3b6c..7eb15da 100644 --- a/src/components/error-box/error-box.jsx +++ b/src/components/error-box/error-box.jsx @@ -7,7 +7,10 @@ import styles from './error-box.less'; import createStyler from 'utils/styler.js'; const style = createStyler(styles, 'error-box'); -// TODO: lucas: Sync hex values against .less +/** + * TODO: lucas: Sync hex values against palatte + * once we start actually produce ansi. 
+ */ const ANSI_TO_HTML_OPTIONS = { fg: '#FFF', bg: '#000', diff --git a/src/components/import-modal/import-modal.jsx b/src/components/import-modal/import-modal.jsx index 17a3227..a070440 100644 --- a/src/components/import-modal/import-modal.jsx +++ b/src/components/import-modal/import-modal.jsx @@ -1,25 +1,22 @@ import React, { PureComponent } from 'react'; import { connect } from 'react-redux'; import PropTypes from 'prop-types'; -import { - Modal, - FormGroup, - InputGroup, - FormControl, - ControlLabel -} from 'react-bootstrap'; -import { TextButton, IconTextButton } from 'hadron-react-buttons'; +import { Modal } from 'react-bootstrap'; +import { TextButton } from 'hadron-react-buttons'; import fileOpenDialog from 'utils/file-open-dialog'; import { FINISHED_STATUSES, STARTED, COMPLETED, - CANCELED + CANCELED, + FAILED, + UNSPECIFIED } from 'constants/process-status'; -import FILE_TYPES from 'constants/file-types'; + import ProgressBar from 'components/progress-bar'; import ErrorBox from 'components/error-box'; -import SelectFileType from 'components/select-file-type'; +import ImportPreview from 'components/import-preview'; +import ImportOptions from 'components/import-options'; import { startImport, @@ -28,65 +25,65 @@ import { selectImportFileName, setDelimiter, setStopOnErrors, - setIgnoreEmptyFields, - closeImport + setIgnoreBlanks, + closeImport, + toggleIncludeField, + setFieldType } from 'modules/import'; -import styles from './import-modal.less'; -import createStyler from 'utils/styler.js'; -const style = createStyler(styles, 'import-modal'); +/** + * Progress messages. + */ +const MESSAGES = { + [STARTED]: 'Importing documents...', + [CANCELED]: 'Import canceled', + [COMPLETED]: 'Import completed', + [FAILED]: 'Error importing', + [UNSPECIFIED]: '' +}; class ImportModal extends PureComponent { static propTypes = { open: PropTypes.bool, ns: PropTypes.string.isRequired, - progress: PropTypes.number.isRequired, - status: PropTypes.string.isRequired, - error: PropTypes.object, startImport: PropTypes.func.isRequired, cancelImport: PropTypes.func.isRequired, closeImport: PropTypes.func.isRequired, + + /** + * Shared + */ + error: PropTypes.object, + status: PropTypes.string.isRequired, + + /** + * See `` + */ selectImportFileType: PropTypes.func.isRequired, selectImportFileName: PropTypes.func.isRequired, setDelimiter: PropTypes.func.isRequired, delimiter: PropTypes.string, fileType: PropTypes.string, fileName: PropTypes.string, - docsWritten: PropTypes.number, stopOnErrors: PropTypes.bool, setStopOnErrors: PropTypes.func, - ignoreEmptyFields: PropTypes.bool, - setIgnoreEmptyFields: PropTypes.func, - guesstimatedDocsTotal: PropTypes.number - }; + ignoreBlanks: PropTypes.bool, + setIgnoreBlanks: PropTypes.func, - getStatusMessage() { - const status = this.props.status; - if (this.props.error) { - return 'Error importing'; - } - if (status === STARTED) { - return 'Importing documents...'; - } - if (status === CANCELED) { - return 'Import canceled'; - } - if (status === COMPLETED) { - return 'Import completed'; - } - - return 'UNKNOWN'; - } + /** + * See `` + */ + progress: PropTypes.number.isRequired, + docsWritten: PropTypes.number, + guesstimatedDocsTotal: PropTypes.number, - /** - * Handle choosing a file from the file dialog. 
- */ - // eslint-disable-next-line react/sort-comp - handleChooseFile = () => { - const file = fileOpenDialog(); - if (file) { - this.props.selectImportFileName(file[0]); - } + /** + * See `` + */ + fields: PropTypes.array, + values: PropTypes.array, + toggleIncludeField: PropTypes.func.isRequired, + setFieldType: PropTypes.func.isRequired }; /** @@ -111,24 +108,31 @@ class ImportModal extends PureComponent { this.props.startImport(); }; - handleOnSubmit = evt => { - evt.preventDefault(); - evt.stopPropagation(); - if (this.props.fileName) { - this.props.startImport(); - } - }; + // TODO: lucas: Make COMPLETED, FINISHED_STATUSES + // have better names. + // COMPLETED = Done and Successful + // FINISHED_STATUSES = Done and maybe success|error|canceled + // @irina: "maybe call it IMPORT_STATUS ? since technically a cancelled status means it's not finished" + + /** + * Has the import completed successfully? + * @returns {Boolean} + */ + wasImportSuccessful() { + return this.props.status === COMPLETED; + } renderDoneButton() { - if (this.props.status === COMPLETED) { - return ( - - ); + if (!this.wasImportSuccessful()) { + return null; } + return ( + + ); } renderCancelButton() { @@ -146,69 +150,16 @@ class ImportModal extends PureComponent { } renderImportButton() { - if (this.props.status !== COMPLETED) { - return ( - - ); + if (this.wasImportSuccessful()) { + return null; } - } - - renderOptions() { - const isCSV = this.props.fileType === FILE_TYPES.CSV; return ( -
- Options - {isCSV && ( -
- - -
- )} -
- { - this.props.setIgnoreEmptyFields(!this.props.ignoreEmptyFields); - }} - className={style('option-checkbox')} - /> - -
-
- { - this.props.setStopOnErrors(!this.props.stopOnErrors); - }} - className={style('option-checkbox')} - /> - -
-
+ ); } @@ -224,30 +175,30 @@ class ImportModal extends PureComponent { Import To Collection {this.props.ns} -
- - Select File - - - - - - - {this.renderOptions()} - + + + ({ +const mapStateToProps = (state) => ({ ns: state.ns, progress: state.importData.progress, open: state.importData.isOpen, @@ -283,7 +235,10 @@ const mapStateToProps = state => ({ guesstimatedDocsTotal: state.importData.guesstimatedDocsTotal, delimiter: state.importData.delimiter, stopOnErrors: state.importData.stopOnErrors, - ignoreEmptyFields: state.importData.ignoreEmptyFields + ignoreBlanks: state.importData.ignoreBlanks, + fields: state.importData.fields, + values: state.importData.values, + previewLoaded: state.importData.previewLoaded }); /** @@ -298,7 +253,9 @@ export default connect( selectImportFileName, setDelimiter, setStopOnErrors, - setIgnoreEmptyFields, - closeImport + setIgnoreBlanks, + closeImport, + toggleIncludeField, + setFieldType } )(ImportModal); diff --git a/src/components/import-options/import-options.jsx b/src/components/import-options/import-options.jsx new file mode 100644 index 0000000..44aa694 --- /dev/null +++ b/src/components/import-options/import-options.jsx @@ -0,0 +1,126 @@ +import React, { PureComponent } from 'react'; +import PropTypes from 'prop-types'; +import { + FormGroup, + InputGroup, + FormControl, + ControlLabel +} from 'react-bootstrap'; +import { IconTextButton } from 'hadron-react-buttons'; + +import FILE_TYPES from 'constants/file-types'; +import SelectFileType from 'components/select-file-type'; + +import styles from './import-options.less'; +import createStyler from 'utils/styler.js'; +const style = createStyler(styles, 'import-options'); + +class ImportOptions extends PureComponent { + static propTypes = { + delimiter: PropTypes.string, + setDelimiter: PropTypes.func.isRequired, + fileType: PropTypes.string, + selectImportFileType: PropTypes.func.isRequired, + fileName: PropTypes.string, + selectImportFileName: PropTypes.func.isRequired, + stopOnErrors: PropTypes.bool, + setStopOnErrors: PropTypes.func, + ignoreBlanks: PropTypes.bool, + setIgnoreBlanks: PropTypes.func, + fileOpenDialog: PropTypes.func + }; + + /** + * Handle choosing a file from the file dialog. + */ + handleChooseFile = () => { + const file = this.props.fileOpenDialog(); + if (file) { + this.props.selectImportFileName(file[0]); + } + }; + + handleOnSubmit = (evt) => { + evt.preventDefault(); + evt.stopPropagation(); + }; + + render() { + /** + * TODO: lucas: Reuse `Select File` component shared with export. + */ + + const isCSV = this.props.fileType === FILE_TYPES.CSV; + + return ( +
+ + Select File + + + + + + +
+ Options + {isCSV && ( +
+ + +
+ )} +
+ { + this.props.setIgnoreBlanks(!this.props.ignoreBlanks); + }} + className={style('option-checkbox')} + /> + +
+
+ { + this.props.setStopOnErrors(!this.props.stopOnErrors); + }} + className={style('option-checkbox')} + /> + +
+
+ + ); + } +} + +export default ImportOptions; diff --git a/src/components/import-modal/import-modal.less b/src/components/import-options/import-options.less similarity index 72% rename from src/components/import-modal/import-modal.less rename to src/components/import-options/import-options.less index 01b5dcf..b968d3f 100644 --- a/src/components/import-modal/import-modal.less +++ b/src/components/import-options/import-options.less @@ -1,33 +1,8 @@ -@import (reference) '~less/compass/_theme.less'; - -.import-modal { +.import-options { &-form { box-shadow: initial; } - &-progress { - display: flex; - height: 20px; - width: 100%; - - &-bar { - flex-grow: 4; - } - - &-cancel { - flex-grow: 0; - display: flex; - align-items: center; - justify-content: center; - width: 30px; - - i { - cursor: pointer; - color: @gray5; - } - } - } - &-browse { &-group { display: flex; @@ -75,8 +50,7 @@ color: #494747; } &-option { - display: flex; - align-items: center; + margin-bottom: 5px; &-checkbox { margin: 0px 8px 0px 0px !important; diff --git a/src/components/import-options/index.js b/src/components/import-options/index.js new file mode 100644 index 0000000..d5de64d --- /dev/null +++ b/src/components/import-options/index.js @@ -0,0 +1,2 @@ +import ImportOptions from './import-options'; +export default ImportOptions; diff --git a/src/components/import-preview/import-preview.jsx b/src/components/import-preview/import-preview.jsx new file mode 100644 index 0000000..e7455e3 --- /dev/null +++ b/src/components/import-preview/import-preview.jsx @@ -0,0 +1,152 @@ +/* eslint-disable react/no-multi-comp */ +import React, { PureComponent } from 'react'; +import PropTypes from 'prop-types'; +import styles from './import-preview.less'; + +import createStyler from 'utils/styler.js'; +const style = createStyler(styles, 'import-preview'); + +import { createLogger } from 'utils/logger'; +const debug = createLogger('import-preview'); + +import SelectFieldType from 'components/select-field-type'; + +class PreviewRow extends PureComponent { + static propTypes = { + values: PropTypes.array, + fields: PropTypes.array, + index: PropTypes.number + }; + + render() { + const { values, index } = this.props; + const cells = values.map((v, i) => { + const header = this.props.fields[i]; + if (v === '') { + v = empty string; + } + if (!header.checked) { + return ( + + {v} + + ); + } + /** + * TODO: lucas: Use highlight.js, mongodb-ace-mode, or something + * so the text style of the value matches its destination type. + * This is particular important for the user to be able to descern + * numbers/booleans that are strings from csv are actually being + * cast to those types the user expects. 
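+ *
+ * A cheap interim version (sketch only; the class name below is made up,
+ * not an existing style in import-preview.less) could key a CSS class off
+ * the destination type instead of pulling in a full highlighter:
+ *
+ *   <td key={i} className={style(`value-${header.type.toLowerCase()}`)}>{v}</td>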
+ */ + return {v}; + }); + + return {[].concat({index + 1}, cells)}; + } +} + +class PreviewValues extends PureComponent { + static propTypes = { + values: PropTypes.array, + fields: PropTypes.array + }; + + render() { + const { values } = this.props; + return ( + + {values.map((val, i) => ( + + ))} + + ); + } +} + +// const FieldShape = PropTypes.shape({ +// path: PropTypes.string, +// checked: PropTypes.bool, +// type: PropTypes.string +// }); + +class PreviewFields extends PureComponent { + static propTypes = { + fields: PropTypes.array, + onCheckedChanged: PropTypes.func.isRequired, + setFieldType: PropTypes.func.isRequired + }; + + onCheckedChanged(path, evt) { + debug('Checked changed', path, evt.currentTarget.checked); + this.props.onCheckedChanged(path, evt.currentTarget.checked); + } + + render() { + const fields = this.props.fields.map((field) => { + return ( + +
+ +
    +
{field.path}
+
+ +
+
+
+ + ); + }); + return ( + + {[].concat(, fields)} + + ); + } +} + +class ImportPreview extends PureComponent { + static propTypes = { + fields: PropTypes.array, + values: PropTypes.array, + onFieldCheckedChanged: PropTypes.func.isRequired, + setFieldType: PropTypes.func.isRequired + }; + render() { + return ( +
+ + + +
+
+ ); + } +} + +export default ImportPreview; diff --git a/src/components/import-preview/import-preview.less b/src/components/import-preview/import-preview.less new file mode 100644 index 0000000..6cdcd50 --- /dev/null +++ b/src/components/import-preview/import-preview.less @@ -0,0 +1,71 @@ +@import (reference) '~less/compass/_theme.less'; + +.import-preview { + height: 100%; + width: 100%; + overflow-y: auto; + + table, + th, + td { + padding: 3px 5px; + border: 1px solid @gray6; + } + + table { + border-collapse: collapse; + border-spacing: 0px; + font-family: Menlo, Monaco, Consolas, 'Courier New', monospace; + font-size: 11px; + height: 100%; + width: 100%; + + thead { + background: @gray8; + tr { + th { + min-width: 25px; + font-weight: normal; + + div { + display: flex; + align-items: center; + input[type='checkbox'] { + margin: 0px 5px 0px 0px; + } + ul { + list-style: none; + margin: 0; + padding: 0; + + li:first-child { + font-weight: bold; + } + } + } + } + th:first-child { + width: 25px; + max-width: 25px; + } + } + } + + tbody { + height: 100%; + width: 100%; + tr { + height: 25px; + td:first-child { + text-align: center; + } + td { + white-space: nowrap; + } + td.unchecked { + opacity: 0.4; + } + } + } + } +} diff --git a/src/components/import-preview/index.js b/src/components/import-preview/index.js new file mode 100644 index 0000000..0a7b796 --- /dev/null +++ b/src/components/import-preview/index.js @@ -0,0 +1,2 @@ +import ImportPreview from './import-preview'; +export default ImportPreview; diff --git a/src/components/select-field-type/index.js b/src/components/select-field-type/index.js new file mode 100644 index 0000000..48c8703 --- /dev/null +++ b/src/components/select-field-type/index.js @@ -0,0 +1,2 @@ +import SelectFieldType from './select-field-type'; +export default SelectFieldType; diff --git a/src/components/select-field-type/select-field-type.jsx b/src/components/select-field-type/select-field-type.jsx new file mode 100644 index 0000000..7c92d34 --- /dev/null +++ b/src/components/select-field-type/select-field-type.jsx @@ -0,0 +1,42 @@ +/* eslint-disable react/no-multi-comp */ +import React, { PureComponent } from 'react'; +import PropTypes from 'prop-types'; + +import bsonCSV from 'utils/bson-csv'; + +import { createLogger } from 'utils/logger'; +const debug = createLogger('select-field-type'); + +function getBSONTypeCastings() { + return Object.keys(bsonCSV); +} + +class SelectFieldType extends PureComponent { + static propTypes = { + selectedType: PropTypes.string, + onChange: PropTypes.func.isRequired + }; + + onChange(evt) { + debug('type changed', evt.currentTarget.value); + this.props.onChange(evt.currentTarget.value); + } + render() { + const { selectedType } = this.props; + const onChange = this.onChange.bind(this); + + /** + * TODO: lucas: Handle JSON casting. 
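+ *
+ * For reference, the option list rendered below comes from
+ * `getBSONTypeCastings()`, i.e. the keys of `utils/bson-csv`, which
+ * currently look like:
+ *
+ *   ['String', 'Number', 'Boolean', 'Date', 'ObjectId', 'Long', ...]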
+ */ + return ( + + ); + } +} +export default SelectFieldType; diff --git a/src/modules/export.js b/src/modules/export.js index 2e7498d..52572cf 100644 --- a/src/modules/export.js +++ b/src/modules/export.js @@ -398,7 +398,6 @@ export const startExport = () => { }); progress.on('progress', function(info) { - // debug('progress', info); dispatch(onProgress(info.percentage, info.transferred)); }); @@ -412,8 +411,6 @@ export const startExport = () => { const dest = fs.createWriteStream(exportData.fileName); debug('executing pipeline'); - - // TODO: lucas: figure out how to make onStarted(); dispatch(onStarted(source, dest, numDocsToExport)); stream.pipeline(source, progress, formatter, dest, function(err) { if (err) { @@ -453,7 +450,7 @@ export const cancelExport = () => { } debug('cancelling'); source.unpipe(); - // dest.end(); + debug('canceled by user'); dispatch({ type: CANCELED }); }; diff --git a/src/modules/import.js b/src/modules/import.js index 07eb2c9..629e820 100644 --- a/src/modules/import.js +++ b/src/modules/import.js @@ -1,4 +1,27 @@ /* eslint-disable valid-jsdoc */ +/** + * # Import + * + * @see startImport() for the primary entrypoint. + * + * ``` + * openImport() + * | [user specifies import options or defaults] + * closeImport() | startImport() + * | > cancelImport() + * ``` + * + * - [User actions for speficying import options] can be called once the modal has been opened + * - Once `startImport()` has been called, [Import status action creators] are created internally + * + * NOTE: lucas: Any values intended for internal-use only, such as the action + * creators for import status/progress, are called out with @api private + * doc strings. This way, they can still be exported as needed for testing + * without having to think deeply on whether they are being called from a top-level + * action or not. Not great, but it has saved me a considerable amount of time vs. + * larger scale refactoring/frameworks. 
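+ *
+ * For example (illustrative only; the file path is made up), a connected
+ * component would typically drive a full import by dispatching:
+ *
+ *   dispatch(openImport());
+ *   dispatch(selectImportFileName('/path/to/pets.csv'));
+ *   dispatch(setDelimiter(','));   // csv-only options as needed
+ *   dispatch(startImport());       // or later cancelImport()
+ *   dispatch(closeImport());       // once a finished status is reached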
+ */ + import { promisify } from 'util'; import fs from 'fs'; const checkFileExists = promisify(fs.exists); @@ -13,14 +36,13 @@ import { appRegistryEmit } from 'modules/compass'; import detectImportFile from 'utils/detect-import-file'; import { createCollectionWriteStream } from 'utils/collection-stream'; -import { - createCSVParser, - createJSONParser, - createProgressStream -} from 'utils/parsers'; +import createParser, { createProgressStream } from 'utils/import-parser'; +import createPreviewWritable, { createPeekStream } from 'utils/import-preview'; import createImportSizeGuesstimator from 'utils/import-size-guesstimator'; -import { removeEmptyFieldsStream } from 'utils/remove-empty-fields'; +import { removeBlanksStream } from 'utils/remove-blanks'; +import { transformProjectedTypesStream } from 'utils/import-apply-types-and-projection'; + import { createLogger } from 'utils/logger'; const debug = createLogger('import'); @@ -38,13 +60,17 @@ const FILE_TYPE_SELECTED = `${PREFIX}/FILE_TYPE_SELECTED`; const FILE_SELECTED = `${PREFIX}/FILE_SELECTED`; const OPEN = `${PREFIX}/OPEN`; const CLOSE = `${PREFIX}/CLOSE`; +const SET_PREVIEW = `${PREFIX}/SET_PREVIEW`; const SET_DELIMITER = `${PREFIX}/SET_DELIMITER`; const SET_GUESSTIMATED_TOTAL = `${PREFIX}/SET_GUESSTIMATED_TOTAL`; const SET_STOP_ON_ERRORS = `${PREFIX}/SET_STOP_ON_ERRORS`; -const SET_IGNORE_EMPTY_FIELDS = `${PREFIX}/SET_IGNORE_EMPTY_FIELDS`; +const SET_IGNORE_BLANKS = `${PREFIX}/SET_IGNORE_BLANKS`; +const TOGGLE_INCLUDE_FIELD = `${PREFIX}/TOGGLE_INCLUDE_FIELD`; +const SET_FIELD_TYPE = `${PREFIX}/SET_FIELD_TYPE`; /** - * Initial state. + * ## Initial state. + * * @api private */ export const INITIAL_STATE = { @@ -53,17 +79,32 @@ export const INITIAL_STATE = { error: null, fileName: '', fileIsMultilineJSON: false, - fileDelimiter: undefined, useHeaderLines: true, status: PROCESS_STATUS.UNSPECIFIED, fileStats: null, docsWritten: 0, guesstimatedDocsTotal: 0, - delimiter: undefined, + delimiter: ',', stopOnErrors: false, - ignoreEmptyFields: true + ignoreBlanks: true, + fields: [], + values: [], + previewLoaded: false }; +/** + * ### Import status action creators + * + * @see startImport below. + * + * ``` + * STARTED > + * | *ERROR* || SET_GUESSTIMATED_TOTAL > + * | <-- PROGRESS --> + * | *FINISHED* + * ``` + */ + /** * @param {Number} progress * @param {Number} docsWritten @@ -91,7 +132,7 @@ export const onStarted = (source, dest) => ({ * @param {Number} docsWritten * @api private */ -export const onFinished = docsWritten => ({ +export const onFinished = (docsWritten) => ({ type: FINISHED, docsWritten: docsWritten }); @@ -100,147 +141,19 @@ export const onFinished = docsWritten => ({ * @param {Error} error * @api private */ -export const onError = error => ({ +export const onError = (error) => ({ type: FAILED, error: error }); /** - * * @param {Number} guesstimatedDocsTotal * @api private */ -export const onGuesstimatedDocsTotal = guesstimatedDocsTotal => ({ +export const onGuesstimatedDocsTotal = (guesstimatedDocsTotal) => ({ type: SET_GUESSTIMATED_TOTAL, guesstimatedDocsTotal: guesstimatedDocsTotal }); -/** - * The import module reducer. - * - * @param {Object} state - The state. - * @param {Object} action - The action. - * - * @returns {Object} The state. 
- */ -// eslint-disable-next-line complexity -const reducer = (state = INITIAL_STATE, action) => { - if (action.type === SET_GUESSTIMATED_TOTAL) { - return { - ...state, - guesstimatedDocsTotal: action.guesstimatedDocsTotal - }; - } - - if (action.type === SET_DELIMITER) { - return { - ...state, - delimiter: action.delimiter - }; - } - - if (action.type === SET_STOP_ON_ERRORS) { - return { - ...state, - stopOnErrors: action.stopOnErrors - }; - } - - if (action.type === SET_IGNORE_EMPTY_FIELDS) { - return { - ...state, - ignoreEmptyFields: action.ignoreEmptyFields - }; - } - - if (action.type === FILE_SELECTED) { - return { - ...state, - fileName: action.fileName, - fileType: action.fileType, - fileStats: action.fileStats, - fileIsMultilineJSON: action.fileIsMultilineJSON, - status: PROCESS_STATUS.UNSPECIFIED, - progress: 0, - docsWritten: 0, - source: undefined, - dest: undefined - }; - } - - if (action.type === FAILED) { - return { - ...state, - error: action.error, - status: PROCESS_STATUS.FAILED - }; - } - - if (action.type === STARTED) { - return { - ...state, - error: null, - progress: 0, - status: PROCESS_STATUS.STARTED, - source: action.source, - dest: action.dest - }; - } - - if (action.type === PROGRESS) { - return { - ...state, - progress: action.progress, - docsWritten: action.docsWritten - }; - } - - if (action.type === FINISHED) { - const isComplete = !( - state.error || state.status === PROCESS_STATUS.CANCELED - ); - return { - ...state, - status: isComplete ? PROCESS_STATUS.COMPLETED : state.status, - docsWritten: action.docsWritten, - source: undefined, - dest: undefined - }; - } - - if (action.type === CANCELED) { - return { - ...state, - status: PROCESS_STATUS.CANCELED, - source: undefined, - dest: undefined - }; - } - - /** - * Open the `` - */ - if (action.type === OPEN) { - return { - ...INITIAL_STATE, - isOpen: true - }; - } - - if (action.type === CLOSE) { - return { - ...state, - isOpen: false - }; - } - - if (action.type === FILE_TYPE_SELECTED) { - return { - ...state, - fileType: action.fileType - }; - } - return state; -}; /** * @api public @@ -259,13 +172,16 @@ export const startImport = () => { fileIsMultilineJSON, fileStats: { size }, delimiter, - ignoreEmptyFields, - stopOnErrors + ignoreBlanks, + stopOnErrors, + fields } = importData; const source = fs.createReadStream(fileName, 'utf8'); - // TODO: lucas: Support ignoreUndefined as an option to pass to driver? + /** + * TODO: lucas: Support ignoreUndefined as an option to pass to driver? + */ const dest = createCollectionWriteStream(dataService, ns, stopOnErrors); const progress = createProgressStream(size, function(err, info) { @@ -286,19 +202,16 @@ export const startImport = () => { const stripBOM = stripBomStream(); - const removeEmptyFields = removeEmptyFieldsStream(ignoreEmptyFields); - - let parser; - if (fileType === 'csv') { - parser = createCSVParser({ - delimiter: delimiter - }); - } else { - parser = createJSONParser({ - selector: fileIsMultilineJSON ? 
null : '*', - fileName: fileName - }); - } + const removeBlanks = removeBlanksStream(ignoreBlanks); + + const applyTypes = transformProjectedTypesStream(fields); + + const parser = createParser({ + fileName, + fileType, + delimiter, + fileIsMultilineJSON + }); debug('executing pipeline'); @@ -307,15 +220,16 @@ export const startImport = () => { source, stripBOM, parser, - removeEmptyFields, + removeBlanks, + applyTypes, importSizeGuesstimator, progress, dest, function(err, res) { /** - * refresh data (docs, aggregations) regardless of whether we have a - * partial import or full import - */ + * Refresh data (docs, aggregations) regardless of whether we have a + * partial import or full import + */ dispatch(appRegistryEmit('refresh-data')); /** * TODO: lucas: Decorate with a codeframe if not already @@ -324,10 +238,6 @@ export const startImport = () => { if (err) { return dispatch(onError(err)); } - /** - * TODO: lucas: once import is finished, - * trigger a refresh on the documents view. - */ debug('done', err, res); dispatch(onFinished(dest.docsWritten)); dispatch(appRegistryEmit('import-finished', size, fileType)); @@ -337,6 +247,8 @@ export const startImport = () => { }; /** + * Cancels an active import if there is one, noop if not. + * * @api public */ export const cancelImport = () => { @@ -350,35 +262,116 @@ export const cancelImport = () => { } debug('cancelling'); source.unpipe(); - // dest.end(); + debug('import canceled by user'); dispatch({ type: CANCELED }); }; }; /** - * Gather file metadata quickly when the user specifies `fileName`. + * Load a preview of the first few documents in the selected file + * which is used to calculate an inital set of `fields` and `values`. + * + * `loadPreviewDocs()` is only called internally when any state used + * for specifying import parsing is modified. + * * @param {String} fileName + * @param {String} fileType + * @api private + */ +const loadPreviewDocs = ( + fileName, + fileType, + delimiter, + fileIsMultilineJSON +) => { + return (dispatch) => { + /** + * TODO: lucas: add dispatches for preview loading, error, etc. + * as needed. For the time being, its fast enough and we want + * errors/faults hard so we can figure out edge cases that + * actually need it. + */ + const source = fs.createReadStream(fileName, 'utf8'); + const dest = createPreviewWritable(); + stream.pipeline( + source, + createPeekStream(fileType, delimiter, fileIsMultilineJSON), + dest, + function(err) { + if (err) { + throw err; + } + dispatch({ + type: SET_PREVIEW, + fields: dest.fields, + values: dest.values + }); + } + ); + }; +}; + +/** + * ### User actions for speficying import options + */ + +/** + * Mark a field to be included or excluded from the import. + * + * @param {String} path Dot notation path of the field. * @api public */ -export const selectImportFileName = fileName => { - return dispatch => { +export const toggleIncludeField = (path) => ({ + type: TOGGLE_INCLUDE_FIELD, + path: path +}); + +/** + * Specify the `type` values at `path` should be cast to. + * + * @param {String} path Dot notation accessor for value. + * @param {String} bsonType A bson type identifier. 
+ * @example + * ```javascript + * // Cast string _id from a csv to a bson.ObjectId + * setFieldType('_id', 'ObjectId'); + * // Cast `{stats: {flufiness: "100"}}` to + * // `{stats: {flufiness: 100}}` + * setFieldType('stats.flufiness', 'Int32'); + * ``` + * @api public + */ +export const setFieldType = (path, bsonType) => ({ + type: SET_FIELD_TYPE, + path: path, + bsonType: bsonType +}); + +/** + * Gather file metadata quickly when the user specifies `fileName` + * @param {String} fileName + * @api public + * @see utils/detect-import-file.js + */ +export const selectImportFileName = (fileName) => { + return (dispatch, getState) => { let fileStats = {}; checkFileExists(fileName) - .then(exists => { + .then((exists) => { if (!exists) { throw new Error(`File ${fileName} not found`); } return getFileStats(fileName); }) - .then(stats => { + .then((stats) => { fileStats = { ...stats, type: mime.lookup(fileName) }; return promisify(detectImportFile)(fileName); }) - .then(detected => { + .then((detected) => { dispatch({ type: FILE_SELECTED, fileName: fileName, @@ -386,62 +379,303 @@ export const selectImportFileName = fileName => { fileIsMultilineJSON: detected.fileIsMultilineJSON, fileType: detected.fileType }); + + /** + * TODO: lucas: @see utils/detect-import-file.js for future delimiter detection. + */ + const delimiter = getState().importData.delimiter; + dispatch( + loadPreviewDocs( + fileName, + detected.fileType, + delimiter, + detected.fileIsMultilineJSON + ) + ); }) - .catch(err => dispatch(onError(err))); + .catch((err) => dispatch(onError(err))); }; }; /** - * Select the file type of the import. + * The user has manually selected the `fileType` of the import. * * @param {String} fileType * @api public */ -export const selectImportFileType = fileType => ({ - type: FILE_TYPE_SELECTED, - fileType: fileType -}); +export const selectImportFileType = (fileType) => { + return (dispatch, getState) => { + const { + previewLoaded, + fileName, + delimiter, + fileIsMultilineJSON + } = getState().importData; + + dispatch({ + type: FILE_TYPE_SELECTED, + fileType: fileType + }); + + if (previewLoaded) { + debug('preview needs updated because fileType changed'); + dispatch( + loadPreviewDocs(fileName, fileType, delimiter, fileIsMultilineJSON) + ); + } + }; +}; /** - * Open the import modal. + * Set the tabular delimiter. + * @param {String} delimiter One of `,` for csv, `\t` for csv + * * @api public */ -export const openImport = () => ({ - type: OPEN -}); +export const setDelimiter = (delimiter) => { + return (dispatch, getState) => { + const { + previewLoaded, + fileName, + fileType, + fileIsMultilineJSON + } = getState().importData; + dispatch({ + type: SET_DELIMITER, + delimiter: delimiter + }); + + if (previewLoaded) { + debug( + 'preview needs updated because delimiter changed', + fileName, + fileType, + delimiter, + fileIsMultilineJSON + ); + dispatch( + loadPreviewDocs(fileName, fileType, delimiter, fileIsMultilineJSON) + ); + } + }; +}; /** - * Close the import modal. + * Stop the import if mongo returns an error for a document write + * such as a duplicate key for a unique index. In practice, + * the cases for this being false when importing are very minimal. + * For example, a duplicate unique key on _id is almost always caused + * by the user attempting to resume from a previous import without + * removing all documents sucessfully imported. 
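+ *
+ * e.g. `dispatch(setStopOnErrors(true))` makes the duplicate-key case above
+ * fail the whole import instead of skipping the offending document.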
+ * + * @param {Boolean} stopOnErrors To stop or not to stop * @api public + * @see utils/collection-stream.js + * @see https://docs.mongodb.com/manual/reference/program/mongoimport/#cmdoption-mongoimport-stoponerror */ -export const closeImport = () => ({ - type: CLOSE +export const setStopOnErrors = (stopOnErrors) => ({ + type: SET_STOP_ON_ERRORS, + stopOnErrors: stopOnErrors }); /** - * Set the tabular delimiter. + * Any `value` that is `''` will not have this field set in the final + * document written to mongo. * + * @param {Boolean} ignoreBlanks * @api public + * @see https://docs.mongodb.com/manual/reference/program/mongoimport/#cmdoption-mongoimport-ignoreblanks + * @todo lucas: Standardize as `setIgnoreBlanks`? */ -export const setDelimiter = delimiter => ({ - type: SET_DELIMITER, - delimiter: delimiter +export const setIgnoreBlanks = (ignoreBlanks) => ({ + type: SET_IGNORE_BLANKS, + ignoreBlanks: ignoreBlanks }); /** + * ### Top-level modal visibility + */ + +/** + * Open the import modal. * @api public */ -export const setStopOnErrors = stopOnErrors => ({ - type: SET_STOP_ON_ERRORS, - stopOnErrors: stopOnErrors +export const openImport = () => ({ + type: OPEN }); /** + * Close the import modal. * @api public */ -export const setIgnoreEmptyFields = setignoreEmptyFields => ({ - type: SET_IGNORE_EMPTY_FIELDS, - setignoreEmptyFields: setignoreEmptyFields +export const closeImport = () => ({ + type: CLOSE }); +/** + * The import module reducer. + * + * @param {Object} state - The state. + * @param {Object} action - The action. + * + * @returns {Object} The state. + */ +// eslint-disable-next-line complexity +const reducer = (state = INITIAL_STATE, action) => { + if (action.type === SET_GUESSTIMATED_TOTAL) { + return { + ...state, + guesstimatedDocsTotal: action.guesstimatedDocsTotal + }; + } + + if (action.type === SET_DELIMITER) { + return { + ...state, + delimiter: action.delimiter + }; + } + + if (action.type === TOGGLE_INCLUDE_FIELD) { + const newState = { + ...state + }; + newState.fields = newState.fields.map((field) => { + if (field.path === action.path) { + field.checked = !field.checked; + } + return field; + }); + return newState; + } + + if (action.type === SET_FIELD_TYPE) { + const newState = { + ...state + }; + newState.fields = newState.fields.map((field) => { + if (field.path === action.path) { + // If a user changes a field type, automatically check it for them + // so they don't need an extra click or forget to click it an get frustrated + // like I did so many times :) + field.checked = true; + field.type = action.bsonType; + } + return field; + }); + return newState; + } + + if (action.type === SET_PREVIEW) { + return { + ...state, + values: action.values, + fields: action.fields, + previewLoaded: true + }; + } + + if (action.type === SET_STOP_ON_ERRORS) { + return { + ...state, + stopOnErrors: action.stopOnErrors + }; + } + + if (action.type === SET_IGNORE_BLANKS) { + return { + ...state, + ignoreBlanks: action.ignoreBlanks + }; + } + + if (action.type === FILE_SELECTED) { + return { + ...state, + fileName: action.fileName, + fileType: action.fileType, + fileStats: action.fileStats, + fileIsMultilineJSON: action.fileIsMultilineJSON, + status: PROCESS_STATUS.UNSPECIFIED, + progress: 0, + docsWritten: 0, + source: undefined, + dest: undefined + }; + } + + if (action.type === FAILED) { + return { + ...state, + error: action.error, + status: PROCESS_STATUS.FAILED + }; + } + + if (action.type === STARTED) { + return { + ...state, + error: null, + progress: 0, + status: 
PROCESS_STATUS.STARTED, + source: action.source, + dest: action.dest + }; + } + + if (action.type === PROGRESS) { + return { + ...state, + progress: action.progress, + docsWritten: action.docsWritten + }; + } + + if (action.type === FINISHED) { + const isComplete = !( + state.error || state.status === PROCESS_STATUS.CANCELED + ); + return { + ...state, + status: isComplete ? PROCESS_STATUS.COMPLETED : state.status, + docsWritten: action.docsWritten, + source: undefined, + dest: undefined + }; + } + + if (action.type === CANCELED) { + return { + ...state, + status: PROCESS_STATUS.CANCELED, + source: undefined, + dest: undefined + }; + } + + /** + * Open the `` + */ + if (action.type === OPEN) { + return { + ...INITIAL_STATE, + isOpen: true + }; + } + + if (action.type === CLOSE) { + return { + ...state, + isOpen: false + }; + } + + if (action.type === FILE_TYPE_SELECTED) { + return { + ...state, + fileType: action.fileType + }; + } + return state; +}; export default reducer; diff --git a/src/utils/bson-csv.js b/src/utils/bson-csv.js new file mode 100644 index 0000000..b617b59 --- /dev/null +++ b/src/utils/bson-csv.js @@ -0,0 +1,133 @@ +/** + * Unlike extended JSON, there is no library/spec for + * serializing and deserializing CSV values. + * + * Basically if: + * 1. All bson type defs had a consistent `.fromString()` * method + * 2. Castings/detection used by fromString() today were exposed + * (e.g. JS Number float -> bson.Double). + */ + +/** + * TODO: lucas: Incorporate serialization. Start with what mongoimport + * does: https://github.com/mongodb/mongo-tools-common/blob/master/json/csv_format.go + */ + +/** + * TODO: lucas: If we want to support types via CSV headers + * for compatibility with mongoimport, that all happens in: + * https://github.com/mongodb/mongo-tools/blob/master/mongoimport/typed_fields.go + * + * And https://www.npmjs.com/package/flat#transformkey can be used to prototype. + */ + +/** + * TODO: lucas: Other types (null, undefined, etc.) and formats + * (see mongoimport typed headers) later. Could also include: + * 1. [val 1, val2] -> array + * 2. {foo: bar} => nested object + * 3. etc. + */ +import bson from 'bson'; + +const BOOLEAN_TRUE = ['1', 'true', 'TRUE']; +const BOOLEAN_FALSE = ['0', 'false', 'FALSE', 'null', '', 'NULL']; + +export default { + String: { + fromString: function(s) { + return '' + s; + } + }, + Number: { + fromString: function(s) { + return parseFloat(s); + } + }, + Boolean: { + fromString: function(s) { + if (BOOLEAN_TRUE.includes(s)) { + return true; + } + + if (BOOLEAN_FALSE.includes(s)) { + return false; + } + + return Boolean(s); + } + }, + Date: { + fromString: function(s) { + return new Date(s); + } + }, + ObjectId: { + fromString: function(s) { + // eslint-disable-next-line new-cap + return new bson.ObjectId(s); + } + }, + Long: { + fromString: function(s) { + return bson.Long.fromString(s); + } + }, + RegExpr: { + fromString: function(s) { + // TODO: lucas: detect any specified regex options later. 
+ // + // if (s.startsWith('/')) { + // var regexRegex = '/(.*)/([imxlsu]+)$' + // var [pattern, options]; + // return new bson.BSONRegExp(pattern, options); + // } + return new bson.BSONRegExp(s); + } + }, + Binary: { + fromString: function(s) { + return new bson.Binary(s, bson.Binary.SUBTYPE_DEFAULT); + } + }, + UUID: { + fromString: function(s) { + return new bson.Binary(s, bson.Binary.SUBTYPE_UUID); + } + }, + MD5: { + fromString: function(s) { + return new bson.Binary(s, bson.Binary.SUBTYPE_MD5); + } + }, + Timestamp: { + fromString: function(s) { + return new bson.Timestamp.fromString(s); + } + }, + Double: { + fromString: function(s) { + return new bson.Double(parseFloat(s)); + } + }, + Int32: { + fromString: function(s) { + return parseInt(s, 10); + } + }, + Decimal128: { + fromString: function(s) { + return bson.Decimal128.fromString(s); + } + } +}; + +export function detectType(value) { + if (value === undefined) { + return 'Undefined'; + } + if (value === null) { + return 'Null'; + } + return /function ([A-Za-z]+)/.exec(value.constructor.toString())[1]; +} diff --git a/src/utils/bson-csv.spec.js b/src/utils/bson-csv.spec.js new file mode 100644 index 0000000..68eac3b --- /dev/null +++ b/src/utils/bson-csv.spec.js @@ -0,0 +1,38 @@ +import bsonCSV from './bson-csv'; +// import bson from 'bson'; + +// TODO: lucas: probably dumb but think about that later. + +describe('bson-csv', () => { + describe('String', () => { + it('should work', () => { + expect(bsonCSV.String.fromString(1)).to.equal('1'); + }); + }); + describe('Boolean', () => { + it('should deserialize falsy values', () => { + expect(bsonCSV.Boolean.fromString('')).to.equal(false); + expect(bsonCSV.Boolean.fromString('false')).to.equal(false); + expect(bsonCSV.Boolean.fromString('FALSE')).to.equal(false); + // expect(bsonCSV.Boolean.fromString('0')).to.equal(false); + }); + it('should deserialize non-falsy values', () => { + // expect(bsonCSV.Boolean.fromString('1')).to.equal(true); + expect(bsonCSV.Boolean.fromString('true')).to.equal(true); + expect(bsonCSV.Boolean.fromString('TRUE')).to.equal(true); + }); + }); + describe('Number', () => { + it('should work', () => { + expect(bsonCSV.Number.fromString('1')).to.equal(1); + }); + }); + describe('ObjectId', () => { + it('should work', () => { + const oid = '5dd080acc15c0d5ee3ab6ad2'; + const deserialized = bsonCSV.ObjectId.fromString(oid); + expect(deserialized._bsontype).to.equal('ObjectID'); + expect(deserialized.toString()).to.equal('5dd080acc15c0d5ee3ab6ad2'); + }); + }); +}); diff --git a/src/utils/collection-stream.js b/src/utils/collection-stream.js index 3c067d4..4d2aae7 100644 --- a/src/utils/collection-stream.js +++ b/src/utils/collection-stream.js @@ -63,20 +63,18 @@ class WritableCollectionStream extends Writable { next(); }; - const execBatch = cb => { + const execBatch = (cb) => { const batchSize = this.batch.length; this.batch.execute((err, res) => { - // TODO: lucas: appears turning off retyableWrites - // gives a slightly different error but probably same problem? + /** + * TODO: lucas: appears turning off retyableWrites + * gives a slightly different error but probably same problem? 
+ */ if ( err && Array.isArray(err.errorLabels) && err.errorLabels.indexOf('TransientTransactionError') ) { - debug( - 'NOTE: @lucas: this is a transient transaction error and is a bug in retryable writes.', - err - ); err = null; res = { nInserted: batchSize }; } @@ -84,8 +82,10 @@ class WritableCollectionStream extends Writable { if (err && !this.stopOnErrors) { console.log('stopOnErrors false. skipping', err); err = null; - // TODO: lucas: figure out how to extract finer-grained bulk op results - // from err in these cases. + /** + * TODO: lucas: figure out how to extract finer-grained bulk op results + * from err in these cases. + */ res = {}; } if (err) { @@ -105,20 +105,20 @@ class WritableCollectionStream extends Writable { debug('running _final()'); if (this.batch.length === 0) { - // debug('nothing left in buffer'); debug('%d docs written', this.docsWritten); this.printJobStats(); return callback(); } - // TODO: lucas: Reuse error wrangling from _write above. + /** + * TODO: lucas: Reuse error wrangling from _write above. + */ debug('draining buffered docs', this.batch.length); this.batch.execute((err, res) => { this.captureStatsForBulkResult(err, res); this.docsWritten += this.batch.length; this.printJobStats(); this.batch = null; - // debug('buffer drained', err, res); debug('%d docs written', this.docsWritten); callback(err); }); @@ -134,7 +134,7 @@ class WritableCollectionStream extends Writable { 'ok' ]; - keys.forEach(k => { + keys.forEach((k) => { this._stats[k] += res[k] || 0; }); if (!err) return; diff --git a/src/utils/detect-import-file.js b/src/utils/detect-import-file.js index 65f2651..c46243b 100644 --- a/src/utils/detect-import-file.js +++ b/src/utils/detect-import-file.js @@ -9,8 +9,21 @@ const debug = createLogger('detect-import-file'); const DEFAULT_FILE_TYPE = 'json'; -// TODO: Include more heuristics. Ideally the user just picks the file -// and we auto-detect the various formats/options. +// const importOptions = { +// fileIsMultilineJSON: false, +// fileType: DEFAULT_FILE_TYPE +// }; + +/** + * Guess the `importOptions` to use for parsing the contents of + * `fileName` without looking at the entire file. + * + * @param {String} fileName + * @param {Function} done (err, importOptions) + * + * TODO: lucas: Include more heuristics. Ideally the user just picks the file + * and we auto-detect the various formats/options. + **/ function detectImportFile(fileName, done) { debug('peeking at', fileName); @@ -32,7 +45,10 @@ function detectImportFile(fileName, done) { fileType = DEFAULT_FILE_TYPE; } - // TODO: lucas: papaparse guessDelimiter + /** + * TODO: lucas: guess delimiter like papaparse in the future. + * https://github.com/mholt/PapaParse/blob/49170b76b382317356c2f707e2e4191430b8d495/docs/resources/js/papaparse.js#L1264 + */ debug('swapping'); swap('done'); }); diff --git a/src/utils/dotnotation.js b/src/utils/dotnotation.js new file mode 100644 index 0000000..3a48242 --- /dev/null +++ b/src/utils/dotnotation.js @@ -0,0 +1,35 @@ +import { flatten, unflatten } from 'flat'; + +/** + * Converts any nested objects into a single depth object with `dotnotation` keys. + * @example + * ```javascript + * dotnotation.serialize({_id: 'arlo', collar: {size: 14}}); + * >> {_id: 'arlo', 'collar.size': 14} + * ``` + * @param {Object} obj + * @returns {Object} + */ +export function serialize(obj) { + /** + * TODO: lucas: bson type support. For now, drop. + */ + return flatten(obj); +} + +/** + * Converts an object using dotnotation to a full, nested object. 
+ * @example + * ```javascript + * dotnotation.deserialize({_id: 'arlo', 'collar.size': 14}); + * >> {_id: 'arlo', collar: {size: 14}} + * ``` + * @param {Object} obj + * @returns {Object} + */ +export function deserialize(obj) { + /** + * TODO: lucas: bson type support. For now, drop. + */ + return unflatten(obj); +} diff --git a/src/utils/formatters.js b/src/utils/formatters.js index a0127b9..5cdfdce 100644 --- a/src/utils/formatters.js +++ b/src/utils/formatters.js @@ -2,6 +2,10 @@ /* eslint-disable callback-return */ /* eslint-disable complexity */ +/** + * TODO: lucas: rename `export-formatters` + */ + import csv from 'fast-csv'; import { EJSON } from 'bson'; import { Transform } from 'stream'; @@ -74,6 +78,6 @@ const formatTabularRow = function(doc, opts = { delimiter: '.' }) { export const createCSVFormatter = function() { return csv.format({ headers: true, - transform: row => formatTabularRow(row) + transform: (row) => formatTabularRow(row) }); }; diff --git a/src/utils/import-apply-types-and-projection.js b/src/utils/import-apply-types-and-projection.js new file mode 100644 index 0000000..065cd76 --- /dev/null +++ b/src/utils/import-apply-types-and-projection.js @@ -0,0 +1,64 @@ +import { Transform } from 'stream'; + +import bsonCSV from './bson-csv'; +import isPlainObject from 'lodash.isplainobject'; +import isObjectLike from 'lodash.isobjectlike'; + +import { createLogger } from './logger'; + +const debug = createLogger('apply-import-type-and-projection'); + +/** + * TODO: lucas: dot notation. Handle extended JSON case. + */ +function getProjection(fields, key) { + return fields.filter((f) => { + return f.path === key; + })[0]; +} + +function transformProjectedTypes(fields, data) { + if (Array.isArray(data)) { + return data.map(transformProjectedTypes.bind(null, fields)); + } else if (!isPlainObject(data) || data === null || data === undefined) { + return data; + } + + const keys = Object.keys(data); + if (keys.length === 0) { + return data; + } + return keys.reduce(function(doc, key) { + const def = getProjection(fields, key); + + /** + * TODO: lucas: Relocate removeEmptyStrings() here? + * Avoid yet another recursive traversal of every document. + */ + if (def && !def.checked) { + return doc; + } + if (def.type && bsonCSV[def.type] && !isObjectLike(data[key])) { + doc[key] = bsonCSV[def.type].fromString(data[key]); + } else { + doc[key] = transformProjectedTypes(fields, data[key]); + } + return doc; + }, {}); +} + +export default transformProjectedTypes; + +/** + * TODO: lucas: Add detection for nothing unchecked and all fields + * are default type and return a PassThrough. 
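+ *
+ * Rough sketch of that fast path (untested; assumes the default type is
+ * 'String', that each field looks like `{ path, checked, type }`, and that
+ * `PassThrough` gets imported from 'stream' next to `Transform`):
+ *
+ *   const isNoop = fields.every((f) => f.checked && f.type === 'String');
+ *   if (isNoop) return new PassThrough({ objectMode: true });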
+ */ + +export function transformProjectedTypesStream(fields) { + return new Transform({ + objectMode: true, + transform: function(doc, encoding, cb) { + cb(null, transformProjectedTypes(fields, doc)); + } + }); +} diff --git a/src/utils/import-apply-types-and-projection.spec.js b/src/utils/import-apply-types-and-projection.spec.js new file mode 100644 index 0000000..b331d74 --- /dev/null +++ b/src/utils/import-apply-types-and-projection.spec.js @@ -0,0 +1,79 @@ +import apply from './import-apply-types-and-projection'; + +describe('import-apply-types-and-projection', () => { + it('should include all fields by default', () => { + const res = apply([{ path: '_id', checked: true, type: 'String' }], { + _id: 'arlo' + }); + expect(res).to.deep.equal({ + _id: 'arlo' + }); + }); + it('should remove an unchecked path', () => { + const res = apply( + [ + { path: '_id', checked: true, type: 'String' }, + { path: 'name', checked: false, type: 'String' } + ], + { + _id: 'arlo', + name: 'Arlo' + } + ); + + expect(res).to.deep.equal({ + _id: 'arlo' + }); + }); + it('should deserialize strings to selected types', () => { + const res = apply( + [ + { path: '_id', checked: true, type: 'String' }, + { path: 'name', checked: true, type: 'String' }, + { path: 'birthday', checked: true, type: 'Date' } + ], + { + _id: 'arlo', + name: 'Arlo', + birthday: '2014-09-21' + } + ); + + expect(res).to.deep.equal({ + _id: 'arlo', + name: 'Arlo', + birthday: new Date('2014-09-21') + }); + }); + it('should handle nested objects'); + describe('Weird Cases', () => { + it('should throw if non ascii ends up in field paths', () => { + /** + * NOTE: lucas: Found this weird bug where my apple health data + * caused failed type conversion bc of a null pointer. + * This case makes sure that doesn't happen and is mostly + * so I remember to figure out whats happening later. + */ + const fields = [ + { path: 'type', checked: false, type: 'String' }, + { path: 'sourceName', checked: true, type: 'String' }, + { path: 'sourceVersion', checked: true, type: 'Number' }, + { path: 'creationDate', checked: true, type: 'Date' }, + { path: 'startDate', checked: true, type: 'Date' }, + { path: 'endDate', checked: true, type: 'Date' } + ]; + + const data = { + creationDate: '2016-11-04 06:30:14 -0400', + endDate: '2016-11-04 06:30:14 -0400', + sourceName: 'Clock', + sourceVersion: '50', + startDate: '2016-11-03 22:30:00 -0400', + type: 'HKCategoryTypeIdentifierSleepAnalysis', + value: 'HKCategoryValueSleepAnalysisInBed' + }; + + expect(apply.bind(null, fields, data)).to.throw(); + }); + }); +}); diff --git a/src/utils/parsers.js b/src/utils/import-parser.js similarity index 55% rename from src/utils/parsers.js rename to src/utils/import-parser.js index 9e393ee..d03250d 100644 --- a/src/utils/parsers.js +++ b/src/utils/import-parser.js @@ -7,17 +7,7 @@ import parseJSON from 'parse-json'; import throttle from 'lodash.throttle'; import progressStream from 'progress-stream'; -const debug = createLogger('parsers'); - -/** - * TODO: lucas: Add papaparse `dynamicTyping` of values - * https://github.com/mholt/PapaParse/blob/5219809f1d83ffa611ebe7ed13e8224bcbcf3bd7/papaparse.js#L1216 - */ - -/** - * TODO: lucas: mapHeaders option to support existing `.()` caster - * like `mongoimport` does today. 
- */ +const debug = createLogger('import-parser'); /** * A transform stream that turns file contents in objects @@ -27,7 +17,6 @@ const debug = createLogger('parsers'); */ export const createCSVParser = function({ delimiter = ',' } = {}) { return csv({ - strict: true, separator: delimiter }); }; @@ -57,7 +46,7 @@ export const createJSONParser = function({ } }); - parser.on('data', d => { + parser.on('data', (d) => { const doc = EJSON.deserialize(d, { promoteValues: true, bsonRegExp: true @@ -84,20 +73,75 @@ export const createJSONParser = function({ return stream; }; +/** + * How often to update progress via a leading throttle + */ +const PROGRESS_UPDATE_INTERVAL = 250; + +/** + * Since we have no idea what the size of a document + * will be as part of an import before we've started, + * just pick a nice number of bytes :) + * + * @see utils/import-size-guesstimator + * will figure out a more realistic number once the documents start + * flowing through the pipechain. + */ +const NAIVE_AVERAGE_DOCUMENT_SIZE = 800; + +/** + * Creates a transform stream for measuring progress at any point in the pipechain + * backing an import operation. The `onProgress` callback will be throttled to only update once + * every `${PROGRESS_UPDATE_INTERVAL}ms`. + * + * @param {Number} fileSize The total file size + * @param {Function} onProgress Your callback for progress updates + * @returns {stream.Transform} + */ export const createProgressStream = function(fileSize, onProgress) { const progress = progressStream({ objectMode: true, - length: fileSize / 800, - time: 500 + length: fileSize / NAIVE_AVERAGE_DOCUMENT_SIZE, + time: PROGRESS_UPDATE_INTERVAL // NOTE: ask lucas how time is different from an interval here. }); // eslint-disable-next-line camelcase function update_import_progress_throttled(info) { - // debug('progress', info); - // dispatch(onProgress(info.percentage, dest.docsWritten)); onProgress(null, info); } - const updateProgress = throttle(update_import_progress_throttled, 500); + const updateProgress = throttle( + update_import_progress_throttled, + PROGRESS_UPDATE_INTERVAL, + { leading: true } + ); progress.on('progress', updateProgress); return progress; }; + +/** + * Convenience for creating the right parser transform stream in a single call. + * + * @param {String} fileName + * @param {String} fileType `csv` or `json` + * @param {String} delimiter See `createCSVParser()` + * @param {Boolean} fileIsMultilineJSON + * @returns {stream.Transform} + */ +function createParser({ + fileName = 'myfile', + fileType = 'json', + delimiter = ',', + fileIsMultilineJSON = false +} = {}) { + if (fileType === 'csv') { + return createCSVParser({ + delimiter: delimiter + }); + } + return createJSONParser({ + selector: fileIsMultilineJSON ? 
null : '*', + fileName: fileName + }); +} + +export default createParser; diff --git a/src/utils/parsers.spec.js b/src/utils/import-parser.spec.js similarity index 63% rename from src/utils/parsers.spec.js rename to src/utils/import-parser.spec.js index 5047074..44f789c 100644 --- a/src/utils/parsers.spec.js +++ b/src/utils/import-parser.spec.js @@ -2,12 +2,12 @@ import fs from 'fs'; import path from 'path'; import stream from 'stream'; -import { createCSVParser, createJSONParser } from './parsers'; +import createParser from './import-parser'; const TEST_DIR = path.join(__dirname, '..', '..', '..', 'test'); const FIXTURES = { GOOD_CSV: path.join(TEST_DIR, 'good.csv'), - BAD_CSV: path.join(TEST_DIR, 'bad.csv'), + BAD_CSV: path.join(TEST_DIR, 'mongoimport', 'test_bad.csv'), JS_I_THINK_IS_JSON: path.join(TEST_DIR, 'js-i-think-is.json'), GOOD_JSON: path.join(TEST_DIR, 'docs.json'), LINE_DELIMITED_JSON: path.join(TEST_DIR, 'docs.jsonl'), @@ -16,9 +16,10 @@ const FIXTURES = { 'docs-with-newline-ending.jsonl' ) }; -function runParser(file, parser) { + +function runParser(src, parser) { const docs = []; - const source = fs.createReadStream(file); + const source = fs.createReadStream(src); const dest = new stream.Writable({ objectMode: true, write(chunk, encoding, callback) { @@ -36,53 +37,44 @@ function runParser(file, parser) { }); } -describe('parsers', () => { +describe('import-parser', () => { describe('json', () => { it('should parse a file', () => { - return runParser(FIXTURES.GOOD_JSON, createJSONParser()).then(docs => { + return runParser(FIXTURES.GOOD_JSON, createParser()).then((docs) => { expect(docs).to.have.length(3); }); }); it('should parse a line-delimited file', () => { return runParser( FIXTURES.LINE_DELIMITED_JSON, - createJSONParser({ selector: null }) - ).then(docs => { - expect(docs).to.have.length(3); - }); + createParser({ fileType: 'json', isMultilineJSON: true }) + ).then((docs) => expect(docs).to.have.length(3)); }); it('should parse a line-delimited file with an extra empty line', () => { return runParser( FIXTURES.LINE_DELIMITED_JSON_EXTRA_LINE, - createJSONParser({ selector: null }) - ).then(docs => { - expect(docs).to.have.length(3); - }); + createParser({ isMultilineJSON: true }) + ).then((docs) => expect(docs).to.have.length(3)); }); describe('deserialize', () => { - const DOCS = []; + const BSON_DOCS = []; before(() => { const src = FIXTURES.GOOD_JSON; - return runParser(src, createJSONParser({ fileName: src })).then( - docs => { - DOCS.push.apply(DOCS, docs); - } - ); + return runParser(src, createParser()).then(function(docs) { + BSON_DOCS.push.apply(BSON_DOCS, docs); + }); }); - it('should have bson ObjectId', () => { - expect(DOCS[0]._id._bsontype).to.equal('ObjectID'); + it('should have bson ObjectId for _id', () => { + expect(BSON_DOCS[0]._id._bsontype).to.equal('ObjectID'); }); }); describe('errors', () => { let parseError; - - before(done => { - const src = FIXTURES.JS_I_THINK_IS_JSON; - const p = runParser(src, createJSONParser({ fileName: src })); - p.catch(err => (parseError = err)); + before((done) => { + const p = runParser(FIXTURES.JS_I_THINK_IS_JSON, createParser()); + p.catch((err) => (parseError = err)); expect(p).to.be.rejected.and.notify(done); }); - it('should catch errors by default', () => { expect(parseError.name).to.equal('JSONError'); }); @@ -95,16 +87,24 @@ describe('parsers', () => { }); describe('csv', () => { it('should work', () => { - return runParser(FIXTURES.GOOD_CSV, createCSVParser()).then(docs => { + return runParser( + 
FIXTURES.GOOD_CSV, + createParser({ fileType: 'csv' }) + ).then((docs) => { expect(docs).to.have.length(3); }); }); - describe('errors', () => { + /** + * TODO: lucas: Revisit and unskip if we really want csv to be strict. + */ + describe.skip('errors', () => { let parseError; - before(done => { - const src = FIXTURES.BAD_CSV; - const p = runParser(src, createCSVParser()); - p.catch(err => (parseError = err)); + before((done) => { + const p = runParser( + FIXTURES.BAD_CSV, + createParser({ fileType: 'csv', delimiter: '\n' }) + ); + p.catch((err) => (parseError = err)); expect(p).to.be.rejected.and.notify(done); }); diff --git a/src/utils/import-preview.js b/src/utils/import-preview.js new file mode 100644 index 0000000..8bb26ed --- /dev/null +++ b/src/utils/import-preview.js @@ -0,0 +1,85 @@ +import { Writable } from 'stream'; +import peek from 'peek-stream'; +import createParser from './import-parser'; +import dotnotation from './dotnotation'; + +import { detectType } from './bson-csv'; +import { createLogger } from './logger'; +const debug = createLogger('import-preview'); + +/** + * Peek the first 20k of a file and parse it. + * + * @param {String} fileType csv|json + * @param {String} delimiter + * @param {Boolean} fileIsMultilineJSON + * @returns {stream.Transform} + */ +export const createPeekStream = function( + fileType, + delimiter, + fileIsMultilineJSON +) { + return peek({ maxBuffer: 20 * 1024 }, function(data, swap) { + return swap( + null, + createParser({ + fileType: fileType, + delimiter: delimiter, + fileIsMultilineJSON: fileIsMultilineJSON + }) + ); + }); +}; + +/** + * Collects 10 parsed documents from createPeekStream(). + * + * @option {Number} MAX_SIZE The number of documents/rows we want to preview [Default `10`] + * @returns {stream.Writable} + */ +export default function({ MAX_SIZE = 10 } = {}) { + return new Writable({ + objectMode: true, + write: function(doc, encoding, next) { + if (!this.docs) { + this.docs = []; + this.fields = []; + this.values = []; + } + + if (this.docs.length >= MAX_SIZE) { + return next(); + } + this.docs.push(doc); + + const docAsDotnotation = dotnotation.serialize(doc); + + if (this.fields.length === 0) { + // eslint-disable-next-line prefer-const + for (let [key, value] of Object.entries(docAsDotnotation)) { + // TODO: lucas: Document this weird bug I found with my apple health data. 
+ // eslint-disable-next-line no-control-regex + key = key.replace(/[^\x00-\x7F]/g, ''); + this.fields.push({ + path: key, + checked: true, + type: detectType(value) + }); + } + debug('set fields', this.fields, { from: doc }); + } + + const keys = Object.keys(docAsDotnotation); + if (keys.length !== this.fields.length) { + debug('invariant detected!', { + expected: this.fields.map((f) => f.path), + got: keys + }); + } + this.values.push(Object.values(docAsDotnotation)); + + return next(null); + } + }); +} diff --git a/src/utils/import-preview.spec.js b/src/utils/import-preview.spec.js new file mode 100644 index 0000000..cef9f44 --- /dev/null +++ b/src/utils/import-preview.spec.js @@ -0,0 +1,66 @@ +import createPreviewWritable, { createPeekStream } from './import-preview'; +import { Readable, pipeline } from 'stream'; + +import fs from 'fs'; +import path from 'path'; + +const TEST_DIR = path.join(__dirname, '..', '..', '..', 'test'); +const FIXTURES = { + GOOD_CSV: path.join(TEST_DIR, 'good.csv'), + BAD_CSV: path.join(TEST_DIR, 'mongoimport', 'test_bad.csv'), + JS_I_THINK_IS_JSON: path.join(TEST_DIR, 'js-i-think-is.json'), + GOOD_JSON: path.join(TEST_DIR, 'docs.json'), + LINE_DELIMITED_JSON: path.join(TEST_DIR, 'docs.jsonl'), + LINE_DELIMITED_JSON_EXTRA_LINE: path.join( + TEST_DIR, + 'docs-with-newline-ending.jsonl' + ) +}; + +describe.skip('import-preview', () => { + describe('createPreviewWritable', () => { + it('should work with docs < MAX_SIZE', (done) => { + const dest = createPreviewWritable(); + const source = Readable.from([{ _id: 1 }]); + pipeline(source, dest, function(err) { + if (err) return done(err); + + expect(dest.docs.length).to.equal(1); + done(); + }); + }); + + it('should work with docs === MAX_SIZE', (done) => { + const dest = createPreviewWritable({ MAX_SIZE: 2 }); + const source = Readable.from([{ _id: 1 }, { _id: 2 }]); + pipeline(source, dest, function(err) { + if (err) return done(err); + + expect(dest.docs.length).to.equal(2); + done(); + }); + }); + + it('should stop when it has enough docs', (done) => { + const dest = createPreviewWritable({ MAX_SIZE: 2 }); + const source = Readable.from([{ _id: 1 }, { _id: 2 }, { _id: 3 }]); + pipeline(source, dest, function(err) { + if (err) return done(err); + + expect(dest.docs.length).to.equal(2); + done(); + }); + }); + }); + describe('func', () => { + it('should return 2 docs for a csv containing 3 docs', (done) => { + const src = fs.createReadStream(FIXTURES.GOOD_CSV); + const dest = createPreviewWritable({ MAX_SIZE: 2 }); + + pipeline(src, createPeekStream('csv'), dest, function(peeker) { + expect(dest.docs.length).to.equal(2); + done(); + }); + }); + }); +}); diff --git a/src/utils/import-size-guesstimator.spec.js b/src/utils/import-size-guesstimator.spec.js index 232d648..e7ea36e 100644 --- a/src/utils/import-size-guesstimator.spec.js +++ b/src/utils/import-size-guesstimator.spec.js @@ -10,12 +10,14 @@ // import-size-guesstimator.js?6e25:46 bytesPerDoc 458.752 // import-size-guesstimator.js?6e25:47 docs seen 1000 // import-size-guesstimator.js?6e25:48 est docs 202250.81743512835 -import { createCSVParser } from './parsers'; +import { createCSVParser } from './import-parser'; import createImportSizeGuesstimator from './import-size-guesstimator'; import { pipeline } from 'stream'; -// TODO: lucas: This works functionally in electron but can't -// figure out how/why in mocha-webpack. 
+/** + * TODO: lucas: This works functionally in electron but can't + * figure out how/why mocha-webpack until we get electron@6 + */ describe.skip('guesstimator', () => { it('should guess', function(done) { this.timeout(5000); diff --git a/src/utils/remove-empty-fields.js b/src/utils/remove-blanks.js similarity index 75% rename from src/utils/remove-empty-fields.js rename to src/utils/remove-blanks.js index bd0485b..84166ef 100644 --- a/src/utils/remove-empty-fields.js +++ b/src/utils/remove-blanks.js @@ -6,9 +6,9 @@ import { Transform, PassThrough } from 'stream'; * @returns {Object} * @param {Object} data */ -function removeEmptyFields(data) { +function removeBlanks(data) { if (Array.isArray(data)) { - return data.map(removeEmptyFields); + return data.map(removeBlanks); } else if (typeof data !== 'object' || data === null || data === undefined) { return data; } @@ -21,21 +21,21 @@ function removeEmptyFields(data) { if (typeof data[key] === 'string' && data[key] === '') { return doc; } - doc[key] = removeEmptyFields(data[key]); + doc[key] = removeBlanks(data[key]); return doc; }, {}); } -export function removeEmptyFieldsStream(ignoreEmptyFields) { +export function removeBlanksStream(ignoreEmptyFields) { if (!ignoreEmptyFields) { return new PassThrough(); } return new Transform({ objectMode: true, transform: function(doc, encoding, cb) { - cb(null, removeEmptyFields(doc)); + cb(null, removeBlanks(doc)); } }); } -export default removeEmptyFields; +export default removeBlanks; diff --git a/src/utils/remove-empty-fields.spec.js b/src/utils/remove-blanks.spec.js similarity index 71% rename from src/utils/remove-empty-fields.spec.js rename to src/utils/remove-blanks.spec.js index 7bb5010..146d986 100644 --- a/src/utils/remove-empty-fields.spec.js +++ b/src/utils/remove-blanks.spec.js @@ -1,12 +1,12 @@ -import removeEmptyFields from './remove-empty-fields'; +import removeBlanks from './remove-blanks'; -describe('remove-empty-fields', () => { +describe('remove-blanks', () => { it('should remove empty strings', () => { const source = { _id: 1, empty: '' }; - const result = removeEmptyFields(source); + const result = removeBlanks(source); expect(result).to.deep.equal({ _id: 1 }); }); @@ -18,7 +18,7 @@ describe('remove-empty-fields', () => { falsed: false, undef: undefined }; - const result = removeEmptyFields(source); + const result = removeBlanks(source); expect(result).to.deep.equal({ _id: 1, nulled: null, diff --git a/src/utils/reveal-file.js b/src/utils/reveal-file.js index 8aa4bfd..926ba15 100644 --- a/src/utils/reveal-file.js +++ b/src/utils/reveal-file.js @@ -1,3 +1,9 @@ +/** + * A helper function for opening the file explorer UI + * to a highlighted path of `fileName` (e.g. "Show in Finder" on macOS) + * using the builtin electron API. + * @param {String} fileName + **/ export default function revealFile(fileName) { const { shell } = require('electron'); shell.showItemInFolder(fileName); diff --git a/src/utils/styler.js b/src/utils/styler.js index a64503e..63130cc 100644 --- a/src/utils/styler.js +++ b/src/utils/styler.js @@ -26,7 +26,8 @@ * ``` */ export default function styler(styles, prefix) { - return function get_style_for_component(what='') { + // eslint-disable-next-line camelcase + return function get_style_for_component(what = '') { const k = `${prefix}${what !== '' ? 
'-' + what : ''}`; const def = styles[k]; if (!def) { diff --git a/test/bad.csv b/test/bad.csv deleted file mode 100644 index ce8b0c0..0000000 --- a/test/bad.csv +++ /dev/null @@ -1,2 +0,0 @@ -"I am", -A bad {csv}, \t, fo'o
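A rough sketch of how `createPeekStream()` and the preview writable from `src/utils/import-preview.js` are meant to compose: peek the head of the file, parse just enough rows, then read the guessed field specs and row values off the writable once the pipechain finishes. The fixture path and the `pipeline` wiring below are illustrative assumptions, not the plugin's actual module wiring.

```javascript
import fs from 'fs';
import { pipeline } from 'stream';
import createPreviewWritable, { createPeekStream } from './src/utils/import-preview';

// Hypothetical input file; any small csv works for a preview.
const fileName = './test/good.csv';

const preview = createPreviewWritable({ MAX_SIZE: 10 });

pipeline(
  fs.createReadStream(fileName),
  // args: fileType, delimiter, fileIsMultilineJSON
  createPeekStream('csv', ',', false),
  preview,
  (err) => {
    if (err) return console.error('preview failed', err);
    // preview.fields -> [{ path, checked, type }, ...] drives the preview table;
    // preview.values holds the matching row values for each previewed doc.
    console.log(preview.fields, preview.values);
  }
);
```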
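Once the user has toggled checkboxes and types in the preview table, those same field specs feed `import-apply-types-and-projection`. A minimal sketch using the shapes exercised in the spec above; the `ssn` field is a made-up example of an unchecked path.

```javascript
import transformProjectedTypes, {
  transformProjectedTypesStream
} from './src/utils/import-apply-types-and-projection';

const fields = [
  { path: '_id', checked: true, type: 'String' },
  { path: 'birthday', checked: true, type: 'Date' },
  { path: 'ssn', checked: false, type: 'String' } // unchecked -> projected out
];

// Single document:
transformProjectedTypes(fields, {
  _id: 'arlo',
  birthday: '2014-09-21',
  ssn: '000-00-0000'
});
// -> { _id: 'arlo', birthday: new Date('2014-09-21') }

// Or as an objectMode transform anywhere in the import pipechain:
const applyTypes = transformProjectedTypesStream(fields);
```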
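And a sketch of how the utils above could chain together for an actual import run. The file path and field specs are hypothetical, and the destination writable is a stand-in for the data service's bulk inserts; the real wiring lives in the import module, not here.

```javascript
import fs from 'fs';
import { pipeline, Writable } from 'stream';

import createParser, { createProgressStream } from './src/utils/import-parser';
import { removeBlanksStream } from './src/utils/remove-blanks';
import { transformProjectedTypesStream } from './src/utils/import-apply-types-and-projection';

const fileName = '/path/to/people.csv'; // hypothetical source file
const fileSize = fs.statSync(fileName).size;

// From the preview step. Note every column in the file needs an entry here;
// unknown keys currently hit the throw covered in the Weird Cases spec above.
const fields = [
  { path: '_id', checked: true, type: 'String' },
  { path: 'birthday', checked: true, type: 'Date' }
];

// Stand-in destination; the real import module writes bulk ops instead.
const dest = new Writable({
  objectMode: true,
  write(doc, encoding, next) {
    console.log('would insert', doc);
    next();
  }
});

pipeline(
  fs.createReadStream(fileName),
  createParser({ fileName, fileType: 'csv', delimiter: ',' }),
  removeBlanksStream(true),
  transformProjectedTypesStream(fields),
  createProgressStream(fileSize, (err, info) => {
    console.log('progress', info.percentage);
  }),
  dest,
  (err) => {
    if (err) return console.error('import failed', err);
    console.log('import complete');
  }
);
```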