Skip to content

Commit

Permalink
perf(core): increased throughput (mia-platform#145)
Browse files Browse the repository at this point in the history
* perf(core): increased throughput

* docs(CHANGELOG): update
  • Loading branch information
hiimjako committed Jul 24, 2023
1 parent c0c4b03 commit 98a7c04
Show file tree
Hide file tree
Showing 17 changed files with 96 additions and 91 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
### Fixed

- [#138](https://github.com/mia-platform/crud-service/pull/138) patch import route validate the presence for the `_id` field
- [#145](https://github.com/mia-platform/crud-service/pull/145) improved GET response performance

## 6.8.0 - 2023-07-11

Expand Down
4 changes: 2 additions & 2 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ const ajvKeywords = require('ajv-keywords')

const { readdirSync } = require('fs')
const { join } = require('path')
const { omit } = require('ramda')
const lomit = require('lodash.omit')
const lunset = require('lodash.unset')

const myPackage = require('./package')
Expand Down Expand Up @@ -255,7 +255,7 @@ function createLookupModel(fastify, viewDefinition, mergedCollections) {
const lookupProjection = pipeline.find(({ $project }) => $project)?.$project ?? {}
const parsedLookupProjection = []
const lookupCollectionDefinition = {
...omit(['fields'], viewDefinition),
...lomit(viewDefinition, ['fields']),
schema: {
type: 'object',
properties: {},
Expand Down
64 changes: 28 additions & 36 deletions lib/AdditionalCaster.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,68 +26,60 @@ const { getPathFromPointer } = require('./JSONPath.utils')
class AdditionalCaster {
constructor(collectionDefinition) {
this._collectionSchema = collectionDefinition.schema ?? fieldsToSchema(collectionDefinition.fields)
}

castResultsAsStream() {
return through2.obj((chunk, _, callback) => {
const castedChunk = this.castItem(chunk)
callback(null, castedChunk)
})
}

castItem(item) {
let response = { ...item, _id: item._id?.toString() }
const pathToCoordinates = JSONPath({
json: response,
path: '$..[?(@property === "coordinates")]',
this._pathsToObjectIds = JSONPath({
json: this._collectionSchema.properties,
resultType: 'pointer',
path: '$..[?(@.type === "ObjectId")]',
})
.map(getPathFromPointer)

const pathToDates = JSONPath({
json: response,
path: '$..[?(Object.prototype.toString.call(@) === "[object Date]")]',
this._pathsToGeoPoint = JSONPath({
json: this._collectionSchema.properties,
path: '$..[?(@.type === "GeoPoint")]',
resultType: 'pointer',
})
.map(getPathFromPointer)

const pathsToObjectIds = JSONPath({
this._pathToCoordinates = this._pathsToGeoPoint.map(path => `${path}.coordinates`)

this._pathToDates = JSONPath({
json: this._collectionSchema.properties,
path: '$..[?(@.format === "date-time")]',
resultType: 'pointer',
path: '$..[?(@.type === "ObjectId")]',
})
.map(getPathFromPointer)
}

const pathsToGeoPoint = JSONPath({
json: this._collectionSchema.properties,
path: '$..[?(@.type === "GeoPoint")]',
resultType: 'pointer',
castResultsAsStream() {
return through2.obj((chunk, _, callback) => {
const castedChunk = this.castItem(chunk)
callback(null, castedChunk)
})
.map(getPathFromPointer)
}

const geoFieldRegexs = pathsToGeoPoint.map(path => new RegExp(`${path.split('.').join('\\.(\\d+\\.)?')}\\.(\\d+\\.)?coordinates`))
castItem(item) {
let response = { ...item, _id: item._id?.toString() }

for (const path of pathToCoordinates) {
if (geoFieldRegexs.some(geoFieldRegex => geoFieldRegex.test(path))) {
const pathLevels = path
.split('.')
const oneLevelUpPath = pathLevels
.slice(0, pathLevels.length - 1)
.join('.')
response = lset(response, oneLevelUpPath, lget(response, path))
for (const path of this._pathsToGeoPoint) {
const coordinatesPath = `${path}.coordinates`
const value = lget(response, coordinatesPath)
if (value) {
response = lset(response, path, value)
}
}

for (const path of pathsToObjectIds) {
for (const path of this._pathsToObjectIds) {
const value = lget(response, path)
if (value) {
response = lset(response, path, value.toString())
}
}

for (const path of pathToDates) {
for (const path of this._pathToDates) {
const value = lget(response, path)
if (value) {
// We want also support $dateToString mongo operator
// So we are not always sure that value is a Date
if (value instanceof Date) {
response = lset(response, path, value.toISOString())
}
}
Expand Down
26 changes: 14 additions & 12 deletions lib/JSONSchemaGenerator.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

'use strict'

const { omit } = require('ramda')
const lomit = require('lodash.omit')

const {
ARRAY,
Expand Down Expand Up @@ -65,7 +65,6 @@ const {
specialTypesValidationCompatibility,
SCHEMAS_ID,
} = require('./schemaGetters')
const Ramda = require('ramda')

module.exports = class JSONSchemaGenerator {
constructor(
Expand Down Expand Up @@ -387,7 +386,7 @@ module.exports = class JSONSchemaGenerator {
...schemaDetail.querystring,
type: 'object',
properties: {
...omit(unsupportedQueryParams, this._propertiesGetListValidation),
...lomit(this._propertiesGetListValidation, unsupportedQueryParams),
...getQueryStringFromRawSchema(this._pathFieldsRawSchema.paths),
},
...Object.keys(patternProperties).length > 0 ? { patternProperties } : {},
Expand Down Expand Up @@ -1698,14 +1697,17 @@ function formatEndpointTag(endpointBasePath) {
.replace(/-/g, ' ')
}

function removeFileTypeFromSchema(schema) {
const schemaWithoutFile = Ramda.evolve({
anyOf: Ramda.map(
Ramda.evolve({
properties:
Ramda.omit(['file']),
}),
),
}, schema)
/**
 * Return a copy of a body schema with the `file` property removed from the
 * `properties` of every `anyOf` sub-schema.
 *
 * The original implementation reassigned `anyOfschema.properties` inside the
 * `map` callback, mutating the sub-schema objects owned by the caller (the
 * top-level spread is only a shallow copy). Each sub-schema is now shallow-
 * copied before its `properties` map is filtered, so `bodySchema` and its
 * `anyOf` entries are left untouched.
 *
 * @param {Object} bodySchema - JSON schema containing an `anyOf` array of
 *   sub-schemas, each with a `properties` object.
 * @returns {Object} a new schema object whose `anyOf` sub-schemas no longer
 *   contain a `file` property.
 */
function removeFileTypeFromSchema(bodySchema) {
  return {
    ...bodySchema,
    anyOf: bodySchema.anyOf.map((subSchema) => ({
      ...subSchema,
      properties: Object.fromEntries(
        Object.entries(subSchema.properties)
          .filter(([key]) => key !== 'file')
      ),
    })),
  }
}
4 changes: 2 additions & 2 deletions lib/generatePathFromJsonSchema.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

'use strict'

const { omit } = require('ramda')
const lomit = require('lodash.omit')

const {
JSON_SCHEMA_OBJECT_TYPE,
Expand Down Expand Up @@ -251,7 +251,7 @@ function generateOperators(paths, { escapeDotChar } = {}) {
}
if (paths[path].items.type === JSON_SCHEMA_OBJECT_TYPE) {
const mergePath = joinWithDot({ escapeDotChar }, path, DOLLAR, ARRAY_MERGE_ELEMENT_OPERATOR)
newPaths[mergePath + END_PATTERN] = omit(MERGE_FIELDS_TO_OMIT, paths[path].items)
newPaths[mergePath + END_PATTERN] = lomit(paths[path].items, MERGE_FIELDS_TO_OMIT)
}
// TODO: call generate function in order to support dot notation in $.replace and $.merge
// e.g. foo.bar.$.replace: {'lorem.ipsum': 2}
Expand Down
4 changes: 2 additions & 2 deletions lib/httpInterface.js
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ const JSONStream = require('JSONStream')
const through2 = require('through2')
const fastJson = require('fast-json-stringify')

const { isEmpty } = require('ramda')
const lisEmpty = require('lodash.isempty')


const {
Expand Down Expand Up @@ -1077,7 +1077,7 @@ function resolveRawProjectionString(rawProjection, _acls, allFieldNames, log) {
const rawProjectionObject = resolveRawProjection(rawProjection)
const projection = removeAclColumnsFromRawProjection(rawProjectionObject, _acls)

return !isEmpty(projection) ? [projection] : []
return !lisEmpty(projection) ? [projection] : []
} catch (errorMessage) {
log.error(errorMessage.message)
throw new BadRequestError(errorMessage.message)
Expand Down
28 changes: 18 additions & 10 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,14 @@
"jsonpath-plus": "^7.2.0",
"JSONStream": "^1.3.5",
"lodash.get": "^4.4.2",
"lodash.isempty": "^4.4.0",
"lodash.omit": "^4.5.0",
"lodash.pick": "^4.4.0",
"lodash.set": "^4.3.2",
"lodash.unset": "^4.5.2",
"mongodb-client-encryption": "^2.9.0",
"ndjson": "^2.0.0",
"pino": "^8.14.1",
"ramda": "^0.29.0",
"through2": "^4.0.2",
"uuid": "^9.0.0"
},
Expand Down
4 changes: 2 additions & 2 deletions tests/aggregate.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

const tap = require('tap')
const abstractLogger = require('abstract-logging')
const { omit } = require('ramda')
const lomit = require('lodash.omit')

const { STATES } = require('../lib/consts')
const CrudService = require('../lib/CrudService')
Expand Down Expand Up @@ -365,7 +365,7 @@ tap.test('aggregate', async t => {

const expectedDocsWithExcludedFields = fixtures.map((doc) => {
const docCopy = { ...doc }
return omit(['attachments', 'isbn', 'price'], docCopy)
return lomit(docCopy, ['attachments', 'isbn', 'price'])
})

t.test('should return only the right fields', async t => {
Expand Down
4 changes: 2 additions & 2 deletions tests/findAll.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
const tap = require('tap')
const abstractLogger = require('abstract-logging')
const { MongoClient } = require('mongodb')
const { omit } = require('ramda')
const lomit = require('lodash.omit')

const { STATES } = require('../lib/consts')
const CrudService = require('../lib/CrudService')
Expand Down Expand Up @@ -381,7 +381,7 @@ tap.test('findAll', async t => {

const expectedDocsWithExcludedFields = fixtures.map((doc) => {
const docCopy = { ...doc }
return omit(['attachments', 'isbn', 'price'], docCopy)
return lomit(docCopy, ['attachments', 'isbn', 'price'])
})

t.test('should return only the right fields', async t => {
Expand Down
4 changes: 2 additions & 2 deletions tests/generatePathFieldsForRawSchema.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

const tap = require('tap')
const pino = require('pino')
const R = require('ramda')
const lpick = require('lodash.pick')

const generatePathFieldsForRawSchema = require('../lib/generatePathFieldsForRawSchema')

Expand Down Expand Up @@ -142,7 +142,7 @@ tap.test('generatePathFieldsForRawSchema', t => {
field: 'the-invalid-field',
msg: errorMsg,
}
t.strictSame(R.pick(['collectionName', 'field', 'msg'], errorLogs[0]), EXPECTED_LOG)
t.strictSame(lpick(errorLogs[0], ['collectionName', 'field', 'msg']), EXPECTED_LOG)

t.end()
})
Expand Down
4 changes: 2 additions & 2 deletions tests/httpInterface.patchById.special.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

const tap = require('tap')
const { ObjectId } = require('mongodb')
const R = require('ramda')
const lomit = require('lodash.omit')
const { STANDARD_FIELDS } = require('../lib/CrudService')

const {
Expand Down Expand Up @@ -427,7 +427,7 @@ tap.test('HTTP PATCH /<id> - nested object', async t => {
})

t.equal(response.statusCode, 200)
t.strictSame(R.omit([UPDATEDAT, CREATEDAT, UPDATERID], JSON.parse(response.payload)), {
t.strictSame(lomit(JSON.parse(response.payload), [UPDATEDAT, CREATEDAT, UPDATERID]), {
_id: DOC_TEST._id.toHexString(),
name: DOC_TEST.name,
environments: [{
Expand Down
4 changes: 2 additions & 2 deletions tests/httpInterface.patchImport.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ const { expectedBooks, bookToUpdate } = require('./filesFixtures/expectedResults
const { setUpTest, prefix } = require('./httpInterface.utils')
const { newUpdaterId } = require('./utils')
const FormData = require('form-data')
const { omit } = require('ramda')
const lomit = require('lodash.omit')
const { CREATORID, UPDATERID, CREATEDAT, UPDATEDAT } = require('../lib/consts')

tap.test('HTTP PATCH /import', async t => {
Expand Down Expand Up @@ -206,7 +206,7 @@ tap.test('HTTP PATCH /import', async t => {
t.strictSame(body, { message: 'File uploaded successfully' })

const document = await collection.findOne({ _id: bookToUpdate._id })
t.strictSame(omit([CREATORID, UPDATERID, CREATEDAT, UPDATEDAT], document), bookToUpdate)
t.strictSame(lomit(document, [CREATORID, UPDATERID, CREATEDAT, UPDATEDAT]), bookToUpdate)
t.end()
})

Expand Down

0 comments on commit 98a7c04

Please sign in to comment.