Merge pull request #59 from fergiemcdowall/tinacms-upstream_pr
Tinacms upstream pr
fergiemcdowall committed Jun 16, 2023
2 parents 3cc65b2 + b73fee1 commit d1054c7
Showing 18 changed files with 1,321 additions and 886 deletions.
10 changes: 6 additions & 4 deletions .github/workflows/tests.yml → .github/workflows/test.yml
@@ -5,12 +5,14 @@ on:
jobs:
run-tests:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14, 16, 18, 20]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: '14'
node-version: ${{ matrix.node-version }}
- run: npm install
- run: sudo apt-get install xvfb
- run: xvfb-run --auto-servernum npm test

2,042 changes: 1,228 additions & 814 deletions package-lock.json

Large diffs are not rendered by default.

19 changes: 13 additions & 6 deletions package.json
@@ -1,30 +1,34 @@
{
"name": "fergies-inverted-index",
"version": "11.0.0",
"version": "12.0.0",
"description": "An inverted index that allows javascript objects to be easily serialised and retrieved using promises and map-reduce",
"browser": "src/browser.js",
"main": "src/node.js",
"dependencies": {
"browser-level": "^1.0.1",
"charwise": "^3.0.1",
"classic-level": "^1.2.0",
"memory-level": "^1.0.0",
"level-read-stream": "^1.1.0",
"traverse": "^0.6.6"
},
"files": [
"src"
],
"devDependencies": {
"assert": "^2.0.0",
"buffer": "^6.0.3",
"classic-level": "^1.3.0",
"diacritic": "^0.0.2",
"level-out": "^1.0.1",
"memory-level": "^1.0.0",
"os-browserify": "^0.3.0",
"path-browserify": "^1.0.1",
"process": "^0.11.10",
"standard": "^16.0.4",
"stemmer": "^2.0.0",
"stopword": "^1.0.7",
"stream-browserify": "^3.0.0",
"tape": "^5.3.1",
"tape-run": "^9.0.0",
"tape": "^5.6.3",
"tape-run": "^10.0.0",
"webpack": "^5.62.2",
"webpack-cli": "^4.9.1",
"world-bank-dataset": "^1.0.0"
@@ -47,5 +51,8 @@
"bugs": {
"url": "https://github.com/fergiemcdowall/fergies-inverted-index/issues"
},
"homepage": "https://github.com/fergiemcdowall/fergies-inverted-index"
"homepage": "https://github.com/fergiemcdowall/fergies-inverted-index",
"resolutions": {
"minimist": "^1.2.8"
}
}
13 changes: 4 additions & 9 deletions src/browser.js
@@ -1,12 +1,7 @@
const fii = require('./main.js')
const { BrowserLevel } = require('browser-level')

module.exports = ops =>
fii(
Object.assign(
{
db: BrowserLevel
},
ops
)
)
module.exports = ({ name = 'fii', ...ops }) => fii({
db: new BrowserLevel(name),
...ops
})
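With this change the browser entry point builds the BrowserLevel store itself from the `name` option and hands the instance to `fii` as `db`; because `...ops` is spread after `db`, a caller-supplied instance still takes precedence. A minimal usage sketch, not part of this diff, assuming the package is consumed under its published name and that documents carry an `_id` as in the tests below:

const fii = require('fergies-inverted-index') // resolves to src/browser.js when bundled

// Default: an index named 'my-index' backed by a BrowserLevel (IndexedDB) store
fii({ name: 'my-index' }).then(idx =>
  idx.PUT([{ _id: '1', colour: 'red' }])
)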
11 changes: 3 additions & 8 deletions src/main.js
@@ -1,7 +1,7 @@
const charwise = require('charwise')
// const level = require('level')
const read = require('./read.js')
const write = require('./write.js')
const levelOptions = require('./options.js')

// _match is nested by default so that AND and OR work correctly under
// the bonnet. Flatten array before presenting to consumer
@@ -29,7 +29,6 @@ const initStore = (ops = {}) =>
new Promise((resolve, reject) => {
ops = Object.assign(
{
name: 'fii',
// TODO: is tokenAppens still needed?
// tokenAppend can be used to create 'comment' spaces in
// tokens. For example using '#' allows tokens like boom#1.00 to
@@ -46,12 +45,7 @@
},
ops
)

const DB = ops.db
const db = new DB(ops.name, {
keyEncoding: charwise,
valueEncoding: 'json'
})
const db = ops.db
db.open(err =>
err ? reject(err) : resolve(Object.assign(ops, { _db: db }))
)
@@ -78,6 +72,7 @@ const makeAFii = ops => {
r.GET(tokens, pipeline).then(flattenMatchArrayInResults),
IMPORT: w.IMPORT,
LAST_UPDATED: r.LAST_UPDATED,
LEVEL_OPTIONS: levelOptions,
MAX: r.MAX,
MIN: r.MIN,
NOT: (...keys) =>
15 changes: 5 additions & 10 deletions src/node.js
@@ -1,12 +1,7 @@
const fii = require('./main.js')
const { ClassicLevel } = require('classic-level')
const { MemoryLevel } = require('memory-level')

module.exports = ops =>
fii(
Object.assign(
{
db: ClassicLevel
},
ops
)
)
module.exports = ({ name = 'fii', ...ops }) => fii({
db: new MemoryLevel(),
...ops
})
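Note the behavioural change here: the Node entry point previously constructed a persistent ClassicLevel store from `name`, but now defaults to an in-memory MemoryLevel, and `name` is no longer used to open a store. A sketch, under those assumptions, of how a consumer could keep a persistent index with the new API (mirroring the test setup further down; classic-level is now a devDependency, so consumers install it themselves):

const fii = require('fergies-inverted-index')
const { ClassicLevel } = require('classic-level')

// Persistent index: pass an abstract-level instance explicitly as `db`
fii({ db: new ClassicLevel('path/to/index') }).then(idx => {
  // idx now reads and writes through the supplied store
})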
6 changes: 6 additions & 0 deletions src/options.js
@@ -0,0 +1,6 @@
const charwise = require('charwise')

module.exports = {
keyEncoding: charwise,
valueEncoding: 'json'
}
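Since the charwise key encoding and JSON value encoding are no longer baked into the store at construction time, these shared options are spread into every read and write call, and exposed to consumers as `LEVEL_OPTIONS` (added to main.js above). A hedged sketch of reading the underlying store directly, as the updated tests do; the `['~CREATED']` key comes from this diff, the surrounding code is illustrative:

const fii = require('fergies-inverted-index')

fii({ name: 'my-index' }).then(idx =>
  idx.STORE.get(['~CREATED'], idx.LEVEL_OPTIONS)
    .then(created => console.log('index created at', created))
)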
19 changes: 11 additions & 8 deletions src/read.js
@@ -1,6 +1,7 @@
const tokenParser = require('./parseToken.js')
const charwise = require('charwise')
const { EntryStream } = require('level-read-stream')
const levelOptions = require('./options.js')

// polyfill- HI and LO coming in next version of charwise
charwise.LO = null
@@ -55,7 +56,7 @@ module.exports = ops => {
// If this token is a stopword then return 'undefined'
const removeStopwords = token =>
token.VALUE.GTE === token.VALUE.LTE &&
ops.stopwords.includes(token.VALUE.GTE)
ops.stopwords.includes(token.VALUE.GTE)
? undefined
: token

@@ -152,7 +153,8 @@ module.exports = ops => {
gte: formatKey(fieldName, token.VALUE.GTE),
lte: formatKey(fieldName, token.VALUE.LTE, true),
limit: token.LIMIT,
reverse: token.REVERSE
reverse: token.REVERSE,
...levelOptions
})
.on('data', token =>
token.value.forEach(docId =>
@@ -184,21 +186,22 @@
const fieldNames = []
new EntryStream(ops._db, {
gte: ['FIELD', charwise.LO],
lte: ['FIELD', charwise.HI]
lte: ['FIELD', charwise.HI],
...levelOptions
})
.on('data', d => fieldNames.push(d.value))
.on('end', () => resolve(fieldNames))
})

const CREATED = () => ops._db.get(['~CREATED'])
const CREATED = () => ops._db.get(['~CREATED'], levelOptions)

const LAST_UPDATED = () => ops._db.get(['~LAST_UPDATED'])
const LAST_UPDATED = () => ops._db.get(['~LAST_UPDATED'], levelOptions)

// takes an array of ids and determines if the corresponding
// documents exist in the index.
const EXIST = (...ids) =>
Promise.all(
ids.map(id => ops._db.get([ops.docExistsSpace, id]).catch(e => null))
ids.map(id => ops._db.get([ops.docExistsSpace, id], levelOptions).catch(e => null))
).then(result =>
result.reduce((acc, cur, i) => {
if (cur != null) acc.push(ids[i])
@@ -247,7 +250,7 @@ module.exports = ops => {

const OBJECT = _ids =>
Promise.all(
_ids.map(id => ops._db.get(['DOC', id._id]).catch(reason => null))
_ids.map(id => ops._db.get(['DOC', id._id], levelOptions).catch(reason => null))
).then(_objects =>
_ids.map((_id, i) => {
_id._object = _objects[i]
@@ -259,7 +262,7 @@
const getRange = rangeOps =>
new Promise((resolve, reject) => {
const keys = []
new EntryStream(ops._db, rangeOps)
new EntryStream(ops._db, { ...rangeOps, ...levelOptions })
.on('data', data => {
keys.push(data)
})
12 changes: 7 additions & 5 deletions src/write.js
@@ -1,5 +1,6 @@
const trav = require('traverse')
const reader = require('./read.js')
const levelOptions = require('./options.js')

module.exports = ops => {
// TODO: set reset this to the max value every time the DB is restarted
@@ -61,7 +62,7 @@ module.exports = ops => {
const gracefullGet = key =>
new Promise((resolve, reject) =>
_db
.get(key)
.get(key, levelOptions)
.then(resolve)
.catch(e => resolve([]))
)
@@ -159,6 +160,7 @@ module.exports = ops => {
mergedReverseIndex
.concat(putOptions.storeVectors ? objectIndex(docs, mode) : [])
.concat(availableFields(mergedReverseIndex)),
levelOptions,
e =>
resolve(
docs.map(doc => {
@@ -203,7 +205,7 @@
ops._db
.clear()
.then(() =>
ops._db.batch(index.map(entry => Object.assign(entry, { type: 'put' })))
ops._db.batch(index.map(entry => Object.assign(entry, { type: 'put' })), levelOptions)
)

const PUT = (docs, putOptions = {}) =>
@@ -219,14 +221,14 @@
).then(TIMESTAMP_LAST_UPDATED)

const TIMESTAMP_LAST_UPDATED = passThrough =>
ops._db.put(['~LAST_UPDATED'], Date.now()).then(() => passThrough)
ops._db.put(['~LAST_UPDATED'], Date.now(), levelOptions).then(() => passThrough)

const TIMESTAMP_CREATED = () =>
ops._db
.get(['~CREATED'])
.get(['~CREATED'], levelOptions)
.then(/* already created- do nothing */)
.catch(e =>
ops._db.put(['~CREATED'], Date.now()).then(TIMESTAMP_LAST_UPDATED)
ops._db.put(['~CREATED'], Date.now(), levelOptions).then(TIMESTAMP_LAST_UPDATED)
)

return {
16 changes: 12 additions & 4 deletions test/src/CREATED-test.js
@@ -1,26 +1,34 @@
const fii = require('../../')
const levelOptions = require('../../src/options.js')
const test = require('tape')

const sandbox = 'test/sandbox/'
const indexName = sandbox + 'CREATED'

let timestamp

const opts = {}
if (typeof window === 'undefined') {
const { ClassicLevel } = require('classic-level')
opts.db = new ClassicLevel(indexName)
}

test('create index', t => {
t.plan(1)
fii({ name: indexName }).then(db => {
fii({ name: indexName, ...opts }).then(db => {
global[indexName] = db
t.ok(db, !undefined)
})
})

test('timestamp was created', t => {
t.plan(1)
global[indexName].STORE.get(['~CREATED'])
global[indexName].STORE.get(['~CREATED'], levelOptions)
.then(created => {
timestamp = created
return t.pass('timestamp created')
}).catch(t.error)
})
.catch(t.error)
})

test('can read CREATED timestamp with API', t => {
@@ -43,7 +51,7 @@ test('confirm index is closed', t => {

test('recreate index', t => {
t.plan(1)
fii({ name: indexName }).then(db => {
fii({ name: indexName, ...opts }).then(db => {
global[indexName] = db
t.ok(db, !undefined)
})
13 changes: 8 additions & 5 deletions test/src/LAST_UPDATED-test.js
@@ -1,4 +1,5 @@
const fii = require('../../')
const levelOptions = require('../../src/options.js')
const test = require('tape')

const sandbox = 'test/sandbox/'
@@ -16,7 +17,7 @@ test('create index', t => {

test('LAST_UPDATED timestamp was created', t => {
t.plan(1)
global[indexName].STORE.get(['~LAST_UPDATED'])
global[indexName].STORE.get(['~LAST_UPDATED'], levelOptions)
.then(created => {
timestamp = created
return t.pass('LAST_UPDATED timestamp created ' + timestamp)
@@ -30,8 +31,10 @@ test('can read LAST_UPDATED timestamp with API', t => {

test('when adding a new doc, LAST_UPDATE increments', t => {
t.plan(1)
global[indexName].PUT([{
text: 'this is a new doc'.split()
}]).then(global[indexName].LAST_UPDATED)
.then(newTimestamp => t.ok(newTimestamp > timestamp))
setTimeout(function () { // wait to ensure that newer timestamp is bigger
global[indexName].PUT([{
text: 'this is a new doc'.split()
}]).then(global[indexName].LAST_UPDATED)
.then(newTimestamp => t.ok(newTimestamp > timestamp))
}, 100)
})
6 changes: 4 additions & 2 deletions test/src/dont-index-certain-fields-test.js
@@ -1,4 +1,6 @@
const fii = require('../../')
const levelOptions = require('../../src/options.js')

const test = require('tape')
const { EntryStream } = require('level-read-stream')

@@ -90,7 +92,7 @@ test('analyse index', t => {
{ key: ['IDX', 'make', ['Tesla']], value: ['0', '2'] }
]
t.plan(storeState.length)
const r = new EntryStream(global[indexName].STORE, { lt: ['~'] })
const r = new EntryStream(global[indexName].STORE, { lt: ['~'], ...levelOptions })
r.on('data', d => t.deepEqual(d, storeState.shift()))
})

@@ -178,6 +180,6 @@ test('analyse index', t => {
{ key: ['IDX', 'make', ['Tesla']], value: ['0', '2'] }
]
t.plan(storeState.length)
const r = new EntryStream(global[indexName2].STORE, { lt: ['~'] })
const r = new EntryStream(global[indexName2].STORE, { lt: ['~'], ...levelOptions })
r.on('data', d => t.deepEqual(d, storeState.shift()))
})
7 changes: 5 additions & 2 deletions test/src/indexing-arrays-test.js
@@ -1,4 +1,5 @@
const fii = require('../../')
const levelOptions = require('../../src/options.js')
const test = require('tape')
const { EntryStream } = require('level-read-stream')

@@ -84,7 +85,8 @@ test('fields are indexed correctly when there are nested arrays involved', t =>
t.plan(expected.length)
new EntryStream(global[indexName].STORE, {
gte: ['FIELD', ''],
lte: ['FIELD', '○']
lte: ['FIELD', '○'],
...levelOptions
}).on('data', d => t.deepEqual(d, expected.shift()))
})

@@ -122,6 +124,7 @@ test('tokens are indexed correctly when there are nested arrays involved', t =>
t.plan(expected.length)
new EntryStream(global[indexName].STORE, {
gte: ['IDX'],
lte: ['IDX', '○']
lte: ['IDX', '○'],
...levelOptions
}).on('data', d => t.deepEqual(d, expected.shift()))
})