11'use strict' ;
22
3- import { isLocalId } from 'pouchdb-adapter-utils' ;
43import { createError , IDB_ERROR } from 'pouchdb-errors' ;
54import { collectConflicts } from 'pouchdb-merge' ;
65
@@ -27,33 +26,18 @@ function allDocsKeys(keys, docStore, allDocsInner) {
2726 } ) ;
2827}
2928
/**
 * Build an IDBKeyRange over the 'deleted,id' index.
 *
 * Index keys are [deleted, id] pairs, which is why an exact-key lookup is
 * wrapped as [0, key] (0 = not deleted).  `start` and `end` are expected to
 * already be full index keys at the call sites.
 *
 * Instead of throwing, returns `{ error }` when IndexedDB rejects the bounds
 * (e.g. lower > upper), so callers can report a PouchDB-style error.
 */
function createKeyRange(start, end, inclusiveStart, inclusiveEnd, key, descending) {
  try {
    if (key) {
      // Exact lookup of a single non-deleted doc id.
      return IDBKeyRange.only([0, key]);
    }
    const excludeStart = !inclusiveStart;
    const excludeEnd = !inclusiveEnd;
    // IDBKeyRange.bound() always takes (lower, upper), so for descending
    // scans the endpoints — and their open/closed flags — are swapped.
    return descending
      ? IDBKeyRange.bound(end, start, excludeEnd, excludeStart)
      : IDBKeyRange.bound(start, end, excludeStart, excludeEnd);
  } catch (e) {
    return { error: e };
  }
}
5842
5943function handleKeyRangeError ( opts , metadata , err , callback ) {
@@ -96,31 +80,46 @@ export default function (txn, metadata, opts, callback) {
var results = [];
// Pending promises that must settle before the final callback fires
// (awaited in onTxnComplete).
var processing = [];

// Normalise allDocs() options into locals.
var key = 'key' in opts ? opts.key : false;
var keys = 'keys' in opts ? opts.keys : false;
var skip = opts.skip || 0;
var limit = typeof opts.limit === 'number' ? opts.limit : undefined;
var inclusiveEnd = opts.inclusive_end !== false;
var descending = 'descending' in opts && opts.descending ? 'prev' : null;
// Default the scan bounds to "everything", flipped for descending scans.
var start = 'startkey' in opts ? opts.startkey : (descending ? '\uffff' : '');
var end = 'endkey' in opts ? opts.endkey : (descending ? '' : '\uffff');

var docStore = txn.txn.objectStore(DOC_STORE);
118- txn . txn . oncomplete = onTxnComplete ;
119-
12094 if ( keys ) {
121- return allDocsKeys ( opts . keys , docStore , allDocsInner ) ;
95+ txn . txn . oncomplete = onTxnComplete ;
const allDocsInner = doc => {
  // Failed lookups from "keys" requests are passed through as error rows.
  if (doc.error) {
    return results.push(doc);
  }

  const row = {
    id: doc.id,
    key: doc.id,
    value: { rev: doc.rev },
  };

  if (doc.deleted) {
    // Deleted docs still produce a row, flagged and with a null doc.
    row.value.deleted = true;
    row.doc = null;
  } else if (opts.include_docs) {
    include_doc(row, doc);
  }

  results.push(row);
};
112+ return allDocsKeys ( keys , docStore , allDocsInner ) ;
113+ }
114+
115+ let keyRange = createKeyRange ( [ 0 , start ] , [ 0 , end ] , true , inclusiveEnd , key , descending ) ;
116+ if ( keyRange . error ) {
117+ return handleKeyRangeError ( opts , metadata , keyRange . error , callback ) ;
122118 }
123119
120+ // txn.oncomplete must be set AFTER key-range-error is generated
121+ txn . txn . oncomplete = onTxnComplete ;
122+
124123 function include_doc ( row , doc ) {
125124 var docData = doc . revs [ doc . rev ] . data ;
126125
@@ -141,79 +140,120 @@ export default function (txn, metadata, opts, callback) {
141140 }
142141 }
143142
// Fires when the IndexedDB transaction completes; assembles the allDocs
// response and invokes the caller's callback (after any pending row work).
function onTxnComplete() {
  const returnVal = {
    total_rows: metadata.doc_count,
    offset: 0,
    rows: results,
  };
  /* istanbul ignore if */
  if (opts.update_seq) {
    returnVal.update_seq = metadata.seq;
  }

  const deliver = () => callback(null, returnVal);
  if (processing.length) {
    // Wait for outstanding async row work before responding.
    Promise.all(processing).then(deliver);
  } else {
    deliver();
  }
}
const dbIndex = docStore.index('deleted,id');

if (!skip && !limit) {
  // NOTE(review): limit === 0 also lands here (treated as "no limit") —
  // confirm that is the intended CouchDB-compatible behaviour.
  fetchResults();
} else {
  // With skip and/or limit, first walk a cheap key-only cursor to locate
  // the true first/last index keys, then narrow keyRange before fetching.
  let firstKey;
  let limitKey = limit > 0;

  dbIndex.openKeyCursor(keyRange, descending || 'next').onsuccess = (e) => {
    const cursor = e.target.result;

    // Phase 1: jump over the first `skip` entries.
    if (skip) {
      if (!cursor) { return txn.txn.commit(); }
      cursor.advance(skip);
      skip = 0;
      return;
    }

    // Phase 2: record where the (post-skip) results start.
    if (firstKey === undefined) {
      firstKey = cursor && cursor.key;
      if (!firstKey) { return txn.txn.commit(); }
    }

    // Phase 3: jump ahead to the last entry `limit` allows.
    if (limit) {
      if (limit > 1 && cursor) {
        cursor.advance(limit - 1);
        limit = undefined;
        return;
      }
      limit = undefined;
    }

    if (limitKey) {
      limitKey = cursor && cursor.key;
    }
    if (!limitKey) {
      // Cursor ran off the end of the range: fall back to the range bound.
      limitKey = descending ? keyRange.lower : keyRange.upper;
    }

    keyRange = createKeyRange(firstKey, limitKey, true, inclusiveEnd, key, descending);
    if (keyRange.error) {
      txn.txn.abort();
      return handleKeyRangeError(opts, metadata, keyRange.error, callback);
    }

    fetchResults();
  };
}
201212
202- // Happens if opts does not have limit,
203- // because cursor will end normally then,
204- // when all docs are retrieved.
205- // Would not be needed, if getAll() optimization was used like in #6059
206- if ( ! doc ) { return ; }
// Reads every doc in keyRange in fixed-size batches, pushing result rows,
// then commits the transaction.
async function fetchResults() {
  // There is a risk here with getting all results into memory - if they
  // have multiple revs we may load extra data which is then discarded;
  // batching keeps that bounded.  Unused doc data is still loaded when
  // include_docs is false.
  //
  // The batch size is fairly arbitrary: bigger than a typical result set,
  // but not so big it is likely to cause memory issues.
  const batchSize = 100;

  let range = keyRange;
  do {
    range = await fetchNextBatch(range);
  } while (range);

  if (descending) {
    // getAll reads in ascending index order; flip once at the end.
    results.reverse();
  }
  return txn.txn.commit();

  // Loads one batch for `range`, pushes rows onto `results`, and resolves
  // with the key range for the next batch (or undefined when done).
  function fetchNextBatch(range) {
    return new Promise((resolve) => {
      dbIndex.getAll(range, batchSize).onsuccess = (e) => {
        const batch = e.target.result;

        for (const doc of batch) {
          const row = { id: doc.id, key: doc.id, value: { rev: doc.rev } };
          if (opts.include_docs) {
            include_doc(row, doc);
          }
          results.push(row);
        }

        // A short batch means the range is exhausted.
        if (batch.length < batchSize) {
          return resolve();
        }

        // getAll always reads ascending, so the next batch resumes just
        // above the last doc seen, regardless of requested direction.
        const lastSeenKey = [0, batch[batch.length - 1].id];
        const startKey = descending ? range.upper : lastSeenKey;
        const endKey = descending ? lastSeenKey : range.upper;
        if (startKey[1] === endKey[1]) {
          return resolve();
        }
        const incStart = Boolean(descending);
        const incEnd = descending ? false : inclusiveEnd;
        resolve(createKeyRange(startKey, endKey, incStart, incEnd, key, descending));
      };
    });
  }
}
219259}