Skip to content

Commit

Permalink
Merge branch 'master' into mikelehen/firestore-strict-bind-call-apply
Browse files Browse the repository at this point in the history
  • Loading branch information
Michael Lehenbauer committed Aug 9, 2019
2 parents f746285 + 140d352 commit 90f7de0
Show file tree
Hide file tree
Showing 3 changed files with 90 additions and 3 deletions.
6 changes: 4 additions & 2 deletions packages/database/src/core/RepoManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -99,15 +99,17 @@ export class RepoManager {
);
}

const parsedUrl = parseRepoInfo(dbUrl);
const repoInfo = parsedUrl.repoInfo;
let parsedUrl = parseRepoInfo(dbUrl);
let repoInfo = parsedUrl.repoInfo;

let dbEmulatorHost: string | undefined = undefined;
if (typeof process !== 'undefined') {
dbEmulatorHost = process.env[FIREBASE_DATABASE_EMULATOR_HOST_VAR];
}
if (dbEmulatorHost) {
dbUrl = `http://${dbEmulatorHost}?ns=${repoInfo.namespace}`;
parsedUrl = parseRepoInfo(dbUrl);
repoInfo = parsedUrl.repoInfo;
}

validateUrl('Invalid Firebase Database URL', 1, parsedUrl);
Expand Down
9 changes: 9 additions & 0 deletions packages/database/test/database.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,15 @@ describe('Database Tests', function() {
expect(db.ref().toString()).to.equal('https://bar.firebaseio.com/');
});

it('Interprets FIREBASE_DATABASE_EMULATOR_HOST var correctly', function() {
  // Point the SDK at a local emulator via the environment variable.
  process.env['FIREBASE_DATABASE_EMULATOR_HOST'] = 'localhost:9000';
  try {
    var db = defaultApp.database('https://bar.firebaseio.com');
    expect(db).to.be.ok;
    // The emulator host overrides the URL's host, while the namespace
    // from the original URL ('bar') must be preserved.
    expect(db.repo_.repoInfo_.namespace).to.equal('bar');
    expect(db.repo_.repoInfo_.host).to.equal('localhost:9000');
  } finally {
    // Clean up unconditionally so a failing assertion above cannot leak
    // the emulator env var into subsequent tests.
    delete process.env['FIREBASE_DATABASE_EMULATOR_HOST'];
  }
});

it('Different instances for different URLs', function() {
var db1 = defaultApp.database('http://foo1.bar.com');
var db2 = defaultApp.database('http://foo2.bar.com');
Expand Down
78 changes: 77 additions & 1 deletion packages/firestore/test/unit/specs/limit_spec.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ describeSpec('Limits:', [], () => {
});

specTest(
'Initial snapshots for limit queries are re-filled from cache',
'Initial snapshots for limit queries are re-filled from cache (with removal)',
[],
() => {
// Verify that views for limit queries are re-filled even if the initial
Expand All @@ -205,8 +205,84 @@ describeSpec('Limits:', [], () => {
.watchAcksFull(fullQuery, 1003, doc1, doc2, doc3)
.expectEvents(fullQuery, { added: [doc1, doc2, doc3] })
.userUnlistens(fullQuery)
.userListens(limitQuery)
.expectEvents(limitQuery, { added: [doc1, doc2], fromCache: true })
.watchAcksFull(limitQuery, 1004, doc1, doc2)
.expectEvents(limitQuery, {})
.userUnlistens(limitQuery)
.watchRemoves(limitQuery)
.userSets('collection/a', { matches: false })
.userListens(limitQuery, 'resume-token-1004')
.expectEvents(limitQuery, { added: [doc2, doc3], fromCache: true });
}
);

specTest(
'Initial snapshots for limit queries are re-filled from cache (with latency-compensated edit)',
[],
() => {
// Verify that views for limit queries contain the correct set of documents
// even if a previously matching document receives a latency-compensated update
// that makes it sort below an older document.
const fullQuery = Query.atPath(path('collection'));
const limitQuery = Query.atPath(path('collection'))
.addOrderBy(orderBy('pos'))
.withLimit(2);
const doc1 = doc('collection/a', 1001, { pos: 1 });
const doc2 = doc('collection/b', 1002, { pos: 2 });
const doc3 = doc('collection/c', 1003, { pos: 3 });
return spec()
// GC is disabled so all three documents remain cached after unlisten.
.withGCEnabled(false)
// Seed the cache with doc1..doc3 via the unfiltered query.
.userListens(fullQuery)
.watchAcksFull(fullQuery, 1003, doc1, doc2, doc3)
.expectEvents(fullQuery, { added: [doc1, doc2, doc3] })
.userUnlistens(fullQuery)
.watchRemoves(fullQuery)
// The limit query initially serves the two lowest-ordered docs from cache,
// then gets confirmed by the backend at snapshot 1004.
.userListens(limitQuery)
.expectEvents(limitQuery, { added: [doc1, doc2], fromCache: true })
.watchAcksFull(limitQuery, 1004, doc1, doc2)
.expectEvents(limitQuery, {})
.userUnlistens(limitQuery)
.watchRemoves(limitQuery)
// Local (latency-compensated) write moves doc1 to pos 4, past doc2/doc3.
.userSets('collection/a', { pos: 4 })
// On re-listen the view must be re-filled from cache with doc2 and doc3,
// not the stale doc1/doc2 pair the resume token refers to.
.userListens(limitQuery, 'resume-token-1004')
.expectEvents(limitQuery, { added: [doc2, doc3], fromCache: true });
}
);

specTest(
'Initial snapshots for limit queries are re-filled from cache (with update from backend)',
[],
() => {
// Verify that views for limit queries contain the correct set of documents
// even if a previously matching document receives an update from the backend
// that makes it sort below an older document.
const fullQuery = Query.atPath(path('collection'));
const limitQuery = Query.atPath(path('collection'))
.addOrderBy(orderBy('pos'))
.withLimit(2);
const doc1 = doc('collection/a', 1001, { pos: 1 });
// Backend-edited version of doc1: same key, newer version, higher pos.
const doc1Edited = doc('collection/a', 1005, { pos: 4 });
const doc2 = doc('collection/b', 1002, { pos: 2 });
const doc3 = doc('collection/c', 1003, { pos: 3 });
return spec()
// GC is disabled so all three documents remain cached after unlisten.
.withGCEnabled(false)
// Seed the cache with doc1..doc3 via the unfiltered query.
.userListens(fullQuery)
.watchAcksFull(fullQuery, 1003, doc1, doc2, doc3)
.expectEvents(fullQuery, { added: [doc1, doc2, doc3] })
.userUnlistens(fullQuery)
.watchRemoves(fullQuery)
// The limit query initially serves the two lowest-ordered docs from cache,
// then gets confirmed by the backend at snapshot 1004.
.userListens(limitQuery)
.expectEvents(limitQuery, { added: [doc1, doc2], fromCache: true })
.watchAcksFull(limitQuery, 1004, doc1, doc2)
.expectEvents(limitQuery, {})
.userUnlistens(limitQuery)
.watchRemoves(limitQuery)
// Re-listen to the full query so the backend can deliver the edited doc1
// (pos 4 at snapshot 1005), updating the cache behind the limit query's back.
.userListens(fullQuery, 'resume-token-1003')
.expectEvents(fullQuery, { added: [doc1, doc2, doc3], fromCache: true })
.watchAcksFull(fullQuery, 1005, doc1Edited)
.expectEvents(fullQuery, { modified: [doc1Edited] })
// On re-listen the limit view must be re-filled from cache with doc2 and
// doc3, since the edited doc1 now sorts below both.
.userListens(limitQuery, 'resume-token-1004')
.expectEvents(limitQuery, { added: [doc2, doc3], fromCache: true });
}
);
Expand Down

0 comments on commit 90f7de0

Please sign in to comment.