Skip to content

Commit

Permalink
simplified crawlFactory
Browse files Browse the repository at this point in the history
  • Loading branch information
pieterjan84 committed Mar 19, 2024
1 parent 9760e4c commit 671f890
Show file tree
Hide file tree
Showing 7 changed files with 74 additions and 41 deletions.
16 changes: 8 additions & 8 deletions examples/crawl.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
const jsonStorage = require('../lib').jsonStorage;
const blocked = require('blocked-at');
const { QuorumSet } = require('@stellarbeat/js-stellarbeat-shared');
const { createCrawler } = require('../lib');
const { createCrawler, createCrawlFactory } = require('../lib');
const { getConfigFromEnv } = require('@stellarbeat/js-stellar-node-connector');
const { CrawlState } = require('../lib/crawl-state');
const { CrawlerConfiguration } = require('../lib/crawler-configuration');

// noinspection JSIgnoredPromiseFromCall
Expand Down Expand Up @@ -51,6 +49,7 @@ async function main() {
const crawlerConfig = new CrawlerConfiguration(config);
crawlerConfig.maxOpenConnections = 100;
let myCrawler = createCrawler(crawlerConfig);
const factory = createCrawlFactory(crawlerConfig);

try {
let knownQuorumSets = new Map();
Expand All @@ -65,17 +64,18 @@ async function main() {
.filter((node) => topTierQSet.validators.includes(node.publicKey))
.map((node) => [node.ip, node.port]);

const crawlState = new CrawlState(
const crawl = factory.createCrawl(
addresses,
topTierAddresses,
topTierQSet,
knownQuorumSets,
{
sequence: BigInt(0),
closeTime: new Date(0)
},
config.network,
myCrawler.logger
knownQuorumSets
);
let result = await myCrawler.crawl(addresses, topTierAddresses, crawlState);

let result = await myCrawler.startCrawl(crawl);
console.log(
'[MAIN] Writing results to file nodes.json in directory crawl_result'
);
Expand Down
36 changes: 20 additions & 16 deletions src/__tests__/crawler.integration.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ import { xdr, Keypair, hash, Networks } from '@stellar/stellar-base';
import { QuorumSet } from '@stellarbeat/js-stellarbeat-shared';
import { NodeConfig } from '@stellarbeat/js-stellar-node-connector/lib/node-config';
import { ok, Result, err } from 'neverthrow';
import { CrawlerConfiguration, createCrawler } from '../index';
import {
CrawlerConfiguration,
createCrawler,
createCrawlFactory
} from '../index';
import { StellarMessageWork } from '@stellarbeat/js-stellar-node-connector/lib/connection/connection';
import { NodeAddress } from '../node-address';
import { mock } from 'jest-mock-extended';
import { P } from 'pino';
import { CrawlFactory } from '../crawl-factory';
import { ObservationFactory } from '../network-observer/observation-factory';

jest.setTimeout(60000);

Expand Down Expand Up @@ -169,9 +169,8 @@ it('should crawl, listen for validating nodes and harvest quorumSets', async ()
crawlerConfig.syncingTimeoutMS = 100;
crawlerConfig.quorumSetRequestTimeoutMS = 100;
const crawler = createCrawler(crawlerConfig);
const crawlerFactory = new CrawlFactory(new ObservationFactory());
const crawlerFactory = createCrawlFactory(crawlerConfig);
const crawl = crawlerFactory.createCrawl(
nodeConfig.network,
[peerNodeAddress, publicKeyReusingPeerNodeAddress],
[],
trustedQSet,
Expand All @@ -181,8 +180,7 @@ it('should crawl, listen for validating nodes and harvest quorumSets', async ()
value: '',
localCloseTime: new Date(0)
},
new Map<string, QuorumSet>(),
mock<P.Logger>()
new Map<string, QuorumSet>()
);

const result = await crawler.startCrawl(crawl);
Expand Down Expand Up @@ -220,12 +218,19 @@ it('should hit the max crawl limit', async function () {
const nodeConfig = getConfigFromEnv();
nodeConfig.network = Networks.TESTNET;

const crawler = createCrawler(
new CrawlerConfiguration(nodeConfig, 25, 1000, new Set(), 1000, 100, 100)
const crawlerConfig = new CrawlerConfiguration(
nodeConfig,
25,
1000,
new Set(),
1000,
100,
100
);
const crawlerFactory = new CrawlFactory(new ObservationFactory());
const crawl = crawlerFactory.createCrawl(
nodeConfig.network,
const crawler = createCrawler(crawlerConfig);
const crawlFactory = createCrawlFactory(crawlerConfig);

const crawl = crawlFactory.createCrawl(
[peerNodeAddress, publicKeyReusingPeerNodeAddress],
[],
trustedQSet,
Expand All @@ -235,8 +240,7 @@ it('should hit the max crawl limit', async function () {
value: '',
localCloseTime: new Date(0)
},
new Map<string, QuorumSet>(),
mock<P.Logger>()
new Map<string, QuorumSet>()
);

try {
Expand Down
10 changes: 6 additions & 4 deletions src/__tests__/crawler.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,11 @@ import { CrawlFactory } from '../crawl-factory';
import { Observation } from '../network-observer/observation';

describe('Crawler', () => {
const crawlFactory = new CrawlFactory(new ObservationFactory());
const crawlFactory = new CrawlFactory(
new ObservationFactory(),
'test',
mock<P.Logger>()
);
beforeEach(() => {
jest.clearAllMocks();
});
Expand Down Expand Up @@ -47,7 +51,6 @@ describe('Crawler', () => {
logger
);
const crawl = crawlFactory.createCrawl(
'test',
[['peer', 2]],
[['top', 1]],
new QuorumSet(2, []),
Expand All @@ -57,8 +60,7 @@ describe('Crawler', () => {
sequence: BigInt(0),
value: ''
},
new Map(),
logger
new Map()
);

return {
Expand Down
14 changes: 8 additions & 6 deletions src/crawl-factory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,21 @@ import { P } from 'pino';
import { PeerNodeCollection } from './peer-node-collection';

export class CrawlFactory {
constructor(private observationFactory: ObservationFactory) {}
constructor(
private observationFactory: ObservationFactory,
private network: string,
private logger: P.Logger
) {}
public createCrawl(
network: string, //todo: configuration?
nodesToCrawl: NodeAddress[],
topTierAddresses: NodeAddress[],
topTierQuorumSet: QuorumSet,
latestConfirmedClosedLedger: Ledger,
quorumSets: Map<string, QuorumSet>,
logger: P.Logger
quorumSets: Map<string, QuorumSet>
): Crawl {
const observation = this.observationFactory.createObservation(
network,
new Slots(topTierQuorumSet, logger),
this.network,
new Slots(topTierQuorumSet, this.logger),
topTierAddresses,
new PeerNodeCollection(),
latestConfirmedClosedLedger,
Expand Down
4 changes: 2 additions & 2 deletions src/crawl-logger.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ export class CrawlLogger {
console.time('crawl');
this._crawl = crawl;
this.logger.info(
'Starting crawl with seed of ' + crawl.nodesToCrawl + 'addresses.'
'Starting crawl with seed of ' + crawl.nodesToCrawl.length + 'addresses.'
);
this.loggingTimer = setInterval(() => {
this.logger.info({
Expand All @@ -38,7 +38,7 @@ export class CrawlLogger {
crawl.observation.topTierAddressesSet.has(address)
).length
});
}, 10000);
}, 5000);
}

stop() {
Expand Down
27 changes: 23 additions & 4 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,21 +24,40 @@ import { Timers } from './utilities/timers';
import { TimerFactory } from './utilities/timer-factory';
import { ConsensusTimer } from './network-observer/consensus-timer';
import { ObservationFactory } from './network-observer/observation-factory';
import { CrawlFactory } from './crawl-factory';

export { Crawler } from './crawler';
export { CrawlResult } from './crawl-result';
export { PeerNode } from './peer-node';
export { default as jsonStorage } from './utilities/json-storage';

/**
 * Creates the default pino logger used by the crawler.
 * The log level is taken from the LOG_LEVEL environment variable and
 * falls back to 'info' when unset or empty; `base: undefined` strips
 * the default pid/hostname bindings from every log line.
 */
export function createLogger(): pino.Logger {
	const options: pino.LoggerOptions = {
		level: process.env.LOG_LEVEL || 'info',
		base: undefined
	};
	return pino(options);
}

/**
 * Creates a CrawlFactory wired with the network id from the given
 * crawler configuration.
 *
 * @param config - Crawler configuration; only `nodeConfig.network` is read here.
 * @param logger - Optional pino logger; when omitted, a default logger is
 *                 created via createLogger().
 * @returns A CrawlFactory that produces Crawl instances for this network.
 */
export function createCrawlFactory(
	config: CrawlerConfiguration,
	logger?: pino.Logger
): CrawlFactory {
	// ?? keeps a caller-supplied logger and only builds the default when absent.
	return new CrawlFactory(
		new ObservationFactory(),
		config.nodeConfig.network,
		logger ?? createLogger()
	);
}

export function createCrawler(
config: CrawlerConfiguration,
logger?: pino.Logger
): Crawler {
if (!logger) {
logger = pino({
level: process.env.LOG_LEVEL || 'info',
base: undefined
});
logger = createLogger();
}

const node = createNode(config.nodeConfig, logger);
Expand Down
8 changes: 7 additions & 1 deletion src/network-observer/observation-manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,13 @@ export class ObservationManager {
}

private syncCompleted(observation: Observation) {
this.logger.info('Moving to synced state');
this.logger.info(
{
topTierConnections:
this.connectionManager.getNumberOfActiveConnections()
},
'Moving to synced state'
);
observation.moveToSyncedState();
this.startNetworkConsensusTimer(observation);
}
Expand Down

0 comments on commit 671f890

Please sign in to comment.