diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx
index e46804a658803a8..ae4fc6b04b002d0 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx
@@ -13,10 +13,14 @@ import React from 'react';
import { shallow } from 'enzyme';
+import { getPageHeaderActions } from '../../../test_helpers';
+
import { AddDomainFlyout } from './components/add_domain/add_domain_flyout';
import { AddDomainForm } from './components/add_domain/add_domain_form';
import { AddDomainFormSubmitButton } from './components/add_domain/add_domain_form_submit_button';
import { CrawlRequestsTable } from './components/crawl_requests_table';
+import { CrawlerStatusBanner } from './components/crawler_status_banner';
+import { CrawlerStatusIndicator } from './components/crawler_status_indicator/crawler_status_indicator';
import { DomainsTable } from './components/domains_table';
import { CrawlerOverview } from './crawler_overview';
import {
@@ -75,6 +79,7 @@ const crawlRequests: CrawlRequestFromServer[] = [
describe('CrawlerOverview', () => {
const mockActions = {
fetchCrawlerData: jest.fn(),
+ getLatestCrawlRequests: jest.fn(),
};
const mockValues = {
@@ -88,12 +93,26 @@ describe('CrawlerOverview', () => {
setMockActions(mockActions);
});
- it('calls fetchCrawlerData on page load', () => {
+ it('calls fetchCrawlerData and starts polling on page load', () => {
setMockValues(mockValues);
    shallow(<CrawlerOverview />);
expect(mockActions.fetchCrawlerData).toHaveBeenCalledTimes(1);
+ expect(mockActions.getLatestCrawlRequests).toHaveBeenCalledWith(false);
+ });
+
+ it('contains a crawler status banner', () => {
+ setMockValues(mockValues);
+    const wrapper = shallow(<CrawlerOverview />);
+
+ expect(wrapper.find(CrawlerStatusBanner)).toHaveLength(1);
+ });
+
+  it('contains a crawler status indicator', () => {
+    setMockValues(mockValues);
+    const wrapper = shallow(<CrawlerOverview />);
+
+    expect(getPageHeaderActions(wrapper).find(CrawlerStatusIndicator)).toHaveLength(1);
});
it('hides the domain and crawl request tables when there are no domains, and no crawl requests', () => {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx
index 1f676467a550387..c18c1a753d2478d 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx
@@ -21,6 +21,8 @@ import { AddDomainFlyout } from './components/add_domain/add_domain_flyout';
import { AddDomainForm } from './components/add_domain/add_domain_form';
import { AddDomainFormSubmitButton } from './components/add_domain/add_domain_form_submit_button';
import { CrawlRequestsTable } from './components/crawl_requests_table';
+import { CrawlerStatusBanner } from './components/crawler_status_banner';
+import { CrawlerStatusIndicator } from './components/crawler_status_indicator/crawler_status_indicator';
import { DomainsTable } from './components/domains_table';
import { CRAWLER_TITLE } from './constants';
import { CrawlerOverviewLogic } from './crawler_overview_logic';
@@ -28,18 +30,24 @@ import { CrawlerOverviewLogic } from './crawler_overview_logic';
export const CrawlerOverview: React.FC = () => {
const { crawlRequests, dataLoading, domains } = useValues(CrawlerOverviewLogic);
- const { fetchCrawlerData } = useActions(CrawlerOverviewLogic);
+ const { fetchCrawlerData, getLatestCrawlRequests } = useActions(CrawlerOverviewLogic);
useEffect(() => {
fetchCrawlerData();
+ getLatestCrawlRequests(false);
}, []);
return (
+      pageHeader={{
+        pageTitle: CRAWLER_TITLE,
+        rightSideItems: [<CrawlerStatusIndicator />],
+      }}
       isLoading={dataLoading}
     >
+      <CrawlerStatusBanner />
+      <EuiSpacer size="l" />
{domains.length > 0 ? (
<>