From 9e57f8522abcff3e3d89d409221089f54151ae7 Mon Sep 17 00:00:00 2001
From: ricoberger
Date: Sun, 31 Oct 2021 10:37:42 +0100
Subject: [PATCH] [elasticsearch] Use pagination instead of infinite scrolling for results

Instead of infinite scrolling (via the load more button), we are now using
pagination to display the documents returned by our API. For that we can
remove the deprecated scroll API and directly request 1000 documents from the
Elasticsearch API.

Using pagination instead of infinite scrolling improves the performance of our
React UI when the user wants to view a lot of documents: e.g. when the user
wants to show 500 documents, the UI becomes laggy with infinite scrolling.
This problem can be avoided with pagination.
---
 CHANGELOG.md                                  |  1 +
 plugins/elasticsearch/elasticsearch.go        |  7 +-
 .../elasticsearch/pkg/instance/instance.go    | 17 ++--
 plugins/elasticsearch/pkg/instance/structs.go |  3 +-
 .../src/components/page/PageLogs.tsx          | 55 +++++--------
 .../src/components/panel/Logs.tsx             | 38 +++------
 .../src/components/panel/LogsActions.tsx      | 54 +++++++------
 .../src/components/panel/LogsDocuments.tsx    | 78 +++++++++++++++----
 .../src/components/preview/Chart.tsx          |  5 +-
 plugins/elasticsearch/src/utils/interfaces.ts |  1 -
 10 files changed, 133 insertions(+), 126 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e88d6554a..6408ba56c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -32,6 +32,7 @@ NOTE: As semantic versioning states all 0.y.z releases can contain breaking chan
 - [#185](https://github.com/kobsio/kobs/pull/185): [clickhouse] Use pagination instead of Intersection Observer API to display logs.
 - [#188](https://github.com/kobsio/kobs/pull/188): [sql] Replace the `GetQueryResults` function with the implemention used in the Clickhouse plugin, to have a proper handling for float values.
 - [#190](https://github.com/kobsio/kobs/pull/190): [core] Unify list layout across plugin.
+- [#194](https://github.com/kobsio/kobs/pull/194): [elasticsearch] Use pagination instead of infinite scrolling to display logs.
 
 ## [v0.6.0](https://github.com/kobsio/kobs/releases/tag/v0.6.0) (2021-10-11)
 
diff --git a/plugins/elasticsearch/elasticsearch.go b/plugins/elasticsearch/elasticsearch.go
index 2e72c1bc5..d7c5e0f0c 100644
--- a/plugins/elasticsearch/elasticsearch.go
+++ b/plugins/elasticsearch/elasticsearch.go
@@ -43,17 +43,16 @@ func (router *Router) getInstance(name string) *instance.Instance {
 
 // getLogs returns the raw documents for a given query from Elasticsearch. The result also contains the distribution of
 // the documents in the given time range. The name of the Elasticsearch instance must be set via the name path
-// parameter, all other values like the query, scrollID, start and end time are set via query parameters. These
+// parameter, all other values like the query, start and end time are set via query parameters. These
 // parameters are then passed to the GetLogs function of the Elasticsearch instance, which returns the documents and
 // buckets.
 func (router *Router) getLogs(w http.ResponseWriter, r *http.Request) {
 	name := chi.URLParam(r, "name")
 	query := r.URL.Query().Get("query")
-	scrollID := r.URL.Query().Get("scrollID")
 	timeStart := r.URL.Query().Get("timeStart")
 	timeEnd := r.URL.Query().Get("timeEnd")
 
-	log.WithFields(logrus.Fields{"name": name, "query": query, "scrollID": scrollID, "timeStart": timeStart, "timeEnd": timeEnd}).Tracef("getLogs")
+	log.WithFields(logrus.Fields{"name": name, "query": query, "timeStart": timeStart, "timeEnd": timeEnd}).Tracef("getLogs")
 
 	i := router.getInstance(name)
 	if i == nil {
@@ -73,7 +72,7 @@ func (router *Router) getLogs(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	data, err := i.GetLogs(r.Context(), query, scrollID, parsedTimeStart, parsedTimeEnd)
+	data, err := i.GetLogs(r.Context(), query, parsedTimeStart, parsedTimeEnd)
 	if err != nil {
 		errresponse.Render(w, r, err, http.StatusInternalServerError, "Could not get logs")
 		return
diff --git a/plugins/elasticsearch/pkg/instance/instance.go b/plugins/elasticsearch/pkg/instance/instance.go
index 8c060aaab..82d06abb5 100644
--- a/plugins/elasticsearch/pkg/instance/instance.go
+++ b/plugins/elasticsearch/pkg/instance/instance.go
@@ -36,20 +36,14 @@ type Instance struct {
 }
 
 // GetLogs returns the raw log documents and the buckets for the distribution of the logs accross the selected time
-// range. We have to pass a query, start and end time to the function. The scrollID can be an empty string to start a
-// new query. If a scrollID is provided it will be used for pagination.
-func (i *Instance) GetLogs(ctx context.Context, query, scrollID string, timeStart, timeEnd int64) (*Data, error) {
+// range. We have to pass a query, start and end time to the function.
+func (i *Instance) GetLogs(ctx context.Context, query string, timeStart, timeEnd int64) (*Data, error) {
 	var err error
 	var body []byte
 	var url string
 
-	if scrollID == "" {
-		url = fmt.Sprintf("%s/_search?scroll=15m", i.address)
-		body = []byte(fmt.Sprintf(`{"size":100,"sort":[{"@timestamp":{"order":"desc"}}],"query":{"bool":{"must":[{"range":{"@timestamp":{"gte":"%d","lte":"%d"}}},{"query_string":{"query":"%s"}}]}},"aggs":{"logcount":{"auto_date_histogram":{"field":"@timestamp","buckets":30}}}}`, timeStart*1000, timeEnd*1000, strings.ReplaceAll(query, "\"", "\\\"")))
-	} else {
-		url = fmt.Sprintf("%s/_search/scroll", i.address)
-		body = []byte(`{"scroll" : "15m", "scroll_id" : "` + scrollID + `"}`)
-	}
+	url = fmt.Sprintf("%s/_search", i.address)
+	body = []byte(fmt.Sprintf(`{"size":1000,"sort":[{"@timestamp":{"order":"desc"}}],"query":{"bool":{"must":[{"range":{"@timestamp":{"gte":"%d","lte":"%d"}}},{"query_string":{"query":"%s"}}]}},"aggs":{"logcount":{"auto_date_histogram":{"field":"@timestamp","buckets":30}}}}`, timeStart*1000, timeEnd*1000, strings.ReplaceAll(query, "\"", "\\\"")))
 
 	log.WithFields(logrus.Fields{"query": string(body)}).Debugf("Run Elasticsearch query")
 
@@ -75,14 +69,13 @@ func (i *Instance) GetLogs(ctx context.Context, query, scrollID string, timeStar
 	}
 
 	data := &Data{
-		ScrollID:  res.ScrollID,
 		Took:      res.Took,
 		Hits:      res.Hits.Total.Value,
 		Documents: res.Hits.Hits,
 		Buckets:   res.Aggregations.LogCount.Buckets,
 	}
 
-	log.WithFields(logrus.Fields{"scrollID": data.ScrollID, "took": data.Took, "hits": data.Hits, "documents": len(data.Documents), "buckets": len(data.Buckets)}).Debugf("Elasticsearch query results")
+	log.WithFields(logrus.Fields{"took": data.Took, "hits": data.Hits, "documents": len(data.Documents), "buckets": len(data.Buckets)}).Debugf("Elasticsearch
query results") return data, nil } diff --git a/plugins/elasticsearch/pkg/instance/structs.go b/plugins/elasticsearch/pkg/instance/structs.go index 2a6384766..e410d09c1 100644 --- a/plugins/elasticsearch/pkg/instance/structs.go +++ b/plugins/elasticsearch/pkg/instance/structs.go @@ -50,9 +50,8 @@ type ResponseError struct { } // Data is the transformed Response result, which is passed to the React UI. It contains only the important fields, like -// the scrollID, the time a request took, the number of hits, the documents and the buckets. +// the time a request took, the number of hits, the documents and the buckets. type Data struct { - ScrollID string `json:"scrollID"` Took int64 `json:"took"` Hits int64 `json:"hits"` Documents []map[string]interface{} `json:"documents"` diff --git a/plugins/elasticsearch/src/components/page/PageLogs.tsx b/plugins/elasticsearch/src/components/page/PageLogs.tsx index d8f76d241..2e98a42df 100644 --- a/plugins/elasticsearch/src/components/page/PageLogs.tsx +++ b/plugins/elasticsearch/src/components/page/PageLogs.tsx @@ -2,8 +2,6 @@ import { Alert, AlertActionLink, AlertVariant, - Button, - ButtonVariant, Card, CardActions, CardBody, @@ -14,7 +12,7 @@ import { GridItem, Spinner, } from '@patternfly/react-core'; -import { InfiniteData, InfiniteQueryObserverResult, QueryObserverResult, useInfiniteQuery } from 'react-query'; +import { QueryObserverResult, useQuery } from 'react-query'; import React from 'react'; import { useHistory } from 'react-router-dom'; @@ -46,14 +44,12 @@ const PageLogs: React.FunctionComponent = ({ }: IPageLogsProps) => { const history = useHistory(); - const { isError, isFetching, isLoading, data, error, fetchNextPage, refetch } = useInfiniteQuery( + const { isError, isFetching, isLoading, data, error, refetch } = useQuery( ['elasticsearch/logs', query, times], - async ({ pageParam }) => { + async () => { try { const response = await fetch( - `/api/plugins/elasticsearch/logs/${name}?query=${query}&timeStart=${times.timeStart}&timeEnd=${ - times.timeEnd - }&scrollID=${pageParam || ''}`, + `/api/plugins/elasticsearch/logs/${name}?query=${query}&timeStart=${times.timeStart}&timeEnd=${times.timeEnd}`, { method: 'get', }, @@ -74,7 +70,6 @@ const PageLogs: React.FunctionComponent = ({ } }, { - getNextPageParam: (lastPage, pages) => lastPage.scrollID, keepPreviousData: true, }, ); @@ -95,7 +90,7 @@ const PageLogs: React.FunctionComponent = ({ actionLinks={ history.push('/')}>Home - , Error>> => refetch()}> + > => refetch()}> Retry @@ -106,7 +101,7 @@ const PageLogs: React.FunctionComponent = ({ ); } - if (!data || data.pages.length === 0) { + if (!data) { return null; } @@ -114,11 +109,7 @@ const PageLogs: React.FunctionComponent = ({ - + @@ -126,42 +117,32 @@ const PageLogs: React.FunctionComponent = ({ - {data.pages[0].hits} Documents in {data.pages[0].took} Milliseconds + {data.hits} Documents in {data.took} Milliseconds {isFetching && } - + +

 

- {data.pages[0].documents.length > 0 ? ( + + {data.documents.length > 0 ? ( - - - - ) : null} -

 

- {data.pages[0].documents.length > 0 ? ( - - - + ) : null}
-

 

); }; diff --git a/plugins/elasticsearch/src/components/panel/Logs.tsx b/plugins/elasticsearch/src/components/panel/Logs.tsx index 52b43aa0c..fd4d33175 100644 --- a/plugins/elasticsearch/src/components/panel/Logs.tsx +++ b/plugins/elasticsearch/src/components/panel/Logs.tsx @@ -2,15 +2,13 @@ import { Alert, AlertActionLink, AlertVariant, - Button, - ButtonVariant, Select, SelectOption, SelectOptionObject, SelectVariant, Spinner, } from '@patternfly/react-core'; -import { InfiniteData, InfiniteQueryObserverResult, QueryObserverResult, useInfiniteQuery } from 'react-query'; +import { QueryObserverResult, useQuery } from 'react-query'; import React, { useState } from 'react'; import { ILogsData, IQuery } from '../../utils/interfaces'; @@ -39,14 +37,12 @@ const Logs: React.FunctionComponent = ({ const [showSelect, setShowSelect] = useState(false); const [selectedQuery, setSelectedQuery] = useState(queries[0]); - const { isError, isFetching, isLoading, data, error, fetchNextPage, refetch } = useInfiniteQuery( + const { isError, isFetching, isLoading, data, error, refetch } = useQuery( ['elasticsearch/logs', selectedQuery, times], - async ({ pageParam }) => { + async () => { try { const response = await fetch( - `/api/plugins/elasticsearch/logs/${name}?query=${selectedQuery.query}&timeStart=${times.timeStart}&timeEnd=${ - times.timeEnd - }&scrollID=${pageParam || ''}`, + `/api/plugins/elasticsearch/logs/${name}?query=${selectedQuery.query}&timeStart=${times.timeStart}&timeEnd=${times.timeEnd}`, { method: 'get', }, @@ -67,7 +63,6 @@ const Logs: React.FunctionComponent = ({ } }, { - getNextPageParam: (lastPage, pages) => lastPage.scrollID, keepPreviousData: true, }, ); @@ -87,7 +82,7 @@ const Logs: React.FunctionComponent = ({ } + actions={} >
{queries.length > 1 ? ( @@ -120,9 +115,7 @@ const Logs: React.FunctionComponent = ({ title="Could not get logs" actionLinks={ - , Error>> => refetch()} - > + > => refetch()}> Retry @@ -130,29 +123,16 @@ const Logs: React.FunctionComponent = ({ >

{error?.message}

- ) : data && data.pages.length > 0 ? ( + ) : data ? (
{showChart ? (
- +

 

) : null} - -

 

- - {data.pages[0].documents.length > 0 ? ( - - ) : null} +
) : null}
diff --git a/plugins/elasticsearch/src/components/panel/LogsActions.tsx b/plugins/elasticsearch/src/components/panel/LogsActions.tsx index ed4102c00..63b021852 100644 --- a/plugins/elasticsearch/src/components/panel/LogsActions.tsx +++ b/plugins/elasticsearch/src/components/panel/LogsActions.tsx @@ -1,4 +1,4 @@ -import { CardActions, Dropdown, DropdownItem, KebabToggle } from '@patternfly/react-core'; +import { CardActions, Dropdown, DropdownItem, KebabToggle, Spinner } from '@patternfly/react-core'; import React, { useState } from 'react'; import { Link } from 'react-router-dom'; @@ -9,33 +9,43 @@ interface IActionsProps { name: string; queries: IQuery[]; times: IPluginTimes; + isFetching: boolean; } -export const Actions: React.FunctionComponent = ({ name, queries, times }: IActionsProps) => { +export const Actions: React.FunctionComponent = ({ + name, + queries, + times, + isFetching, +}: IActionsProps) => { const [show, setShow] = useState(false); return ( - setShow(!show)} />} - isOpen={show} - isPlain={true} - position="right" - dropdownItems={queries.map((query, index) => ( - `&field=${field}`).join('') : '' - }`} - > - {query.name} - - } - /> - ))} - /> + {isFetching ? ( + + ) : ( + setShow(!show)} />} + isOpen={show} + isPlain={true} + position="right" + dropdownItems={queries.map((query, index) => ( + `&field=${field}`).join('') : '' + }`} + > + {query.name} + + } + /> + ))} + /> + )} ); }; diff --git a/plugins/elasticsearch/src/components/panel/LogsDocuments.tsx b/plugins/elasticsearch/src/components/panel/LogsDocuments.tsx index e0764ecd2..200aef2e8 100644 --- a/plugins/elasticsearch/src/components/panel/LogsDocuments.tsx +++ b/plugins/elasticsearch/src/components/panel/LogsDocuments.tsx @@ -1,22 +1,30 @@ -import { TableComposable, TableVariant, Tbody, Th, Thead, Tr } from '@patternfly/react-table'; -import React from 'react'; +import { Pagination, PaginationVariant } from '@patternfly/react-core'; +import React, { useState } from 'react'; +import { TableComposable, TableVariant, Tbody, Td, Th, Thead, Tr } from '@patternfly/react-table'; -import { ILogsData } from '../../utils/interfaces'; +import { IDocument } from '../../utils/interfaces'; import LogsDocument from './LogsDocument'; +interface IPage { + page: number; + perPage: number; +} + interface ILogsDocumentsProps { - pages: ILogsData[]; + documents: IDocument[]; fields?: string[]; addFilter?: (filter: string) => void; selectField?: (field: string) => void; } const LogsDocuments: React.FunctionComponent = ({ - pages, + documents, fields, addFilter, selectField, }: ILogsDocumentsProps) => { + const [page, setPage] = useState({ page: 1, perPage: 100 }); + return ( @@ -32,18 +40,56 @@ const LogsDocuments: React.FunctionComponent = ({ - {pages.map((page, pageIndex) => - page.documents.map((document, documentIndex) => ( - - )), - )} + {documents + ? documents + .slice((page.page - 1) * page.perPage, page.page * page.perPage) + .map((document, index) => ( + + )) + : null} + {documents && ( + + + + 0 ? 
fields.length + 1 : 2}> + + setPage({ page: newPage, perPage: page.perPage }) + } + onPerPageSelect={( + event: React.MouseEvent | React.KeyboardEvent | MouseEvent, + newPerPage: number, + ): void => setPage({ page: page.page, perPage: newPerPage })} + onFirstClick={(event: React.SyntheticEvent, newPage: number): void => + setPage({ page: newPage, perPage: page.perPage }) + } + onLastClick={(event: React.SyntheticEvent, newPage: number): void => + setPage({ page: newPage, perPage: page.perPage }) + } + onNextClick={(event: React.SyntheticEvent, newPage: number): void => + setPage({ page: newPage, perPage: page.perPage }) + } + onPreviousClick={(event: React.SyntheticEvent, newPage: number): void => + setPage({ page: newPage, perPage: page.perPage }) + } + /> + + + + + )} ); }; diff --git a/plugins/elasticsearch/src/components/preview/Chart.tsx b/plugins/elasticsearch/src/components/preview/Chart.tsx index d11756b1d..c898d610c 100644 --- a/plugins/elasticsearch/src/components/preview/Chart.tsx +++ b/plugins/elasticsearch/src/components/preview/Chart.tsx @@ -16,7 +16,7 @@ interface IChartProps { export const Chart: React.FunctionComponent = ({ name, times, title, options }: IChartProps) => { const { isError, isLoading, data, error } = useQuery( ['elasticsearch/logs', name, options, times], - async ({ pageParam }) => { + async () => { try { if ( !options || @@ -29,7 +29,7 @@ export const Chart: React.FunctionComponent = ({ name, times, title } const response = await fetch( - `/api/plugins/elasticsearch/logs/${name}?query=${options.queries[0].query}&timeStart=${times.timeStart}&timeEnd=${times.timeEnd}&scrollID=`, + `/api/plugins/elasticsearch/logs/${name}?query=${options.queries[0].query}&timeStart=${times.timeStart}&timeEnd=${times.timeEnd}`, { method: 'get', }, @@ -50,7 +50,6 @@ export const Chart: React.FunctionComponent = ({ name, times, title } }, { - getNextPageParam: (lastPage, pages) => lastPage.scrollID, keepPreviousData: true, }, ); diff --git a/plugins/elasticsearch/src/utils/interfaces.ts b/plugins/elasticsearch/src/utils/interfaces.ts index 045682349..965cd3386 100644 --- a/plugins/elasticsearch/src/utils/interfaces.ts +++ b/plugins/elasticsearch/src/utils/interfaces.ts @@ -24,7 +24,6 @@ export interface IQuery { // ILogsData is the interface of the data returned from our Go API for the Elasticsearch plugin. The interface must // have the same fields as the Data struct from the Go implementation. export interface ILogsData { - scrollID: string; took: number; hits: number; documents: IDocument[];
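
Editor's note, for illustration only (not part of the patch): the core of this change is that the API now returns up to 1000 documents in a single response and the UI renders only the current page by slicing that array, as LogsDocuments.tsx does above. Below is a minimal, self-contained TypeScript sketch of that idea; the names IDocument, IPage, and paginate are hypothetical and used only for this sketch.

// Illustrative sketch (assumption: the API has already returned the full, up-to-1000
// document result set). The UI keeps the whole array in memory and only renders the
// slice that belongs to the currently selected page.

interface IDocument {
  [key: string]: unknown;
}

interface IPage {
  page: number;    // 1-based page index
  perPage: number; // number of documents shown per page
}

// Return only the documents that belong to the requested page.
const paginate = (documents: IDocument[], { page, perPage }: IPage): IDocument[] =>
  documents.slice((page - 1) * perPage, page * perPage);

// Example: 1000 documents with 100 per page -> page 2 contains documents 100-199.
const documents: IDocument[] = Array.from({ length: 1000 }, (_, i) => ({ position: i }));
console.log(paginate(documents, { page: 2, perPage: 100 }).length); // 100

Because only one page of rows is mounted at a time, the DOM stays small no matter how many documents the query returned, which is why the laggy rendering described in the commit message for the old load-more flow goes away.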