diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ffe3ff93..3bcb5cebf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ NOTE: As semantic versioning states all 0.y.z releases can contain breaking chan - [#4](https://github.com/kobsio/kobs/pull/4): Add Custom Resource Definition for Applications. - [#6](https://github.com/kobsio/kobs/pull/6): Add Prometheus as datasource for Application metrics. - [#8](https://github.com/kobsio/kobs/pull/8): Add new page to directly query a configured Prometheus datasource. +- [#10](https://github.com/kobsio/kobs/pull/10): Add Elasticsearch as datasource for Application logs. ### Fixed diff --git a/app/public/img/datasources/elasticsearch.png b/app/public/img/datasources/elasticsearch.png new file mode 100644 index 000000000..0d50d8311 Binary files /dev/null and b/app/public/img/datasources/elasticsearch.png differ diff --git a/app/src/app.css b/app/src/app.css index 0db6a66f6..ba50b76b8 100644 --- a/app/src/app.css +++ b/app/src/app.css @@ -87,6 +87,11 @@ height: 300px; } +.kobsio-chart-container-default-small { + width: 100%; + height: 200px; +} + .kobsio-chart-container-sparkline { width: 100%; height: 150px; @@ -104,3 +109,16 @@ position: absolute; text-align: center } + +/* kobsis-table-wrapper + * Wrap a table component, so it looks nice within a page, but allow scrolling so the user can see all the data. */ +.kobsis-table-wrapper { + max-width: 100%; + overflow-x: scroll; +} + +/* kobsio-tab-content + * Set a min height of 100% for the tab content. 
*/ +.kobsio-tab-content { + min-height: 100%; +} diff --git a/app/src/components/applications/Application.tsx b/app/src/components/applications/Application.tsx index d206232ce..d381c1cd0 100644 --- a/app/src/components/applications/Application.tsx +++ b/app/src/components/applications/Application.tsx @@ -39,6 +39,7 @@ const Applications: React.FunctionComponent = () => { const [tab, setTab] = useState(DEFAULT_TAB); const refResourcesContent = useRef(null); const refMetricsContent = useRef(null); + const refLogsContent = useRef(null); const goToOverview = (): void => { history.push('/'); @@ -117,6 +118,7 @@ const Applications: React.FunctionComponent = () => { setTab={(t: string): void => setTab(t)} refResourcesContent={refResourcesContent} refMetricsContent={refMetricsContent} + refLogsContent={refLogsContent} /> @@ -126,6 +128,7 @@ const Applications: React.FunctionComponent = () => { tab={tab} refResourcesContent={refResourcesContent} refMetricsContent={refMetricsContent} + refLogsContent={refLogsContent} /> diff --git a/app/src/components/applications/details/DrawerPanel.tsx b/app/src/components/applications/details/DrawerPanel.tsx index 37368c9a2..1981ed246 100644 --- a/app/src/components/applications/details/DrawerPanel.tsx +++ b/app/src/components/applications/details/DrawerPanel.tsx @@ -28,6 +28,7 @@ const DrawerPanel: React.FunctionComponent = ({ application, const [tab, setTab] = useState(DEFAULT_TAB); const refResourcesContent = useRef(null); const refMetricsContent = useRef(null); + const refLogsContent = useRef(null); return ( @@ -61,6 +62,7 @@ const DrawerPanel: React.FunctionComponent = ({ application, setTab={(t: string): void => setTab(t)} refResourcesContent={refResourcesContent} refMetricsContent={refMetricsContent} + refLogsContent={refLogsContent} /> = ({ application, tab={tab} refResourcesContent={refResourcesContent} refMetricsContent={refMetricsContent} + refLogsContent={refLogsContent} /> diff --git 
a/app/src/components/applications/details/Tabs.tsx b/app/src/components/applications/details/Tabs.tsx index f28e5af93..89f28830c 100644 --- a/app/src/components/applications/details/Tabs.tsx +++ b/app/src/components/applications/details/Tabs.tsx @@ -9,6 +9,7 @@ interface ITabsParams { setTab(tab: string): void; refResourcesContent: React.RefObject; refMetricsContent: React.RefObject; + refLogsContent: React.RefObject; } // Tabs renders the tabs header, which are used by the user to select a section he wants to view for an application. @@ -19,6 +20,7 @@ const Tabs: React.FunctionComponent = ({ setTab, refResourcesContent, refMetricsContent, + refLogsContent, }: ITabsParams) => { return ( = ({ tabContentId="refMetrics" tabContentRef={refMetricsContent} /> + Logs} + tabContentId="refLogs" + tabContentRef={refLogsContent} + /> ); }; diff --git a/app/src/components/applications/details/TabsContent.tsx b/app/src/components/applications/details/TabsContent.tsx index 4ae0df074..f767a9017 100644 --- a/app/src/components/applications/details/TabsContent.tsx +++ b/app/src/components/applications/details/TabsContent.tsx @@ -4,6 +4,7 @@ import { TabContent } from '@patternfly/react-core'; import { Application } from 'generated/proto/application_pb'; import { IDatasourceOptions } from 'utils/proto'; +import Logs from 'components/applications/details/logs/Logs'; import Metrics from 'components/applications/details/metrics/Metrics'; import Resources from 'components/applications/details/resources/Resources'; @@ -43,6 +44,7 @@ interface ITabsContent { tab: string; refResourcesContent: React.RefObject; refMetricsContent: React.RefObject; + refLogsContent: React.RefObject; } // TabsContent renders the content for a selected tab from the Tabs component. 
We also manage the datasource options, @@ -54,6 +56,7 @@ const TabsContent: React.FunctionComponent = ({ tab, refResourcesContent, refMetricsContent, + refLogsContent, }: ITabsContent) => { const history = useHistory(); const location = useLocation(); @@ -71,6 +74,7 @@ const TabsContent: React.FunctionComponent = ({ return ( = ({ - + + {/* We have to check if the refMetricsContent is not null, because otherwise the Metrics component will be shown below the resources component. */}
{refMetricsContent.current ? ( @@ -93,6 +105,26 @@ const TabsContent: React.FunctionComponent = ({ ) : null}
+ + + {/* We have to check if the refLogsContent is not null, because otherwise the Logs component will be shown below the resources component. */} +
+ {refLogsContent.current ? ( + + ) : null} +
+
); }; diff --git a/app/src/components/applications/details/logs/Elasticsearch.tsx b/app/src/components/applications/details/logs/Elasticsearch.tsx new file mode 100644 index 000000000..fc217f901 --- /dev/null +++ b/app/src/components/applications/details/logs/Elasticsearch.tsx @@ -0,0 +1,118 @@ +import { Alert, AlertVariant, Button } from '@patternfly/react-core'; +import React, { useCallback, useEffect, useState } from 'react'; + +import { DatasourceLogsBucket, GetLogsRequest, GetLogsResponse } from 'generated/proto/datasources_pb'; +import { ApplicationLogsQuery } from 'generated/proto/application_pb'; +import Buckets from 'components/datasources/elasticsearch/Buckets'; +import { DatasourcesPromiseClient } from 'generated/proto/datasources_grpc_web_pb'; +import Documents from 'components/datasources/elasticsearch/Documents'; +import { IDatasourceOptions } from 'utils/proto'; +import { IDocument } from 'components/datasources/elasticsearch/helpers'; +import { apiURL } from 'utils/constants'; +import { convertDatasourceOptionsToProto } from 'utils/proto'; + +const datasourcesService = new DatasourcesPromiseClient(apiURL, null, null); + +export interface IElasticsearchProps { + query?: string; + fields?: string[]; + datasourceName: string; + datasourceType: string; + datasourceOptions: IDatasourceOptions; +} + +// Elasticsearhc implements the Elasticsearch UI for kobs. It can be used to query a configured Elasticsearch instance +// and show the logs in a table. +const Elasticsearch: React.FunctionComponent = ({ + query, + fields, + datasourceName, + datasourceType, + datasourceOptions, +}: IElasticsearchProps) => { + const [hits, setHits] = useState(0); + const [took, setTook] = useState(0); + const [documents, setDocuments] = useState([]); + const [buckets, setBuckets] = useState([]); + const [error, setError] = useState(''); + + // fetchLogs fetches the logs for a given query. For the applications view, we do not care about infinite scrolling. 
+ // When a user wants to see more then the fetched logs, he has to go to the datasource view. + const fetchLogs = useCallback(async (): Promise => { + try { + if (query) { + const logsQuery = new ApplicationLogsQuery(); + logsQuery.setQuery(query); + + const getLogsRequest = new GetLogsRequest(); + getLogsRequest.setName(datasourceName); + getLogsRequest.setScrollid(''); + getLogsRequest.setOptions(convertDatasourceOptionsToProto(datasourceOptions)); + getLogsRequest.setQuery(logsQuery); + + const getLogsResponse: GetLogsResponse = await datasourcesService.getLogs(getLogsRequest, null); + + const parsed = JSON.parse(getLogsResponse.getLogs()); + if (parsed.length === 0) { + throw new Error('No documents were found'); + } else { + if (getLogsResponse.toObject().bucketsList.length > 0) { + setBuckets(getLogsResponse.toObject().bucketsList); + } + + setDocuments(parsed); + setHits(getLogsResponse.getHits()); + setTook(getLogsResponse.getTook()); + setError(''); + } + } + } catch (err) { + setError(err.message); + } + }, [query, datasourceName, datasourceOptions]); + + useEffect(() => { + fetchLogs(); + }, [fetchLogs]); + + return ( + + {error ? ( + +

 

+ +

{error}

+
+
+ ) : ( + +

 

+ + {buckets.length > 0 ? : null} + +

 

+ + {documents.length > 0 ? ( + + ) : null} + +

 

+ + +
+ )} +
+ ); +}; + +export default Elasticsearch; diff --git a/app/src/components/applications/details/logs/Logs.tsx b/app/src/components/applications/details/logs/Logs.tsx new file mode 100644 index 000000000..36c397d63 --- /dev/null +++ b/app/src/components/applications/details/logs/Logs.tsx @@ -0,0 +1,126 @@ +import { Alert, AlertActionLink, AlertVariant } from '@patternfly/react-core'; +import React, { useCallback, useEffect, useState } from 'react'; +import ChartAreaIcon from '@patternfly/react-icons/dist/js/icons/chart-area-icon'; + +import { Application, ApplicationLogsQuery } from 'generated/proto/application_pb'; +import { GetDatasourceRequest, GetDatasourceResponse } from 'generated/proto/datasources_pb'; +import { DatasourcesPromiseClient } from 'generated/proto/datasources_grpc_web_pb'; +import Elasticsearch from 'components/applications/details/logs/Elasticsearch'; +import { IDatasourceOptions } from 'utils/proto'; +import NotDefined from 'components/applications/details/NotDefined'; +import Toolbar from 'components/applications/details/logs/Toolbar'; +import { apiURL } from 'utils/constants'; + +const datasourcesService = new DatasourcesPromiseClient(apiURL, null, null); + +interface ILogsProps { + datasourceOptions: IDatasourceOptions; + setDatasourceOptions: (options: IDatasourceOptions) => void; + application: Application; +} + +// Logs is the component, which is shown inside the logs tab of an application. It is used as wrapper component for the +// toolbar and results component. For the results we show different components, depending on the datasource type. +const Logs: React.FunctionComponent = ({ + datasourceOptions, + setDatasourceOptions, + application, +}: ILogsProps) => { + const logs = application.getLogs(); + + const [datasourceName, setDatasourceName] = useState(''); + const [datasourceType, setDatasourceType] = useState(''); + const [query, setQuery] = useState(logs ? 
logs.getQueriesList()[0] : undefined); + const [error, setError] = useState(''); + + // fetchDatasourceDetails fetch all details, which are specific for a datasource. Currently this is only the type of + // the datasource, but can be extended in the future if needed. More information can be found in the datasources.proto + // file and the documentation for the GetDatasourceResponse message format. + const fetchDatasourceDetails = useCallback(async () => { + try { + if (!logs) { + throw new Error('Logs are not defined.'); + } else { + const getDatasourceRequest = new GetDatasourceRequest(); + getDatasourceRequest.setName(logs.getDatasource()); + + const getDatasourceResponse: GetDatasourceResponse = await datasourcesService.getDatasource( + getDatasourceRequest, + null, + ); + + const datasource = getDatasourceResponse.getDatasource(); + if (datasource) { + setDatasourceName(datasource.getName()); + setDatasourceType(datasource.getType()); + setError(''); + } else { + throw new Error('Datasource is not defined.'); + } + } + } catch (err) { + setError(err.message); + } + }, [logs]); + + useEffect(() => { + fetchDatasourceDetails(); + }, [fetchDatasourceDetails]); + + // If the logs seticon in the Application CR isn't defined, we return the NotDefined component, with a link to the + // documentation, where a user can find more information on who to define logs. + if (!logs) { + return ( + + ); + } + + // If an error occured during, we show the user the error, with an option to retry the request. + if (error) { + return ( + + Retry + + } + > +

{error}

+
+ ); + } + + return ( +
+ setQuery(q)} + /> + + {datasourceType === 'elasticsearch' ? ( + + ) : null} +
+ ); +}; + +export default Logs; diff --git a/app/src/components/applications/details/logs/Toolbar.tsx b/app/src/components/applications/details/logs/Toolbar.tsx new file mode 100644 index 000000000..fc482345a --- /dev/null +++ b/app/src/components/applications/details/logs/Toolbar.tsx @@ -0,0 +1,91 @@ +import { + Dropdown, + DropdownItem, + DropdownToggle, + Toolbar as PatternflyToolbar, + ToolbarContent, + ToolbarGroup, + ToolbarItem, + ToolbarToggleGroup, +} from '@patternfly/react-core'; +import React, { useState } from 'react'; +import CaretDownIcon from '@patternfly/react-icons/dist/js/icons/caret-down-icon'; +import FilterIcon from '@patternfly/react-icons/dist/js/icons/filter-icon'; + +import { ApplicationLogsQuery } from 'generated/proto/application_pb'; +import { IDatasourceOptions } from 'utils/proto'; +import Options from 'components/applications/details/metrics/Options'; + +interface IToolbarProps { + datasourcenName: string; + datasourceType: string; + datasourceOptions: IDatasourceOptions; + setDatasourceOptions: (options: IDatasourceOptions) => void; + queries: ApplicationLogsQuery[]; + query?: ApplicationLogsQuery; + selectQuery: (q: ApplicationLogsQuery) => void; +} + +// Toolbar shows the options for a logs datasource, where the user can select the time range for which he wants to fetch +// the logs. The user can also select a defined query for an application. +const Toolbar: React.FunctionComponent = ({ + datasourcenName, + datasourceType, + datasourceOptions, + setDatasourceOptions, + queries, + query, + selectQuery, +}: IToolbarProps) => { + const [show, setShow] = useState(false); + + // selectDropdownItem is the function is called, when a user selects a query from the dropdown component. When a query + // is selected, we set this query as the current query and we close the dropdown. 
+ const selectDropdownItem = (q: ApplicationLogsQuery): void => { + selectQuery(q); + setShow(false); + }; + + return ( + + + } breakpoint="lg"> + + + setShow(!show)} + toggleIndicator={CaretDownIcon} + > + {query ? query.getName() : 'Queries'} + + } + isOpen={show} + dropdownItems={queries.map((q, index) => ( + selectDropdownItem(q)} + description={q.getQuery()} + > + {q.getName()} + + ))} + /> + + + + + + + + + + + ); +}; + +export default Toolbar; diff --git a/app/src/components/applications/details/metrics/Options.tsx b/app/src/components/applications/details/metrics/Options.tsx index bc0834f97..ea9c0b4a2 100644 --- a/app/src/components/applications/details/metrics/Options.tsx +++ b/app/src/components/applications/details/metrics/Options.tsx @@ -32,7 +32,7 @@ const Options: React.FunctionComponent = ({ type, options, setOpt const [timeStartError, setTimeStartError] = useState(''); const [timeEndError, setTimeEndError] = useState(''); - const [resolution, setResolution] = useState(options.resolution); + const [resolution, setResolution] = useState(options.resolution ? options.resolution : ''); // apply parses the value of the start and end input fields. If the user provided a correct data/time format, we // change the start and end time to the new values. If the string couldn't be parsed, the user will see an error below @@ -85,7 +85,7 @@ const Options: React.FunctionComponent = ({ type, options, setOpt setTimeStart(formatTime(options.timeStart)); setTimeEnd(formatTime(options.timeEnd)); - setResolution(options.resolution); + setResolution(options.resolution ? 
options.resolution : ''); }, [options.timeEnd, options.timeStart, options.resolution]); return ( diff --git a/app/src/components/applications/details/metrics/Toolbar.tsx b/app/src/components/applications/details/metrics/Toolbar.tsx index 13c7bcf93..0cc726040 100644 --- a/app/src/components/applications/details/metrics/Toolbar.tsx +++ b/app/src/components/applications/details/metrics/Toolbar.tsx @@ -33,7 +33,6 @@ interface IToolbarProps { setDatasourceOptions: (options: IDatasourceOptions) => void; variables: IApplicationMetricsVariable[]; setVariables: (variables: IApplicationMetricsVariable[]) => void; - isDrawer?: boolean; } // Toolbar component displays a list of all variables and an options field. The variables are displayed via a dropdown diff --git a/app/src/components/applications/details/metrics/charts/Actions.tsx b/app/src/components/applications/details/metrics/charts/Actions.tsx index 3a832ebe2..e8747f048 100644 --- a/app/src/components/applications/details/metrics/charts/Actions.tsx +++ b/app/src/components/applications/details/metrics/charts/Actions.tsx @@ -34,6 +34,7 @@ const Actions: React.FunctionComponent = ({ component={ Explore {query} diff --git a/app/src/components/datasources/Datasource.tsx b/app/src/components/datasources/Datasource.tsx index 76ce38b4d..d84a62043 100644 --- a/app/src/components/datasources/Datasource.tsx +++ b/app/src/components/datasources/Datasource.tsx @@ -2,6 +2,7 @@ import { Alert, AlertActionLink, AlertVariant, PageSection, PageSectionVariants import { useHistory, useParams } from 'react-router-dom'; import React from 'react'; +import Elasticsearch from 'components/datasources/elasticsearch/Elasticsearch'; import Prometheus from 'components/datasources/prometheus/Prometheus'; interface IDatasourceParams { @@ -23,6 +24,10 @@ const Datasource: React.FunctionComponent = () => { return ; } + if (params.type === 'elasticsearch') { + return ; + } + // When the provided datasource type, isn't valid, the user will see the 
following error, with an action to go back to // the datasource page. return ( diff --git a/app/src/components/datasources/Item.tsx b/app/src/components/datasources/Item.tsx index d488ee86a..64dea8bef 100644 --- a/app/src/components/datasources/Item.tsx +++ b/app/src/components/datasources/Item.tsx @@ -2,6 +2,17 @@ import { Card, CardBody, CardTitle } from '@patternfly/react-core'; import React from 'react'; import { useHistory } from 'react-router-dom'; +// ILogos is the interface for the datasource logos. +interface ILogos { + [key: string]: string; +} + +// logos is an object, with the datasource types as key and the image path for that datasource as logo. +const logos: ILogos = { + elasticsearch: '/img/datasources/elasticsearch.png', + prometheus: '/img/datasources/prometheus.png', +}; + interface IItemProps { name: string; type: string; @@ -21,7 +32,9 @@ const Item: React.FunctionComponent = ({ name, type, link }: IItemPr return ( {name} - {type === 'prometheus' ? {type} : null} + + {type} + ); }; diff --git a/app/src/components/datasources/elasticsearch/Buckets.tsx b/app/src/components/datasources/elasticsearch/Buckets.tsx new file mode 100644 index 000000000..8839931a5 --- /dev/null +++ b/app/src/components/datasources/elasticsearch/Buckets.tsx @@ -0,0 +1,83 @@ +import { Card, CardBody, CardTitle } from '@patternfly/react-core'; +import { + Chart, + ChartAxis, + ChartBar, + ChartLegendTooltip, + ChartThemeColor, + createContainer, +} from '@patternfly/react-charts'; +import React, { useEffect, useRef, useState } from 'react'; + +import { DatasourceLogsBucket } from 'generated/proto/datasources_pb'; +import { formatTime } from 'utils/helpers'; + +interface ILabels { + datum: DatasourceLogsBucket.AsObject; +} + +export interface IBucketsProps { + hits: number; + took: number; + buckets: DatasourceLogsBucket.AsObject[]; +} + +// Buckets renders a bar chart with the distribution of the number of logs accross the selected time range. 
+const Buckets: React.FunctionComponent = ({ hits, took, buckets }: IBucketsProps) => { + const refChart = useRef(null); + const [width, setWidth] = useState(0); + const [height, setHeight] = useState(0); + + // useEffect is executed on every render of this component. This is needed, so that we are able to use a width of 100% + // and a static height for the chart. + useEffect(() => { + if (refChart && refChart.current) { + setWidth(refChart.current.getBoundingClientRect().width); + setHeight(refChart.current.getBoundingClientRect().height); + } + }, []); + + const CursorVoronoiContainer = createContainer('voronoi', 'cursor'); + const legendData = [{ childName: 'count', name: 'Number of Documents' }]; + + return ( + + + {hits} Documents in {took} Milliseconds + + +
+ `${datum.y}`} + labelComponent={ + formatTime(Math.floor(point.x / 1000))} + /> + } + mouseFollowTooltips + voronoiDimension="x" + voronoiPadding={0} + /> + } + height={height} + legendData={legendData} + legendPosition={undefined} + padding={{ bottom: 30, left: 0, right: 0, top: 0 }} + scale={{ x: 'time', y: 'linear' }} + themeColor={ChartThemeColor.multiOrdered} + width={width} + > + + + +
+
+
+ ); +}; + +export default Buckets; diff --git a/app/src/components/datasources/elasticsearch/Document.tsx b/app/src/components/datasources/elasticsearch/Document.tsx new file mode 100644 index 000000000..3e5a3add6 --- /dev/null +++ b/app/src/components/datasources/elasticsearch/Document.tsx @@ -0,0 +1,58 @@ +import { + DrawerActions, + DrawerCloseButton, + DrawerHead, + DrawerPanelBody, + DrawerPanelContent, +} from '@patternfly/react-core'; +import React, { useEffect, useRef } from 'react'; +import { highlightBlock, registerLanguage } from 'highlight.js'; +import json from 'highlight.js/lib/languages/json'; + +import 'highlight.js/styles/nord.css'; + +import { IDocument, formatTimeWrapper } from 'components/datasources/elasticsearch/helpers'; +import Title from 'components/shared/Title'; + +registerLanguage('json', json); + +export interface IDocumentProps { + document: IDocument; + close: () => void; +} + +// Document renders a single document in a drawer panel. We show the whole JSON representation for this document in a +// code view. The highlighting of this JSON document is handled by highlight.js. +const Document: React.FunctionComponent = ({ document, close }: IDocumentProps) => { + const code = useRef(null); + + // useEffect apply the highlighting to the given JSON document. 
+ useEffect(() => { + if (code.current) { + highlightBlock(code.current); + } + }); + + return ( + + + + <DrawerActions className="kobs-drawer-actions"> + <DrawerCloseButton onClose={close} /> + </DrawerActions> + </DrawerHead> + + <DrawerPanelBody className="kobs-drawer-panel-body"> + <pre className="pf-u-pb-md"> + <code ref={code}>{JSON.stringify(document, null, 2)}</code> + </pre> + </DrawerPanelBody> + </DrawerPanelContent> + ); +}; + +export default Document; diff --git a/app/src/components/datasources/elasticsearch/Documents.tsx b/app/src/components/datasources/elasticsearch/Documents.tsx new file mode 100644 index 000000000..820ff3f62 --- /dev/null +++ b/app/src/components/datasources/elasticsearch/Documents.tsx @@ -0,0 +1,54 @@ +import { TableComposable, TableVariant, Tbody, Td, Th, Thead, Tr } from '@patternfly/react-table'; +import React from 'react'; + +import { IDocument, formatTimeWrapper, getProperty } from 'components/datasources/elasticsearch/helpers'; + +export interface IDocumentsProps { + selectedFields: string[]; + documents: IDocument[]; + select?: (doc: IDocument) => void; +} + +// Documents renders a list of documents. If the user has selected some fields, we will render the table with the +// selected fields. If the selected fields list is empty, we only render the @timestamp field and the _source field as +// the only two columns +const Documents: React.FunctionComponent<IDocumentsProps> = ({ + selectedFields, + documents, + select, +}: IDocumentsProps) => { + return ( + <div className="kobsis-table-wrapper"> + <TableComposable aria-label="Logs" variant={TableVariant.compact} borders={false}> + <Thead> + <Tr> + <Th>Time</Th> + {selectedFields.length > 0 ? ( + selectedFields.map((selectedField, index) => <Th key={index}>{selectedField}</Th>) + ) : ( + <Th>_source</Th> + )} + </Tr> + </Thead> + <Tbody> + {documents.map((document, index) => ( + <Tr key={index} onClick={select ? 
(): void => select(document) : undefined}> + <Td dataLabel="Time">{formatTimeWrapper(document['_source']['@timestamp'])}</Td> + {selectedFields.length > 0 ? ( + selectedFields.map((selectedField, index) => ( + <Td key={index} dataLabel={selectedField}> + {getProperty(document['_source'], selectedField)} + </Td> + )) + ) : ( + <Td dataLabel="_source">{JSON.stringify(document['_source'])}</Td> + )} + </Tr> + ))} + </Tbody> + </TableComposable> + </div> + ); +}; + +export default Documents; diff --git a/app/src/components/datasources/elasticsearch/Elasticsearch.tsx b/app/src/components/datasources/elasticsearch/Elasticsearch.tsx new file mode 100644 index 000000000..88d858f62 --- /dev/null +++ b/app/src/components/datasources/elasticsearch/Elasticsearch.tsx @@ -0,0 +1,279 @@ +import { + Alert, + AlertVariant, + Button, + ButtonVariant, + Drawer, + DrawerContent, + DrawerContentBody, + Grid, + GridItem, + PageSection, + PageSectionVariants, + TextInput, + Title, + Toolbar, + ToolbarContent, + ToolbarGroup, + ToolbarItem, + ToolbarToggleGroup, +} from '@patternfly/react-core'; +import React, { useCallback, useEffect, useState } from 'react'; +import { useHistory, useLocation } from 'react-router-dom'; +import FilterIcon from '@patternfly/react-icons/dist/js/icons/filter-icon'; + +import { DatasourceLogsBucket, GetLogsRequest, GetLogsResponse } from 'generated/proto/datasources_pb'; +import { IDocument, getFields } from 'components/datasources/elasticsearch/helpers'; +import { ApplicationLogsQuery } from 'generated/proto/application_pb'; +import Buckets from 'components/datasources/elasticsearch/Buckets'; +import { DatasourcesPromiseClient } from 'generated/proto/datasources_grpc_web_pb'; +import Document from 'components/datasources/elasticsearch/Document'; +import Documents from 'components/datasources/elasticsearch/Documents'; +import Fields from 'components/datasources/elasticsearch/Fields'; +import { IDatasourceOptions } from 'utils/proto'; +import Options from 
'components/applications/details/metrics/Options'; +import { apiURL } from 'utils/constants'; +import { convertDatasourceOptionsToProto } from 'utils/proto'; + +const datasourcesService = new DatasourcesPromiseClient(apiURL, null, null); + +// IQueryOptions is the interface for all query options. It extends the existing datasource options interface and adds +// a new query property. +interface IQueryOptions extends IDatasourceOptions { + query: string; + scrollID: string; + selectedFields: string[]; +} + +// parseSearch parses the provided query parameters and returns a query options object. This is needed so that an user +// can share his current URL with other users. So that this URL must contain all properties provided by the user. +const parseSearch = (search: string): IQueryOptions => { + const params = new URLSearchParams(search); + const fields = params.get('fields'); + + return { + query: params.get('query') ? (params.get('query') as string) : '', + scrollID: params.get('scrollID') ? (params.get('scrollID') as string) : '', + selectedFields: fields ? fields.split(',') : [], + timeEnd: params.get('timeEnd') ? parseInt(params.get('timeEnd') as string) : Math.floor(Date.now() / 1000), + timeStart: params.get('timeStart') + ? parseInt(params.get('timeStart') as string) + : Math.floor(Date.now() / 1000) - 3600, + }; +}; + +export interface IElasticsearchProps { + name: string; +} + +// Elasticsearhc implements the Elasticsearch UI for kobs. It can be used to query a configured Elasticsearch instance +// and show the logs in a table. 
+const Elasticsearch: React.FunctionComponent<IElasticsearchProps> = ({ name }: IElasticsearchProps) => { + const history = useHistory(); + const location = useLocation(); + const [query, setQuery] = useState<string>(''); + const [hits, setHits] = useState<number>(0); + const [took, setTook] = useState<number>(0); + const [scrollID, setScrollID] = useState<string>(''); + const [options, setOptions] = useState<IDatasourceOptions>(); + const [selectedFields, setSelectedFields] = useState<string[]>([]); + const [fields, setFields] = useState<string[]>([]); + const [documents, setDocuments] = useState<IDocument[]>([]); + const [document, setDocument] = useState<IDocument>(); + const [buckets, setBuckets] = useState<DatasourceLogsBucket.AsObject[]>([]); + const [error, setError] = useState<string>(''); + const [isLoading, setIsLoading] = useState<boolean>(false); + + // load changes the query parameters for the current page, to the user provided values. We change the query + // parameters, instead of directly fetching the logs, so that a user can share his current view with other users. + const load = async (): Promise<void> => { + history.push({ + pathname: location.pathname, + search: `?query=${query}&fields=${selectedFields.join(',')}&timeEnd=${options?.timeEnd}&timeStart=${ + options?.timeStart + }`, + }); + }; + + // loadMore is called, when the user clicks the load more button. Instead to the normal load function we set the + // scroll id as additional query parameter. + const loadMore = async (): Promise<void> => { + history.push({ + pathname: location.pathname, + search: `?query=${query}&fields=${selectedFields.join(',')}&scrollID=${scrollID}&timeEnd=${ + options?.timeEnd + }&timeStart=${options?.timeStart}`, + }); + }; + + // selectField adds the given field to the list of selected fields. 
+ const selectField = (field: string): void => { + setSelectedFields((f) => [...f, field]); + }; + + // unselectField removes the given field from the list of selected fields. + const unselectField = (field: string): void => { + setSelectedFields(selectedFields.filter((f) => f !== field)); + }; + + // fetchLogs call the getLogs function to retrieve the logs for a given query. If the scroll id is present in the + // query options, we are fetching more logs for a query and adding the logs to the documents list. If the scroll id + // isn't present we set the documents to the result list. + // The returned logs are a string, but since we know that this is a Elasticsearch datasource, we can savely parse the + // string into a JSON array. + const fetchLogs = useCallback( + async (queryOptions: IQueryOptions): Promise<void> => { + try { + if (queryOptions.query) { + setIsLoading(true); + const logsQuery = new ApplicationLogsQuery(); + logsQuery.setQuery(queryOptions.query); + + const getLogsRequest = new GetLogsRequest(); + getLogsRequest.setName(name); + getLogsRequest.setScrollid(queryOptions.scrollID); + getLogsRequest.setOptions(convertDatasourceOptionsToProto(queryOptions)); + getLogsRequest.setQuery(logsQuery); + + const getLogsResponse: GetLogsResponse = await datasourcesService.getLogs(getLogsRequest, null); + + if (queryOptions.scrollID === '') { + const parsed = JSON.parse(getLogsResponse.getLogs()); + setFields(getFields(parsed.slice(parsed.length > 10 ? 
10 : parsed.length))); + setDocuments(parsed); + } else { + setDocuments((d) => [...d, ...JSON.parse(getLogsResponse.getLogs())]); + } + + if (getLogsResponse.toObject().bucketsList.length > 0) { + setBuckets(getLogsResponse.toObject().bucketsList); + } + + setHits(getLogsResponse.getHits()); + setTook(getLogsResponse.getTook()); + setScrollID(getLogsResponse.getScrollid()); + setIsLoading(false); + setError(''); + } + } catch (err) { + setIsLoading(false); + setError(err.message); + } + }, + [name], + ); + + // useEffect is called every time, when the query parameters for the current location are changing. Then we parse the + // query parameters, setting our states to the new values and finally we are calling the fetch logs function. + useEffect(() => { + const queryOptions = parseSearch(location.search); + setQuery(queryOptions.query); + setSelectedFields(queryOptions.selectedFields); + setOptions(queryOptions); + fetchLogs(queryOptions); + }, [fetchLogs, location.search]); + + return ( + <React.Fragment> + <PageSection variant={PageSectionVariants.light}> + <Title headingLevel="h6" size="xl"> + {name} + + + + + } breakpoint="lg"> + + + setQuery(value)} + /> + + {options ? ( + + setOptions(opts)} /> + + ) : null} + + + + + + + + + + + setDocument(undefined)} /> : undefined + } + > + + + {error ? ( + +

{error}

+
+ ) : ( + + + + {fields.length > 0 || selectedFields.length > 0 ? ( + + ) : null} + + + {buckets.length > 0 ? : null} + +

 

+ + {documents.length > 0 ? ( + setDocument(doc)} + /> + ) : null} + +

 

+ + {scrollID !== '' ? ( + + ) : null} +
+
+
+ )} +
+
+
+
+ + ); +}; + +export default Elasticsearch; diff --git a/app/src/components/datasources/elasticsearch/Fields.tsx b/app/src/components/datasources/elasticsearch/Fields.tsx new file mode 100644 index 000000000..b089c8694 --- /dev/null +++ b/app/src/components/datasources/elasticsearch/Fields.tsx @@ -0,0 +1,49 @@ +import { SimpleList, SimpleListItem } from '@patternfly/react-core'; +import React from 'react'; + +export interface IFieldsProps { + fields: string[]; + selectedFields: string[]; + selectField: (field: string) => void; + unselectField: (field: string) => void; +} + +// Fields is used to show the list of parsed and selected fields. When a user selects a field from the fields list, this +// field is added to the list of selected fields. When the user selects a field from the selected fields list this field +// will be removed from this list. +const Fields: React.FunctionComponent = ({ + fields, + selectedFields, + selectField, + unselectField, +}: IFieldsProps) => { + return ( + + {selectedFields.length > 0 ?

Selected Fields

: null} + + {selectedFields.length > 0 ? ( + + {selectedFields.map((selectedField, index) => ( + unselectField(selectedField)} isActive={false}> + {selectedField} + + ))} + + ) : null} + + {fields.length > 0 ?

Fields

: null} + + {fields.length > 0 ? ( + + {fields.map((field, index) => ( + selectField(field)} isActive={false}> + {field} + + ))} + + ) : null} +
+
+  );
+};
+
+export default Fields;
diff --git a/app/src/components/datasources/elasticsearch/helpers.ts b/app/src/components/datasources/elasticsearch/helpers.ts
new file mode 100644
index 000000000..385ec39ef
--- /dev/null
+++ b/app/src/components/datasources/elasticsearch/helpers.ts
@@ -0,0 +1,54 @@
+import { formatTime } from 'utils/helpers';
+
+// IDocument is the interface for a single Elasticsearch document. It is just a general interface for the JSON
+// representation of this document.
+export interface IDocument {
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  [key: string]: any;
+}
+
+// getFieldsRecursively returns the fields for a single document as a list of strings.
+export const getFieldsRecursively = (prefix: string, document: IDocument): string[] => {
+  const fields: string[] = [];
+  for (const field in document) {
+    if (typeof document[field] === 'object') {
+      fields.push(...getFieldsRecursively(prefix ? `${prefix}.${field}` : field, document[field]));
+    } else {
+      fields.push(prefix ? `${prefix}.${field}` : field);
+    }
+  }
+
+  return fields;
+};
+
+// getFields is used to get all fields as strings for the given documents. To get the fields we are looping over the
+// given documents and adding each field from this document. As a last step we have to remove all duplicated fields.
+export const getFields = (documents: IDocument[]): string[] => {
+  const fields: string[] = [];
+  for (const document of documents) {
+    fields.push(...getFieldsRecursively('', document['_source']));
+  }
+
+  const uniqueFields: string[] = [];
+  for (const field of fields) {
+    if (uniqueFields.indexOf(field) === -1) {
+      uniqueFields.push(field);
+    }
+  }
+
+  return uniqueFields;
+};
+
+// getProperty returns the property of an object for a given key.
+// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types +export const getProperty = (obj: any, key: string): string | number => { + return key.split('.').reduce((o, x) => { + return typeof o == 'undefined' || o === null ? o : o[x]; + }, obj); +}; + +// formatTimeWrapper is a wrapper for our shared formatTime function. It is needed to convert a given time string to the +// corresponding timestamp representation, which we need for the formatTime function. +export const formatTimeWrapper = (time: string): string => { + return formatTime(Math.floor(new Date(time).getTime() / 1000)); +}; diff --git a/app/src/generated/proto/application_pb.d.ts b/app/src/generated/proto/application_pb.d.ts index e3f7b9b9a..2432ac2a6 100644 --- a/app/src/generated/proto/application_pb.d.ts +++ b/app/src/generated/proto/application_pb.d.ts @@ -258,6 +258,14 @@ export namespace ApplicationMetricsQuery { } export class ApplicationLogs extends jspb.Message { + getDatasource(): string; + setDatasource(value: string): void; + + clearQueriesList(): void; + getQueriesList(): Array; + setQueriesList(value: Array): void; + addQueries(value?: ApplicationLogsQuery, index?: number): ApplicationLogsQuery; + serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): ApplicationLogs.AsObject; static toObject(includeInstance: boolean, msg: ApplicationLogs): ApplicationLogs.AsObject; @@ -270,6 +278,38 @@ export class ApplicationLogs extends jspb.Message { export namespace ApplicationLogs { export type AsObject = { + datasource: string, + queriesList: Array, + } +} + +export class ApplicationLogsQuery extends jspb.Message { + getName(): string; + setName(value: string): void; + + getQuery(): string; + setQuery(value: string): void; + + clearFieldsList(): void; + getFieldsList(): Array; + setFieldsList(value: Array): void; + addFields(value: string, index?: number): string; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: 
boolean): ApplicationLogsQuery.AsObject; + static toObject(includeInstance: boolean, msg: ApplicationLogsQuery): ApplicationLogsQuery.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: ApplicationLogsQuery, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): ApplicationLogsQuery; + static deserializeBinaryFromReader(message: ApplicationLogsQuery, reader: jspb.BinaryReader): ApplicationLogsQuery; +} + +export namespace ApplicationLogsQuery { + export type AsObject = { + name: string, + query: string, + fieldsList: Array, } } diff --git a/app/src/generated/proto/application_pb.js b/app/src/generated/proto/application_pb.js index d83f3a21d..9a5487368 100644 --- a/app/src/generated/proto/application_pb.js +++ b/app/src/generated/proto/application_pb.js @@ -17,6 +17,7 @@ var global = Function('return this')(); goog.exportSymbol('proto.application.Application', null, global); goog.exportSymbol('proto.application.ApplicationLink', null, global); goog.exportSymbol('proto.application.ApplicationLogs', null, global); +goog.exportSymbol('proto.application.ApplicationLogsQuery', null, global); goog.exportSymbol('proto.application.ApplicationMetrics', null, global); goog.exportSymbol('proto.application.ApplicationMetricsChart', null, global); goog.exportSymbol('proto.application.ApplicationMetricsQuery', null, global); @@ -181,7 +182,7 @@ if (goog.DEBUG && !COMPILED) { * @constructor */ proto.application.ApplicationLogs = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); + jspb.Message.initialize(this, opt_data, 0, -1, proto.application.ApplicationLogs.repeatedFields_, null); }; goog.inherits(proto.application.ApplicationLogs, jspb.Message); if (goog.DEBUG && !COMPILED) { @@ -191,6 +192,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.application.ApplicationLogs.displayName = 
'proto.application.ApplicationLogs'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.application.ApplicationLogsQuery = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.application.ApplicationLogsQuery.repeatedFields_, null); +}; +goog.inherits(proto.application.ApplicationLogsQuery, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.application.ApplicationLogsQuery.displayName = 'proto.application.ApplicationLogsQuery'; +} /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -2085,6 +2107,13 @@ proto.application.ApplicationMetricsQuery.prototype.setLabel = function(value) { +/** + * List of repeated fields within this message type. 
+ * @private {!Array} + * @const + */ +proto.application.ApplicationLogs.repeatedFields_ = [2]; + if (jspb.Message.GENERATE_TO_OBJECT) { @@ -2116,7 +2145,9 @@ proto.application.ApplicationLogs.prototype.toObject = function(opt_includeInsta */ proto.application.ApplicationLogs.toObject = function(includeInstance, msg) { var f, obj = { - + datasource: jspb.Message.getFieldWithDefault(msg, 1, ""), + queriesList: jspb.Message.toObjectList(msg.getQueriesList(), + proto.application.ApplicationLogsQuery.toObject, includeInstance) }; if (includeInstance) { @@ -2153,6 +2184,15 @@ proto.application.ApplicationLogs.deserializeBinaryFromReader = function(msg, re } var field = reader.getFieldNumber(); switch (field) { + case 1: + var value = /** @type {string} */ (reader.readString()); + msg.setDatasource(value); + break; + case 2: + var value = new proto.application.ApplicationLogsQuery; + reader.readMessage(value,proto.application.ApplicationLogsQuery.deserializeBinaryFromReader); + msg.addQueries(value); + break; default: reader.skipField(); break; @@ -2182,6 +2222,293 @@ proto.application.ApplicationLogs.prototype.serializeBinary = function() { */ proto.application.ApplicationLogs.serializeBinaryToWriter = function(message, writer) { var f = undefined; + f = message.getDatasource(); + if (f.length > 0) { + writer.writeString( + 1, + f + ); + } + f = message.getQueriesList(); + if (f.length > 0) { + writer.writeRepeatedMessage( + 2, + f, + proto.application.ApplicationLogsQuery.serializeBinaryToWriter + ); + } +}; + + +/** + * optional string datasource = 1; + * @return {string} + */ +proto.application.ApplicationLogs.prototype.getDatasource = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); +}; + + +/** + * @param {string} value + * @return {!proto.application.ApplicationLogs} returns this + */ +proto.application.ApplicationLogs.prototype.setDatasource = function(value) { + return jspb.Message.setProto3StringField(this, 1, value); 
+}; + + +/** + * repeated ApplicationLogsQuery queries = 2; + * @return {!Array} + */ +proto.application.ApplicationLogs.prototype.getQueriesList = function() { + return /** @type{!Array} */ ( + jspb.Message.getRepeatedWrapperField(this, proto.application.ApplicationLogsQuery, 2)); +}; + + +/** + * @param {!Array} value + * @return {!proto.application.ApplicationLogs} returns this +*/ +proto.application.ApplicationLogs.prototype.setQueriesList = function(value) { + return jspb.Message.setRepeatedWrapperField(this, 2, value); +}; + + +/** + * @param {!proto.application.ApplicationLogsQuery=} opt_value + * @param {number=} opt_index + * @return {!proto.application.ApplicationLogsQuery} + */ +proto.application.ApplicationLogs.prototype.addQueries = function(opt_value, opt_index) { + return jspb.Message.addToRepeatedWrapperField(this, 2, opt_value, proto.application.ApplicationLogsQuery, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. + * @return {!proto.application.ApplicationLogs} returns this + */ +proto.application.ApplicationLogs.prototype.clearQueriesList = function() { + return this.setQueriesList([]); +}; + + + +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.application.ApplicationLogsQuery.repeatedFields_ = [3]; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. 
whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.application.ApplicationLogsQuery.prototype.toObject = function(opt_includeInstance) { + return proto.application.ApplicationLogsQuery.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.application.ApplicationLogsQuery} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.application.ApplicationLogsQuery.toObject = function(includeInstance, msg) { + var f, obj = { + name: jspb.Message.getFieldWithDefault(msg, 1, ""), + query: jspb.Message.getFieldWithDefault(msg, 2, ""), + fieldsList: (f = jspb.Message.getRepeatedField(msg, 3)) == null ? undefined : f + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.application.ApplicationLogsQuery} + */ +proto.application.ApplicationLogsQuery.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.application.ApplicationLogsQuery; + return proto.application.ApplicationLogsQuery.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.application.ApplicationLogsQuery} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. 
+ * @return {!proto.application.ApplicationLogsQuery} + */ +proto.application.ApplicationLogsQuery.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {string} */ (reader.readString()); + msg.setName(value); + break; + case 2: + var value = /** @type {string} */ (reader.readString()); + msg.setQuery(value); + break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.addFields(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.application.ApplicationLogsQuery.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.application.ApplicationLogsQuery.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.application.ApplicationLogsQuery} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.application.ApplicationLogsQuery.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getName(); + if (f.length > 0) { + writer.writeString( + 1, + f + ); + } + f = message.getQuery(); + if (f.length > 0) { + writer.writeString( + 2, + f + ); + } + f = message.getFieldsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 3, + f + ); + } +}; + + +/** + * optional string name = 1; + * @return {string} + */ +proto.application.ApplicationLogsQuery.prototype.getName = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); +}; + + +/** + * @param {string} value + * @return {!proto.application.ApplicationLogsQuery} returns this + */ +proto.application.ApplicationLogsQuery.prototype.setName = function(value) { + return jspb.Message.setProto3StringField(this, 1, value); +}; + + +/** + * optional string query = 2; + * @return {string} + */ +proto.application.ApplicationLogsQuery.prototype.getQuery = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); +}; + + +/** + * @param {string} value + * @return {!proto.application.ApplicationLogsQuery} returns this + */ +proto.application.ApplicationLogsQuery.prototype.setQuery = function(value) { + return jspb.Message.setProto3StringField(this, 2, value); +}; + + +/** + * repeated string fields = 3; + * @return {!Array} + */ +proto.application.ApplicationLogsQuery.prototype.getFieldsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 3)); +}; + + +/** + * @param {!Array} value + * @return {!proto.application.ApplicationLogsQuery} returns this + */ +proto.application.ApplicationLogsQuery.prototype.setFieldsList = function(value) { + return jspb.Message.setField(this, 3, value || []); +}; + + +/** + 
* @param {string} value + * @param {number=} opt_index + * @return {!proto.application.ApplicationLogsQuery} returns this + */ +proto.application.ApplicationLogsQuery.prototype.addFields = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 3, value, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. + * @return {!proto.application.ApplicationLogsQuery} returns this + */ +proto.application.ApplicationLogsQuery.prototype.clearFieldsList = function() { + return this.setFieldsList([]); }; diff --git a/app/src/generated/proto/datasources_pb.d.ts b/app/src/generated/proto/datasources_pb.d.ts index 236bfdf25..1cec422a9 100644 --- a/app/src/generated/proto/datasources_pb.d.ts +++ b/app/src/generated/proto/datasources_pb.d.ts @@ -232,11 +232,19 @@ export class GetLogsRequest extends jspb.Message { getName(): string; setName(value: string): void; + getScrollid(): string; + setScrollid(value: string): void; + hasOptions(): boolean; clearOptions(): void; getOptions(): DatasourceOptions | undefined; setOptions(value?: DatasourceOptions): void; + hasQuery(): boolean; + clearQuery(): void; + getQuery(): application_pb.ApplicationLogsQuery | undefined; + setQuery(value?: application_pb.ApplicationLogsQuery): void; + serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): GetLogsRequest.AsObject; static toObject(includeInstance: boolean, msg: GetLogsRequest): GetLogsRequest.AsObject; @@ -250,11 +258,30 @@ export class GetLogsRequest extends jspb.Message { export namespace GetLogsRequest { export type AsObject = { name: string, + scrollid: string, options?: DatasourceOptions.AsObject, + query?: application_pb.ApplicationLogsQuery.AsObject, } } export class GetLogsResponse extends jspb.Message { + getHits(): number; + setHits(value: number): void; + + getTook(): number; + setTook(value: number): void; + + getScrollid(): string; + setScrollid(value: string): void; + + getLogs(): string; + setLogs(value: string): void; + + 
clearBucketsList(): void; + getBucketsList(): Array; + setBucketsList(value: Array): void; + addBuckets(value?: DatasourceLogsBucket, index?: number): DatasourceLogsBucket; + serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): GetLogsResponse.AsObject; static toObject(includeInstance: boolean, msg: GetLogsResponse): GetLogsResponse.AsObject; @@ -267,6 +294,11 @@ export class GetLogsResponse extends jspb.Message { export namespace GetLogsResponse { export type AsObject = { + hits: number, + took: number, + scrollid: string, + logs: string, + bucketsList: Array, } } @@ -398,3 +430,27 @@ export namespace DatasourceMetricsData { } } +export class DatasourceLogsBucket extends jspb.Message { + getX(): number; + setX(value: number): void; + + getY(): number; + setY(value: number): void; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): DatasourceLogsBucket.AsObject; + static toObject(includeInstance: boolean, msg: DatasourceLogsBucket): DatasourceLogsBucket.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: DatasourceLogsBucket, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): DatasourceLogsBucket; + static deserializeBinaryFromReader(message: DatasourceLogsBucket, reader: jspb.BinaryReader): DatasourceLogsBucket; +} + +export namespace DatasourceLogsBucket { + export type AsObject = { + x: number, + y: number, + } +} + diff --git a/app/src/generated/proto/datasources_pb.js b/app/src/generated/proto/datasources_pb.js index 768690232..097d496eb 100644 --- a/app/src/generated/proto/datasources_pb.js +++ b/app/src/generated/proto/datasources_pb.js @@ -17,6 +17,7 @@ var global = Function('return this')(); var application_pb = require('./application_pb.js'); goog.object.extend(proto, application_pb); goog.exportSymbol('proto.datasources.Datasource', null, global); 
+goog.exportSymbol('proto.datasources.DatasourceLogsBucket', null, global); goog.exportSymbol('proto.datasources.DatasourceMetrics', null, global); goog.exportSymbol('proto.datasources.DatasourceMetricsData', null, global); goog.exportSymbol('proto.datasources.DatasourceOptions', null, global); @@ -253,7 +254,7 @@ if (goog.DEBUG && !COMPILED) { * @constructor */ proto.datasources.GetLogsResponse = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); + jspb.Message.initialize(this, opt_data, 0, -1, proto.datasources.GetLogsResponse.repeatedFields_, null); }; goog.inherits(proto.datasources.GetLogsResponse, jspb.Message); if (goog.DEBUG && !COMPILED) { @@ -368,6 +369,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.datasources.DatasourceMetricsData.displayName = 'proto.datasources.DatasourceMetricsData'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. 
+ * @extends {jspb.Message} + * @constructor + */ +proto.datasources.DatasourceLogsBucket = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.datasources.DatasourceLogsBucket, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.datasources.DatasourceLogsBucket.displayName = 'proto.datasources.DatasourceLogsBucket'; +} @@ -2007,7 +2029,9 @@ proto.datasources.GetLogsRequest.prototype.toObject = function(opt_includeInstan proto.datasources.GetLogsRequest.toObject = function(includeInstance, msg) { var f, obj = { name: jspb.Message.getFieldWithDefault(msg, 1, ""), - options: (f = msg.getOptions()) && proto.datasources.DatasourceOptions.toObject(includeInstance, f) + scrollid: jspb.Message.getFieldWithDefault(msg, 2, ""), + options: (f = msg.getOptions()) && proto.datasources.DatasourceOptions.toObject(includeInstance, f), + query: (f = msg.getQuery()) && application_pb.ApplicationLogsQuery.toObject(includeInstance, f) }; if (includeInstance) { @@ -2049,10 +2073,19 @@ proto.datasources.GetLogsRequest.deserializeBinaryFromReader = function(msg, rea msg.setName(value); break; case 2: + var value = /** @type {string} */ (reader.readString()); + msg.setScrollid(value); + break; + case 3: var value = new proto.datasources.DatasourceOptions; reader.readMessage(value,proto.datasources.DatasourceOptions.deserializeBinaryFromReader); msg.setOptions(value); break; + case 4: + var value = new application_pb.ApplicationLogsQuery; + reader.readMessage(value,application_pb.ApplicationLogsQuery.deserializeBinaryFromReader); + msg.setQuery(value); + break; default: reader.skipField(); break; @@ -2089,14 +2122,29 @@ proto.datasources.GetLogsRequest.serializeBinaryToWriter = function(message, wri f ); } + f = message.getScrollid(); + if (f.length > 0) { + writer.writeString( + 2, + f + ); + } f = message.getOptions(); if (f != null) { writer.writeMessage( - 2, + 3, f, 
proto.datasources.DatasourceOptions.serializeBinaryToWriter ); } + f = message.getQuery(); + if (f != null) { + writer.writeMessage( + 4, + f, + application_pb.ApplicationLogsQuery.serializeBinaryToWriter + ); + } }; @@ -2119,12 +2167,30 @@ proto.datasources.GetLogsRequest.prototype.setName = function(value) { /** - * optional DatasourceOptions options = 2; + * optional string scrollID = 2; + * @return {string} + */ +proto.datasources.GetLogsRequest.prototype.getScrollid = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); +}; + + +/** + * @param {string} value + * @return {!proto.datasources.GetLogsRequest} returns this + */ +proto.datasources.GetLogsRequest.prototype.setScrollid = function(value) { + return jspb.Message.setProto3StringField(this, 2, value); +}; + + +/** + * optional DatasourceOptions options = 3; * @return {?proto.datasources.DatasourceOptions} */ proto.datasources.GetLogsRequest.prototype.getOptions = function() { return /** @type{?proto.datasources.DatasourceOptions} */ ( - jspb.Message.getWrapperField(this, proto.datasources.DatasourceOptions, 2)); + jspb.Message.getWrapperField(this, proto.datasources.DatasourceOptions, 3)); }; @@ -2133,7 +2199,7 @@ proto.datasources.GetLogsRequest.prototype.getOptions = function() { * @return {!proto.datasources.GetLogsRequest} returns this */ proto.datasources.GetLogsRequest.prototype.setOptions = function(value) { - return jspb.Message.setWrapperField(this, 2, value); + return jspb.Message.setWrapperField(this, 3, value); }; @@ -2151,10 +2217,54 @@ proto.datasources.GetLogsRequest.prototype.clearOptions = function() { * @return {boolean} */ proto.datasources.GetLogsRequest.prototype.hasOptions = function() { - return jspb.Message.getField(this, 2) != null; + return jspb.Message.getField(this, 3) != null; }; +/** + * optional application.ApplicationLogsQuery query = 4; + * @return {?proto.application.ApplicationLogsQuery} + */ 
+proto.datasources.GetLogsRequest.prototype.getQuery = function() { + return /** @type{?proto.application.ApplicationLogsQuery} */ ( + jspb.Message.getWrapperField(this, application_pb.ApplicationLogsQuery, 4)); +}; + + +/** + * @param {?proto.application.ApplicationLogsQuery|undefined} value + * @return {!proto.datasources.GetLogsRequest} returns this +*/ +proto.datasources.GetLogsRequest.prototype.setQuery = function(value) { + return jspb.Message.setWrapperField(this, 4, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.datasources.GetLogsRequest} returns this + */ +proto.datasources.GetLogsRequest.prototype.clearQuery = function() { + return this.setQuery(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.datasources.GetLogsRequest.prototype.hasQuery = function() { + return jspb.Message.getField(this, 4) != null; +}; + + + +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.datasources.GetLogsResponse.repeatedFields_ = [5]; @@ -2187,7 +2297,12 @@ proto.datasources.GetLogsResponse.prototype.toObject = function(opt_includeInsta */ proto.datasources.GetLogsResponse.toObject = function(includeInstance, msg) { var f, obj = { - + hits: jspb.Message.getFieldWithDefault(msg, 1, 0), + took: jspb.Message.getFieldWithDefault(msg, 2, 0), + scrollid: jspb.Message.getFieldWithDefault(msg, 3, ""), + logs: jspb.Message.getFieldWithDefault(msg, 4, ""), + bucketsList: jspb.Message.toObjectList(msg.getBucketsList(), + proto.datasources.DatasourceLogsBucket.toObject, includeInstance) }; if (includeInstance) { @@ -2224,6 +2339,27 @@ proto.datasources.GetLogsResponse.deserializeBinaryFromReader = function(msg, re } var field = reader.getFieldNumber(); switch (field) { + case 1: + var value = /** @type {number} */ (reader.readInt64()); + msg.setHits(value); + break; + case 2: + var value = /** @type {number} */ (reader.readInt64()); + 
msg.setTook(value); + break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.setScrollid(value); + break; + case 4: + var value = /** @type {string} */ (reader.readString()); + msg.setLogs(value); + break; + case 5: + var value = new proto.datasources.DatasourceLogsBucket; + reader.readMessage(value,proto.datasources.DatasourceLogsBucket.deserializeBinaryFromReader); + msg.addBuckets(value); + break; default: reader.skipField(); break; @@ -2253,6 +2389,152 @@ proto.datasources.GetLogsResponse.prototype.serializeBinary = function() { */ proto.datasources.GetLogsResponse.serializeBinaryToWriter = function(message, writer) { var f = undefined; + f = message.getHits(); + if (f !== 0) { + writer.writeInt64( + 1, + f + ); + } + f = message.getTook(); + if (f !== 0) { + writer.writeInt64( + 2, + f + ); + } + f = message.getScrollid(); + if (f.length > 0) { + writer.writeString( + 3, + f + ); + } + f = message.getLogs(); + if (f.length > 0) { + writer.writeString( + 4, + f + ); + } + f = message.getBucketsList(); + if (f.length > 0) { + writer.writeRepeatedMessage( + 5, + f, + proto.datasources.DatasourceLogsBucket.serializeBinaryToWriter + ); + } +}; + + +/** + * optional int64 hits = 1; + * @return {number} + */ +proto.datasources.GetLogsResponse.prototype.getHits = function() { + return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 1, 0)); +}; + + +/** + * @param {number} value + * @return {!proto.datasources.GetLogsResponse} returns this + */ +proto.datasources.GetLogsResponse.prototype.setHits = function(value) { + return jspb.Message.setProto3IntField(this, 1, value); +}; + + +/** + * optional int64 took = 2; + * @return {number} + */ +proto.datasources.GetLogsResponse.prototype.getTook = function() { + return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 2, 0)); +}; + + +/** + * @param {number} value + * @return {!proto.datasources.GetLogsResponse} returns this + */ 
+proto.datasources.GetLogsResponse.prototype.setTook = function(value) { + return jspb.Message.setProto3IntField(this, 2, value); +}; + + +/** + * optional string scrollID = 3; + * @return {string} + */ +proto.datasources.GetLogsResponse.prototype.getScrollid = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, "")); +}; + + +/** + * @param {string} value + * @return {!proto.datasources.GetLogsResponse} returns this + */ +proto.datasources.GetLogsResponse.prototype.setScrollid = function(value) { + return jspb.Message.setProto3StringField(this, 3, value); +}; + + +/** + * optional string logs = 4; + * @return {string} + */ +proto.datasources.GetLogsResponse.prototype.getLogs = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 4, "")); +}; + + +/** + * @param {string} value + * @return {!proto.datasources.GetLogsResponse} returns this + */ +proto.datasources.GetLogsResponse.prototype.setLogs = function(value) { + return jspb.Message.setProto3StringField(this, 4, value); +}; + + +/** + * repeated DatasourceLogsBucket buckets = 5; + * @return {!Array} + */ +proto.datasources.GetLogsResponse.prototype.getBucketsList = function() { + return /** @type{!Array} */ ( + jspb.Message.getRepeatedWrapperField(this, proto.datasources.DatasourceLogsBucket, 5)); +}; + + +/** + * @param {!Array} value + * @return {!proto.datasources.GetLogsResponse} returns this +*/ +proto.datasources.GetLogsResponse.prototype.setBucketsList = function(value) { + return jspb.Message.setRepeatedWrapperField(this, 5, value); +}; + + +/** + * @param {!proto.datasources.DatasourceLogsBucket=} opt_value + * @param {number=} opt_index + * @return {!proto.datasources.DatasourceLogsBucket} + */ +proto.datasources.GetLogsResponse.prototype.addBuckets = function(opt_value, opt_index) { + return jspb.Message.addToRepeatedWrapperField(this, 5, opt_value, proto.datasources.DatasourceLogsBucket, opt_index); +}; + + +/** + * Clears the list 
making it empty but non-null. + * @return {!proto.datasources.GetLogsResponse} returns this + */ +proto.datasources.GetLogsResponse.prototype.clearBucketsList = function() { + return this.setBucketsList([]); }; @@ -3138,4 +3420,164 @@ proto.datasources.DatasourceMetricsData.prototype.setY = function(value) { }; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.datasources.DatasourceLogsBucket.prototype.toObject = function(opt_includeInstance) { + return proto.datasources.DatasourceLogsBucket.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.datasources.DatasourceLogsBucket} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.datasources.DatasourceLogsBucket.toObject = function(includeInstance, msg) { + var f, obj = { + x: jspb.Message.getFieldWithDefault(msg, 1, 0), + y: jspb.Message.getFieldWithDefault(msg, 2, 0) + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. 
+ * @return {!proto.datasources.DatasourceLogsBucket} + */ +proto.datasources.DatasourceLogsBucket.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.datasources.DatasourceLogsBucket; + return proto.datasources.DatasourceLogsBucket.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.datasources.DatasourceLogsBucket} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.datasources.DatasourceLogsBucket} + */ +proto.datasources.DatasourceLogsBucket.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {number} */ (reader.readInt64()); + msg.setX(value); + break; + case 2: + var value = /** @type {number} */ (reader.readInt64()); + msg.setY(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.datasources.DatasourceLogsBucket.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.datasources.DatasourceLogsBucket.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.datasources.DatasourceLogsBucket} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.datasources.DatasourceLogsBucket.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getX(); + if (f !== 0) { + writer.writeInt64( + 1, + f + ); + } + f = message.getY(); + if (f !== 0) { + writer.writeInt64( + 2, + f + ); + } +}; + + +/** + * optional int64 x = 1; + * @return {number} + */ +proto.datasources.DatasourceLogsBucket.prototype.getX = function() { + return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 1, 0)); +}; + + +/** + * @param {number} value + * @return {!proto.datasources.DatasourceLogsBucket} returns this + */ +proto.datasources.DatasourceLogsBucket.prototype.setX = function(value) { + return jspb.Message.setProto3IntField(this, 1, value); +}; + + +/** + * optional int64 y = 2; + * @return {number} + */ +proto.datasources.DatasourceLogsBucket.prototype.getY = function() { + return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 2, 0)); +}; + + +/** + * @param {number} value + * @return {!proto.datasources.DatasourceLogsBucket} returns this + */ +proto.datasources.DatasourceLogsBucket.prototype.setY = function(value) { + return jspb.Message.setProto3IntField(this, 2, value); +}; + + goog.object.extend(exports, proto.datasources); diff --git a/app/src/utils/proto.ts b/app/src/utils/proto.ts index 271667c56..d77d0e513 100644 --- a/app/src/utils/proto.ts +++ b/app/src/utils/proto.ts @@ -4,7 +4,7 @@ import { DatasourceOptions } from 'generated/proto/datasources_pb'; // IDatasourceOptions must implement the DatasourceOptions message format from the datasources.proto file. It is used, // to enable the usage of DatasourceOptions message formate within a React State. 
export interface IDatasourceOptions { - resolution: string; + resolution?: string; timeEnd: number; timeStart: number; } @@ -15,7 +15,7 @@ export const convertDatasourceOptionsToProto = (options: IDatasourceOptions): Da const datasourceOptions = new DatasourceOptions(); datasourceOptions.setTimestart(options.timeStart); datasourceOptions.setTimeend(options.timeEnd); - datasourceOptions.setResolution(options.resolution); + datasourceOptions.setResolution(options.resolution ? options.resolution : ''); return datasourceOptions; }; diff --git a/deploy/docker/kobs/config.yaml b/deploy/docker/kobs/config.yaml index 7bea91ccf..9a2f2937d 100644 --- a/deploy/docker/kobs/config.yaml +++ b/deploy/docker/kobs/config.yaml @@ -5,7 +5,7 @@ clusters: path: ${HOME}/.kube/config defaultDatasources: metrics: Prometheus - logs: elasticsearch + logs: Elasticsearch traces: jaeger datasources: @@ -13,3 +13,7 @@ datasources: type: prometheus prometheus: address: http://localhost:9090 + - name: Elasticsearch + type: elasticsearch + elasticsearch: + address: http://localhost:9200 diff --git a/deploy/kustomize/crds/kobs.io_applications.yaml b/deploy/kustomize/crds/kobs.io_applications.yaml index 229cd29c6..714ee904d 100644 --- a/deploy/kustomize/crds/kobs.io_applications.yaml +++ b/deploy/kustomize/crds/kobs.io_applications.yaml @@ -54,6 +54,32 @@ spec: type: object type: array logs: + description: ApplicationLogs defines the structure of the logs section + of an application. To get the logs of an application we need a datasource + field with the name of the datasource. This field will be set to the + configured logs datasource for a cluster, when the user doesn't provide + the field. The second field is a list of queries, for the application. + properties: + datasource: + type: string + queries: + items: + description: ApplicationLogsQuery represents a single query for + an application. 
A query is identified by a name, a query and + a list of fields, which should be shown in the results table. + If the fields list is empty, we show the complete document in + the table. + properties: + fields: + items: + type: string + type: array + name: + type: string + query: + type: string + type: object + type: array type: object metrics: description: ApplicationMetrics defines the structure of the metrics diff --git a/pkg/api/clusters/cluster/cluster.go b/pkg/api/clusters/cluster/cluster.go index d5a135ee0..c3eccbd67 100644 --- a/pkg/api/clusters/cluster/cluster.go +++ b/pkg/api/clusters/cluster/cluster.go @@ -132,6 +132,10 @@ func (c *Cluster) GetApplications(ctx context.Context, namespace string) ([]*pro application.Metrics.Datasource = c.options.datasources.Metrics } + if application.Logs != nil && application.Logs.Datasource == "" { + application.Logs.Datasource = c.options.datasources.Logs + } + applications = append(applications, application) } @@ -156,6 +160,10 @@ func (c *Cluster) GetApplication(ctx context.Context, namespace, name string) (* application.Metrics.Datasource = c.options.datasources.Metrics } + if application.Logs != nil && application.Logs.Datasource == "" { + application.Logs.Datasource = c.options.datasources.Logs + } + return application, nil } diff --git a/pkg/api/datasources/datasource/datasource.go b/pkg/api/datasources/datasource/datasource.go index 66e4289e4..4244d3bc3 100644 --- a/pkg/api/datasources/datasource/datasource.go +++ b/pkg/api/datasources/datasource/datasource.go @@ -3,6 +3,7 @@ package datasource import ( "context" + "github.com/kobsio/kobs/pkg/api/datasources/datasource/elasticsearch" "github.com/kobsio/kobs/pkg/api/datasources/datasource/prometheus" "github.com/kobsio/kobs/pkg/generated/proto" @@ -16,9 +17,10 @@ var ( // Config is the configuration for a datasource. Each datasource must contain a name and type. Each datasource also // contains a type specific configuration. 
type Config struct { - Name string `yaml:"name"` - Type string `yaml:"type"` - Prometheus prometheus.Config `yaml:"prometheus"` + Name string `yaml:"name"` + Type string `yaml:"type"` + Prometheus prometheus.Config `yaml:"prometheus"` + Elasticsearch elasticsearch.Config `yaml:"elasticsearch"` } // Datasource is the interface, which must be implemented by each datasource. Also when a datasource doesn't support @@ -28,7 +30,7 @@ type Datasource interface { GetDatasource() (string, string) GetVariables(ctx context.Context, options *proto.DatasourceOptions, variables []*proto.ApplicationMetricsVariable) ([]*proto.ApplicationMetricsVariable, error) GetMetrics(ctx context.Context, options *proto.DatasourceOptions, variables []*proto.ApplicationMetricsVariable, queries []*proto.ApplicationMetricsQuery) ([]*proto.DatasourceMetrics, []string, error) - GetLogs(ctx context.Context, options *proto.DatasourceOptions) error + GetLogs(ctx context.Context, scrollID string, options *proto.DatasourceOptions, query *proto.ApplicationLogsQuery) (int64, int64, string, string, []*proto.DatasourceLogsBucket, error) GetTraces(ctx context.Context, options *proto.DatasourceOptions) error } @@ -39,6 +41,9 @@ func New(config Config) (Datasource, error) { case "prometheus": log.WithFields(logrus.Fields{"name": config.Name, "type": config.Type}).Debugf("Load datasource.") return prometheus.New(config.Name, config.Prometheus) + case "elasticsearch": + log.WithFields(logrus.Fields{"name": config.Name, "type": config.Type}).Debugf("Load datasource.") + return elasticsearch.New(config.Name, config.Elasticsearch) default: log.WithFields(logrus.Fields{"type": config.Type}).Warnf("Invalid datasource.") return nil, nil diff --git a/pkg/api/datasources/datasource/elasticsearch/elasticsearch.go b/pkg/api/datasources/datasource/elasticsearch/elasticsearch.go new file mode 100644 index 000000000..feef3552f --- /dev/null +++ b/pkg/api/datasources/datasource/elasticsearch/elasticsearch.go @@ -0,0 +1,199 @@ 
+package elasticsearch + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + + "github.com/kobsio/kobs/pkg/api/datasources/datasource/shared" + "github.com/kobsio/kobs/pkg/generated/proto" + + "github.com/sirupsen/logrus" +) + +var ( + log = logrus.WithFields(logrus.Fields{"package": "elasticsearch"}) +) + +// Config contains all required fields to create a new Elasticsearch datasource. +type Config struct { + Address string `yaml:"address"` + Username string `yaml:"username"` + Password string `yaml:"password"` + Token string `yaml:"token"` +} + +// Elasticsearch implements the Elasticsearch datasource. +type Elasticsearch struct { + name string + client *http.Client + endpoint string +} + +// Response is the structure of successful Elasticsearch API call. +type Response struct { + ScrollID string `json:"_scroll_id"` + Took int64 `json:"took"` + TimedOut bool `json:"timed_out"` + Shards struct { + Total int64 `json:"total"` + Successful int64 `json:"successful"` + Skipped int64 `json:"skipped"` + Failed int64 `json:"failed"` + } `json:"_shards"` + Hits struct { + Total struct { + Value int64 `json:"value"` + Relation string `json:"relation"` + } `json:"total"` + Hits []map[string]interface{} `json:"hits"` + } `json:"hits"` + Aggregations struct { + LogCount struct { + Buckets []struct { + KeyAsString string `json:"key_as_string"` + Key int64 `json:"key"` + DocCount int64 `json:"doc_count"` + } `json:"buckets"` + } `json:"logcount"` + } `json:"aggregations"` +} + +// ResponseError is the structure of failed Elasticsearch API call. 
+type ResponseError struct { + Error struct { + RootCause []struct { + Type string `json:"type"` + Reason string `json:"reason"` + } `json:"root_cause"` + Type string `json:"type"` + Reason string `json:"reason"` + CausedBy struct { + Type string `json:"type"` + Reason string `json:"reason"` + } `json:"caused_by"` + } `json:"error"` + Status int `json:"status"` +} + +// GetDatasource returns the details for the datasource. Currently this is only the name and the type of the datasource. +func (e *Elasticsearch) GetDatasource() (string, string) { + return e.name, "elasticsearch" +} + +// GetVariables is not implemented for Elasticsearch. +func (e *Elasticsearch) GetVariables(ctx context.Context, options *proto.DatasourceOptions, variables []*proto.ApplicationMetricsVariable) ([]*proto.ApplicationMetricsVariable, error) { + return nil, fmt.Errorf("logs interface isn't implemented for elasticsearch") +} + +// GetMetrics is not implemented for Elasticsearch. +func (e *Elasticsearch) GetMetrics(ctx context.Context, options *proto.DatasourceOptions, variables []*proto.ApplicationMetricsVariable, queries []*proto.ApplicationMetricsQuery) ([]*proto.DatasourceMetrics, []string, error) { + return nil, nil, fmt.Errorf("logs interface isn't implemented for elasticsearch") +} + +// GetLogs implements the GetLogs method for the Elasticsearch datasource. If the request contains a scrollID, we will +// use this ID to retrieve more logs for a previous request. If the scroll ID is an empty string we will perform a new +// search request against the Elasticsearch API. +// If the request succeeds we will return the hits as string in the logs field. We also set the buckets to render a bar +// chart with the distribution of the log lines over the selected time range. If the request fails we parse the returned +// body into the error struct and return the error. 
+func (e *Elasticsearch) GetLogs(ctx context.Context, scrollID string, options *proto.DatasourceOptions, query *proto.ApplicationLogsQuery) (int64, int64, string, string, []*proto.DatasourceLogsBucket, error) { + var err error + var body []byte + var url string + + if scrollID == "" { + url = fmt.Sprintf("%s/_search?scroll=15m", e.endpoint) + body = []byte(fmt.Sprintf(`{"size":100,"sort":[{"@timestamp":{"order":"desc"}}],"query":{"bool":{"must":[{"range":{"@timestamp":{"gte":"%d","lte":"%d"}}},{"query_string":{"query":"%s"}}]}},"aggs":{"logcount":{"auto_date_histogram":{"field":"@timestamp","buckets":30}}}}`, options.TimeStart*1000, options.TimeEnd*1000, query.Query)) + } else { + url = fmt.Sprintf("%s/_search/scroll", e.endpoint) + body = []byte(`{"scroll" : "15m", "scroll_id" : "` + scrollID + `"}`) + } + + log.WithFields(logrus.Fields{"query": string(body)}).Debugf("Query body.") + + req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(body)) + if err != nil { + return 0, 0, "", "", nil, err + } + req.Header.Add("Content-Type", "application/json") + + resp, err := e.client.Do(req) + if err != nil { + return 0, 0, "", "", nil, err + } + + defer resp.Body.Close() + + if resp.StatusCode >= 200 && resp.StatusCode < 300 { + var res Response + + err = json.NewDecoder(resp.Body).Decode(&res) + if err != nil { + return 0, 0, "", "", nil, err + } + + log.WithFields(logrus.Fields{"took": res.Took, "hits": res.Hits.Total.Value}).Debugf("Query stats.") + + jsonString, err := json.Marshal(res.Hits.Hits) + if err != nil { + return 0, 0, "", "", nil, err + } + + var buckets []*proto.DatasourceLogsBucket + + for _, bucket := range res.Aggregations.LogCount.Buckets { + buckets = append(buckets, &proto.DatasourceLogsBucket{ + X: bucket.Key, + Y: bucket.DocCount, + }) + } + + return res.Hits.Total.Value, res.Took, res.ScrollID, string(jsonString), buckets, nil + } + + var res ResponseError + + err = json.NewDecoder(resp.Body).Decode(&res) + if err != nil { + return 0, 
0, "", "", nil, err + } + + log.WithFields(logrus.Fields{"type": res.Error.Type, "reason": res.Error.Reason}).Error("The query returned an error.") + + return 0, 0, "", "", nil, fmt.Errorf("%s: %s", res.Error.Type, res.Error.Reason) +} + +// GetTraces is not implemented for Elasticsearch. +func (e *Elasticsearch) GetTraces(ctx context.Context, options *proto.DatasourceOptions) error { + return fmt.Errorf("logs interface isn't implemented for elasticsearch") +} + +// New returns a new Elasticsearch datasource. We are using a simular logic like for Prometheus to create the http +// client for the calls against the Elasticsearch API. +func New(name string, config Config) (*Elasticsearch, error) { + roundTripper := shared.DefaultRoundTripper + + if config.Username != "" && config.Password != "" { + roundTripper = shared.BasicAuthTransport{ + Transport: roundTripper, + Username: config.Username, + Password: config.Password, + } + } + + if config.Token != "" { + roundTripper = shared.TokenAuthTransporter{ + Transport: roundTripper, + Token: config.Token, + } + } + + return &Elasticsearch{ + name: name, + client: &http.Client{Transport: roundTripper}, + endpoint: config.Address, + }, nil +} diff --git a/pkg/api/datasources/datasource/prometheus/prometheus.go b/pkg/api/datasources/datasource/prometheus/prometheus.go index d10b70b83..631824a1c 100644 --- a/pkg/api/datasources/datasource/prometheus/prometheus.go +++ b/pkg/api/datasources/datasource/prometheus/prometheus.go @@ -3,10 +3,10 @@ package prometheus import ( "context" "fmt" - "net/http" "strings" "time" + "github.com/kobsio/kobs/pkg/api/datasources/datasource/shared" "github.com/kobsio/kobs/pkg/generated/proto" "github.com/prometheus/client_golang/api" @@ -27,27 +27,6 @@ type Config struct { Token string `yaml:"token"` } -type basicAuthTransport struct { - Transport http.RoundTripper - username string - password string -} - -type tokenAuthTransporter struct { - Transport http.RoundTripper - token string -} - -func 
(bat basicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req.SetBasicAuth(bat.username, bat.password) - return bat.Transport.RoundTrip(req) -} - -func (tat tokenAuthTransporter) RoundTrip(req *http.Request) (*http.Response, error) { - req.Header.Set("Authorization", "Bearer "+tat.token) - return tat.Transport.RoundTrip(req) -} - // Prometheus implements the Prometheus datasource. type Prometheus struct { name string @@ -231,8 +210,8 @@ func (p *Prometheus) GetMetrics(ctx context.Context, options *proto.DatasourceOp } // GetLogs is not implemented for Prometheus. -func (p *Prometheus) GetLogs(ctx context.Context, options *proto.DatasourceOptions) error { - return fmt.Errorf("logs interface isn't implemented for prometheus") +func (p *Prometheus) GetLogs(ctx context.Context, scrollID string, options *proto.DatasourceOptions, query *proto.ApplicationLogsQuery) (int64, int64, string, string, []*proto.DatasourceLogsBucket, error) { + return 0, 0, "", "", nil, fmt.Errorf("logs interface isn't implemented for prometheus") } // GetTraces is not implemented for Prometheus. @@ -244,20 +223,20 @@ func (p *Prometheus) GetTraces(ctx context.Context, options *proto.DatasourceOpt // a Prometheus API client. We also set the round tripper for basic or token authentication, when the settings are // provided. 
func New(name string, config Config) (*Prometheus, error) { - roundTripper := api.DefaultRoundTripper + roundTripper := shared.DefaultRoundTripper if config.Username != "" && config.Password != "" { - roundTripper = basicAuthTransport{ + roundTripper = shared.BasicAuthTransport{ Transport: roundTripper, - username: config.Username, - password: config.Password, + Username: config.Username, + Password: config.Password, } } if config.Token != "" { - roundTripper = tokenAuthTransporter{ + roundTripper = shared.TokenAuthTransporter{ Transport: roundTripper, - token: config.Token, + Token: config.Token, } } diff --git a/pkg/api/datasources/datasource/shared/roundtripper.go b/pkg/api/datasources/datasource/shared/roundtripper.go new file mode 100644 index 000000000..61372fac2 --- /dev/null +++ b/pkg/api/datasources/datasource/shared/roundtripper.go @@ -0,0 +1,42 @@ +package shared + +import ( + "net" + "net/http" + "time" +) + +// DefaultRoundTripper is our default RoundTripper. +var DefaultRoundTripper http.RoundTripper = &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: (&net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 30 * time.Second, + }).DialContext, + TLSHandshakeTimeout: 10 * time.Second, +} + +// BasicAuthTransport is the struct to add basic auth to a RoundTripper. +type BasicAuthTransport struct { + Transport http.RoundTripper + Username string + Password string +} + +// RoundTrip implements the RoundTrip for our RoundTripper with basic auth support. +func (bat BasicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req.SetBasicAuth(bat.Username, bat.Password) + return bat.Transport.RoundTrip(req) +} + +// TokenAuthTransporter is the struct to add token auth to a RoundTripper. +type TokenAuthTransporter struct { + Transport http.RoundTripper + Token string +} + +// RoundTrip implements the RoundTrip for our RoundTripper with token auth support. 
+func (tat TokenAuthTransporter) RoundTrip(req *http.Request) (*http.Response, error) { + req.Header.Set("Authorization", "Bearer "+tat.Token) + return tat.Transport.RoundTrip(req) +} diff --git a/pkg/api/datasources/datasources.go b/pkg/api/datasources/datasources.go index ddbe94402..1741b9f61 100644 --- a/pkg/api/datasources/datasources.go +++ b/pkg/api/datasources/datasources.go @@ -111,6 +111,28 @@ func (d *Datasources) GetMetrics(ctx context.Context, getMetricsRequest *proto.G }, nil } +func (d *Datasources) GetLogs(ctx context.Context, getLogsRequest *proto.GetLogsRequest) (*proto.GetLogsResponse, error) { + log.WithFields(logrus.Fields{"name": getLogsRequest.Name}).Tracef("Get metrics.") + + ds := d.getDatasource(getLogsRequest.Name) + if ds == nil { + return nil, fmt.Errorf("invalid datasource name") + } + + hits, took, scrollID, logs, buckets, err := ds.GetLogs(ctx, getLogsRequest.ScrollID, getLogsRequest.Options, getLogsRequest.Query) + if err != nil { + return nil, err + } + + return &proto.GetLogsResponse{ + Hits: hits, + Took: took, + ScrollID: scrollID, + Logs: logs, + Buckets: buckets, + }, nil +} + // Load loads all given datasources from the configuration, so that we can use them within the datasources gRPC service. func Load(config []datasource.Config) (*Datasources, error) { var datasources []datasource.Datasource diff --git a/pkg/generated/proto/application.pb.go b/pkg/generated/proto/application.pb.go index 2d058ea83..623647147 100644 --- a/pkg/generated/proto/application.pb.go +++ b/pkg/generated/proto/application.pb.go @@ -567,10 +567,16 @@ func (x *ApplicationMetricsQuery) GetLabel() string { return "" } +// ApplicationLogs defines the structure of the logs section of an application. To get the logs of an application we +// need a datasource field with the name of the datasource. This field will be set to the configured logs datasource for +// a cluster, when the user doesn't provide the field. 
The second field is a list of queries, for the application. type ApplicationLogs struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + + Datasource string `protobuf:"bytes,1,opt,name=datasource,proto3" json:"datasource,omitempty"` + Queries []*ApplicationLogsQuery `protobuf:"bytes,2,rep,name=queries,proto3" json:"queries,omitempty"` } func (x *ApplicationLogs) Reset() { @@ -605,6 +611,86 @@ func (*ApplicationLogs) Descriptor() ([]byte, []int) { return file_application_proto_rawDescGZIP(), []int{7} } +func (x *ApplicationLogs) GetDatasource() string { + if x != nil { + return x.Datasource + } + return "" +} + +func (x *ApplicationLogs) GetQueries() []*ApplicationLogsQuery { + if x != nil { + return x.Queries + } + return nil +} + +// ApplicationLogsQuery represents a single query for an application. A query is identified by a name, a query and a +// list of fields, which should be shown in the results table. If the fields list is empty, we show the complete +// document in the table. 
+type ApplicationLogsQuery struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Query string `protobuf:"bytes,2,opt,name=query,proto3" json:"query,omitempty"` + Fields []string `protobuf:"bytes,3,rep,name=fields,proto3" json:"fields,omitempty"` +} + +func (x *ApplicationLogsQuery) Reset() { + *x = ApplicationLogsQuery{} + if protoimpl.UnsafeEnabled { + mi := &file_application_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ApplicationLogsQuery) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ApplicationLogsQuery) ProtoMessage() {} + +func (x *ApplicationLogsQuery) ProtoReflect() protoreflect.Message { + mi := &file_application_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ApplicationLogsQuery.ProtoReflect.Descriptor instead. 
+func (*ApplicationLogsQuery) Descriptor() ([]byte, []int) { + return file_application_proto_rawDescGZIP(), []int{8} +} + +func (x *ApplicationLogsQuery) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ApplicationLogsQuery) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *ApplicationLogsQuery) GetFields() []string { + if x != nil { + return x.Fields + } + return nil +} + type ApplicationTraces struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -614,7 +700,7 @@ type ApplicationTraces struct { func (x *ApplicationTraces) Reset() { *x = ApplicationTraces{} if protoimpl.UnsafeEnabled { - mi := &file_application_proto_msgTypes[8] + mi := &file_application_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -627,7 +713,7 @@ func (x *ApplicationTraces) String() string { func (*ApplicationTraces) ProtoMessage() {} func (x *ApplicationTraces) ProtoReflect() protoreflect.Message { - mi := &file_application_proto_msgTypes[8] + mi := &file_application_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -640,7 +726,7 @@ func (x *ApplicationTraces) ProtoReflect() protoreflect.Message { // Deprecated: Use ApplicationTraces.ProtoReflect.Descriptor instead. 
func (*ApplicationTraces) Descriptor() ([]byte, []int) { - return file_application_proto_rawDescGZIP(), []int{8} + return file_application_proto_rawDescGZIP(), []int{9} } var File_application_proto protoreflect.FileDescriptor @@ -723,12 +809,24 @@ var file_application_proto_rawDesc = []byte{ 0x65, 0x72, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x22, - 0x11, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x6f, - 0x67, 0x73, 0x22, 0x13, 0x0a, 0x11, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x73, 0x42, 0x2c, 0x5a, 0x2a, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x6f, 0x62, 0x73, 0x69, 0x6f, 0x2f, 0x6b, 0x6f, 0x62, - 0x73, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6e, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x6f, + 0x67, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x12, 0x3b, 0x0a, 0x07, 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x6f, 0x67, + 0x73, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x07, 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x22, + 0x58, 0x0a, 0x14, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x6f, + 0x67, 0x73, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x12, 0x0a, 
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x71, + 0x75, 0x65, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x22, 0x13, 0x0a, 0x11, 0x41, 0x70, 0x70, + 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x73, 0x42, 0x2c, + 0x5a, 0x2a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x6f, 0x62, + 0x73, 0x69, 0x6f, 0x2f, 0x6b, 0x6f, 0x62, 0x73, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -743,7 +841,7 @@ func file_application_proto_rawDescGZIP() []byte { return file_application_proto_rawDescData } -var file_application_proto_msgTypes = make([]protoimpl.MessageInfo, 9) +var file_application_proto_msgTypes = make([]protoimpl.MessageInfo, 10) var file_application_proto_goTypes = []interface{}{ (*Application)(nil), // 0: application.Application (*ApplicationLink)(nil), // 1: application.ApplicationLink @@ -753,23 +851,25 @@ var file_application_proto_goTypes = []interface{}{ (*ApplicationMetricsChart)(nil), // 5: application.ApplicationMetricsChart (*ApplicationMetricsQuery)(nil), // 6: application.ApplicationMetricsQuery (*ApplicationLogs)(nil), // 7: application.ApplicationLogs - (*ApplicationTraces)(nil), // 8: application.ApplicationTraces + (*ApplicationLogsQuery)(nil), // 8: application.ApplicationLogsQuery + (*ApplicationTraces)(nil), // 9: application.ApplicationTraces } var file_application_proto_depIdxs = []int32{ - 1, // 0: application.Application.links:type_name -> application.ApplicationLink - 2, // 1: application.Application.resources:type_name -> application.ApplicationResources - 3, // 2: 
application.Application.metrics:type_name -> application.ApplicationMetrics - 7, // 3: application.Application.logs:type_name -> application.ApplicationLogs - 8, // 4: application.Application.traces:type_name -> application.ApplicationTraces - 5, // 5: application.ApplicationMetrics.health:type_name -> application.ApplicationMetricsChart - 4, // 6: application.ApplicationMetrics.variables:type_name -> application.ApplicationMetricsVariable - 5, // 7: application.ApplicationMetrics.charts:type_name -> application.ApplicationMetricsChart - 6, // 8: application.ApplicationMetricsChart.queries:type_name -> application.ApplicationMetricsQuery - 9, // [9:9] is the sub-list for method output_type - 9, // [9:9] is the sub-list for method input_type - 9, // [9:9] is the sub-list for extension type_name - 9, // [9:9] is the sub-list for extension extendee - 0, // [0:9] is the sub-list for field type_name + 1, // 0: application.Application.links:type_name -> application.ApplicationLink + 2, // 1: application.Application.resources:type_name -> application.ApplicationResources + 3, // 2: application.Application.metrics:type_name -> application.ApplicationMetrics + 7, // 3: application.Application.logs:type_name -> application.ApplicationLogs + 9, // 4: application.Application.traces:type_name -> application.ApplicationTraces + 5, // 5: application.ApplicationMetrics.health:type_name -> application.ApplicationMetricsChart + 4, // 6: application.ApplicationMetrics.variables:type_name -> application.ApplicationMetricsVariable + 5, // 7: application.ApplicationMetrics.charts:type_name -> application.ApplicationMetricsChart + 6, // 8: application.ApplicationMetricsChart.queries:type_name -> application.ApplicationMetricsQuery + 8, // 9: application.ApplicationLogs.queries:type_name -> application.ApplicationLogsQuery + 10, // [10:10] is the sub-list for method output_type + 10, // [10:10] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name 
+ 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name } func init() { file_application_proto_init() } @@ -875,6 +975,18 @@ func file_application_proto_init() { } } file_application_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ApplicationLogsQuery); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_application_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ApplicationTraces); i { case 0: return &v.state @@ -893,7 +1005,7 @@ func file_application_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_application_proto_rawDesc, NumEnums: 0, - NumMessages: 9, + NumMessages: 10, NumExtensions: 0, NumServices: 0, }, diff --git a/pkg/generated/proto/application_deepcopy.gen.go b/pkg/generated/proto/application_deepcopy.gen.go index 324c48b51..5d88702be 100644 --- a/pkg/generated/proto/application_deepcopy.gen.go +++ b/pkg/generated/proto/application_deepcopy.gen.go @@ -182,6 +182,27 @@ func (in *ApplicationLogs) DeepCopyInterface() interface{} { return in.DeepCopy() } +// DeepCopyInto supports using ApplicationLogsQuery within kubernetes types, where deepcopy-gen is used. +func (in *ApplicationLogsQuery) DeepCopyInto(out *ApplicationLogsQuery) { + p := proto.Clone(in).(*ApplicationLogsQuery) + *out = *p +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ApplicationLogsQuery. Required by controller-gen. +func (in *ApplicationLogsQuery) DeepCopy() *ApplicationLogsQuery { + if in == nil { + return nil + } + out := new(ApplicationLogsQuery) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInterface is an autogenerated deepcopy function, copying the receiver, creating a new ApplicationLogsQuery. Required by controller-gen. 
+func (in *ApplicationLogsQuery) DeepCopyInterface() interface{} { + return in.DeepCopy() +} + // DeepCopyInto supports using ApplicationTraces within kubernetes types, where deepcopy-gen is used. func (in *ApplicationTraces) DeepCopyInto(out *ApplicationTraces) { p := proto.Clone(in).(*ApplicationTraces) diff --git a/pkg/generated/proto/datasources.pb.go b/pkg/generated/proto/datasources.pb.go index a8c282d25..b38711fab 100644 --- a/pkg/generated/proto/datasources.pb.go +++ b/pkg/generated/proto/datasources.pb.go @@ -515,13 +515,17 @@ func (x *GetMetricsResponse) GetInterpolatedQueries() []string { return nil } +// GetLogsRequest is the structure of a call to get the logs for a query. It must contain the name of the datasource, +// an optional scroll id, which can be used for pagination, the datasource options and a query. type GetLogsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Options *DatasourceOptions `protobuf:"bytes,2,opt,name=options,proto3" json:"options,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + ScrollID string `protobuf:"bytes,2,opt,name=scrollID,proto3" json:"scrollID,omitempty"` + Options *DatasourceOptions `protobuf:"bytes,3,opt,name=options,proto3" json:"options,omitempty"` + Query *ApplicationLogsQuery `protobuf:"bytes,4,opt,name=query,proto3" json:"query,omitempty"` } func (x *GetLogsRequest) Reset() { @@ -563,6 +567,13 @@ func (x *GetLogsRequest) GetName() string { return "" } +func (x *GetLogsRequest) GetScrollID() string { + if x != nil { + return x.ScrollID + } + return "" +} + func (x *GetLogsRequest) GetOptions() *DatasourceOptions { if x != nil { return x.Options @@ -570,10 +581,26 @@ func (x *GetLogsRequest) GetOptions() *DatasourceOptions { return nil } +func (x *GetLogsRequest) GetQuery() *ApplicationLogsQuery { + if x != nil { + return x.Query 
+ } + return nil +} + +// GetLogsResponse is the response for a GetLogs call. It contains the hits (number of documentes), the time, which was +// needed to execute the query and a scroll id for pagination. It also contains all logs as single string, which must +// be parsed in the frontend and a list of buckets, for the logs chart. type GetLogsResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + + Hits int64 `protobuf:"varint,1,opt,name=hits,proto3" json:"hits,omitempty"` + Took int64 `protobuf:"varint,2,opt,name=took,proto3" json:"took,omitempty"` + ScrollID string `protobuf:"bytes,3,opt,name=scrollID,proto3" json:"scrollID,omitempty"` + Logs string `protobuf:"bytes,4,opt,name=logs,proto3" json:"logs,omitempty"` + Buckets []*DatasourceLogsBucket `protobuf:"bytes,5,rep,name=buckets,proto3" json:"buckets,omitempty"` } func (x *GetLogsResponse) Reset() { @@ -608,6 +635,41 @@ func (*GetLogsResponse) Descriptor() ([]byte, []int) { return file_datasources_proto_rawDescGZIP(), []int{10} } +func (x *GetLogsResponse) GetHits() int64 { + if x != nil { + return x.Hits + } + return 0 +} + +func (x *GetLogsResponse) GetTook() int64 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *GetLogsResponse) GetScrollID() string { + if x != nil { + return x.ScrollID + } + return "" +} + +func (x *GetLogsResponse) GetLogs() string { + if x != nil { + return x.Logs + } + return "" +} + +func (x *GetLogsResponse) GetBuckets() []*DatasourceLogsBucket { + if x != nil { + return x.Buckets + } + return nil +} + type GetTracesRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -897,6 +959,63 @@ func (x *DatasourceMetricsData) GetY() float64 { return 0 } +// DatasourceLogsBucket is one bucket for the logs bar chart. Each bucket must contain a timestamp (x value) and the +// number of log lines for this timestamp (y value). 
+type DatasourceLogsBucket struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + X int64 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + Y int64 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` +} + +func (x *DatasourceLogsBucket) Reset() { + *x = DatasourceLogsBucket{} + if protoimpl.UnsafeEnabled { + mi := &file_datasources_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DatasourceLogsBucket) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DatasourceLogsBucket) ProtoMessage() {} + +func (x *DatasourceLogsBucket) ProtoReflect() protoreflect.Message { + mi := &file_datasources_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DatasourceLogsBucket.ProtoReflect.Descriptor instead. 
+func (*DatasourceLogsBucket) Descriptor() ([]byte, []int) { + return file_datasources_proto_rawDescGZIP(), []int{16} +} + +func (x *DatasourceLogsBucket) GetX() int64 { + if x != nil { + return x.X + } + return 0 +} + +func (x *DatasourceLogsBucket) GetY() int64 { + if x != nil { + return x.Y + } + return 0 +} + var File_datasources_proto protoreflect.FileDescriptor var file_datasources_proto_rawDesc = []byte{ @@ -961,39 +1080,57 @@ var file_datasources_proto_rawDesc = []byte{ 0x13, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x6f, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x51, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x13, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x6f, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x51, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x22, - 0x5e, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, - 0x11, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x60, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x63, 0x65, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x07, 0x6f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 
0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x63, 0x65, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6b, 0x0a, 0x11, 0x44, 0x61, 0x74, - 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, - 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x18, 0x0a, 0x07, - 0x74, 0x69, 0x6d, 0x65, 0x45, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x74, - 0x69, 0x6d, 0x65, 0x45, 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, 0x6f, - 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x85, 0x01, 0x0a, 0x11, 0x44, 0x61, 0x74, 0x61, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x14, 0x0a, 0x05, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, - 0x03, 0x6d, 0x69, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x61, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x01, 0x52, 0x03, 0x6d, 0x61, 0x78, 0x12, 0x36, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x73, 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x33, - 0x0a, 0x15, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x72, - 0x69, 0x63, 0x73, 0x44, 0x61, 0x74, 0x61, 0x12, 0x0c, 0x0a, 0x01, 0x78, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x03, 0x52, 0x01, 0x78, 0x12, 0x0c, 0x0a, 0x01, 0x79, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x01, + 0xb3, 0x01, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, + 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, + 0x49, 0x44, 0x12, 0x38, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x73, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x37, 0x0a, 0x05, + 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x61, 0x70, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x6f, 0x67, 0x73, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x05, + 0x71, 0x75, 0x65, 0x72, 0x79, 0x22, 0xa6, 0x01, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x69, 0x74, + 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x68, 0x69, 0x74, 0x73, 0x12, 0x12, 0x0a, + 0x04, 0x74, 0x6f, 0x6f, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x74, 0x6f, 0x6f, + 0x6b, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x49, 0x44, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x49, 0x44, 0x12, 0x12, 0x0a, + 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x6f, 0x67, + 0x73, 0x12, 0x3b, 0x0a, 0x07, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x2e, 0x44, 0x61, 0x74, 
0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x6f, 0x67, 0x73, 0x42, + 0x75, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x07, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x22, 0x60, + 0x0a, 0x10, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6b, 0x0a, 0x11, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, + 0x6d, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, + 0x45, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x45, + 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x85, 0x01, 0x0a, 0x11, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x10, + 0x0a, 0x03, 0x6d, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x03, 0x6d, 0x69, 0x6e, + 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x61, 0x78, 0x18, 0x03, 0x20, 
0x01, 0x28, 0x01, 0x52, 0x03, 0x6d, + 0x61, 0x78, 0x12, 0x36, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x22, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x44, + 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, + 0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x33, 0x0a, 0x15, 0x44, 0x61, + 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x44, + 0x61, 0x74, 0x61, 0x12, 0x0c, 0x0a, 0x01, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x01, + 0x78, 0x12, 0x0c, 0x0a, 0x01, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x01, 0x79, 0x22, + 0x32, 0x0a, 0x14, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x6f, 0x67, + 0x73, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x0c, 0x0a, 0x01, 0x78, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x01, 0x78, 0x12, 0x0c, 0x0a, 0x01, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x01, 0x79, 0x32, 0x82, 0x04, 0x0a, 0x0b, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x5b, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x22, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x73, 0x6f, 0x75, 0x72, @@ -1044,7 +1181,7 @@ func file_datasources_proto_rawDescGZIP() []byte { return file_datasources_proto_rawDescData } -var file_datasources_proto_msgTypes = make([]protoimpl.MessageInfo, 16) +var file_datasources_proto_msgTypes = make([]protoimpl.MessageInfo, 17) var file_datasources_proto_goTypes = []interface{}{ (*Datasource)(nil), // 0: datasources.Datasource (*GetDatasourcesRequest)(nil), // 1: datasources.GetDatasourcesRequest @@ -1062,39 +1199,43 @@ var file_datasources_proto_goTypes = []interface{}{ (*DatasourceOptions)(nil), // 13: datasources.DatasourceOptions (*DatasourceMetrics)(nil), // 14: datasources.DatasourceMetrics (*DatasourceMetricsData)(nil), // 15: 
datasources.DatasourceMetricsData - (*ApplicationMetricsVariable)(nil), // 16: application.ApplicationMetricsVariable - (*ApplicationMetricsQuery)(nil), // 17: application.ApplicationMetricsQuery + (*DatasourceLogsBucket)(nil), // 16: datasources.DatasourceLogsBucket + (*ApplicationMetricsVariable)(nil), // 17: application.ApplicationMetricsVariable + (*ApplicationMetricsQuery)(nil), // 18: application.ApplicationMetricsQuery + (*ApplicationLogsQuery)(nil), // 19: application.ApplicationLogsQuery } var file_datasources_proto_depIdxs = []int32{ 0, // 0: datasources.GetDatasourcesResponse.datasources:type_name -> datasources.Datasource 0, // 1: datasources.GetDatasourceResponse.datasource:type_name -> datasources.Datasource 13, // 2: datasources.GetVariablesRequest.options:type_name -> datasources.DatasourceOptions - 16, // 3: datasources.GetVariablesRequest.variables:type_name -> application.ApplicationMetricsVariable - 16, // 4: datasources.GetVariablesResponse.variables:type_name -> application.ApplicationMetricsVariable + 17, // 3: datasources.GetVariablesRequest.variables:type_name -> application.ApplicationMetricsVariable + 17, // 4: datasources.GetVariablesResponse.variables:type_name -> application.ApplicationMetricsVariable 13, // 5: datasources.GetMetricsRequest.options:type_name -> datasources.DatasourceOptions - 16, // 6: datasources.GetMetricsRequest.variables:type_name -> application.ApplicationMetricsVariable - 17, // 7: datasources.GetMetricsRequest.queries:type_name -> application.ApplicationMetricsQuery + 17, // 6: datasources.GetMetricsRequest.variables:type_name -> application.ApplicationMetricsVariable + 18, // 7: datasources.GetMetricsRequest.queries:type_name -> application.ApplicationMetricsQuery 14, // 8: datasources.GetMetricsResponse.metrics:type_name -> datasources.DatasourceMetrics 13, // 9: datasources.GetLogsRequest.options:type_name -> datasources.DatasourceOptions - 13, // 10: datasources.GetTracesRequest.options:type_name -> 
datasources.DatasourceOptions - 15, // 11: datasources.DatasourceMetrics.data:type_name -> datasources.DatasourceMetricsData - 1, // 12: datasources.Datasources.GetDatasources:input_type -> datasources.GetDatasourcesRequest - 3, // 13: datasources.Datasources.GetDatasource:input_type -> datasources.GetDatasourceRequest - 5, // 14: datasources.Datasources.GetVariables:input_type -> datasources.GetVariablesRequest - 7, // 15: datasources.Datasources.GetMetrics:input_type -> datasources.GetMetricsRequest - 9, // 16: datasources.Datasources.GetLogs:input_type -> datasources.GetLogsRequest - 11, // 17: datasources.Datasources.GetTraces:input_type -> datasources.GetTracesRequest - 2, // 18: datasources.Datasources.GetDatasources:output_type -> datasources.GetDatasourcesResponse - 4, // 19: datasources.Datasources.GetDatasource:output_type -> datasources.GetDatasourceResponse - 6, // 20: datasources.Datasources.GetVariables:output_type -> datasources.GetVariablesResponse - 8, // 21: datasources.Datasources.GetMetrics:output_type -> datasources.GetMetricsResponse - 10, // 22: datasources.Datasources.GetLogs:output_type -> datasources.GetLogsResponse - 12, // 23: datasources.Datasources.GetTraces:output_type -> datasources.GetTracesResponse - 18, // [18:24] is the sub-list for method output_type - 12, // [12:18] is the sub-list for method input_type - 12, // [12:12] is the sub-list for extension type_name - 12, // [12:12] is the sub-list for extension extendee - 0, // [0:12] is the sub-list for field type_name + 19, // 10: datasources.GetLogsRequest.query:type_name -> application.ApplicationLogsQuery + 16, // 11: datasources.GetLogsResponse.buckets:type_name -> datasources.DatasourceLogsBucket + 13, // 12: datasources.GetTracesRequest.options:type_name -> datasources.DatasourceOptions + 15, // 13: datasources.DatasourceMetrics.data:type_name -> datasources.DatasourceMetricsData + 1, // 14: datasources.Datasources.GetDatasources:input_type -> 
datasources.GetDatasourcesRequest + 3, // 15: datasources.Datasources.GetDatasource:input_type -> datasources.GetDatasourceRequest + 5, // 16: datasources.Datasources.GetVariables:input_type -> datasources.GetVariablesRequest + 7, // 17: datasources.Datasources.GetMetrics:input_type -> datasources.GetMetricsRequest + 9, // 18: datasources.Datasources.GetLogs:input_type -> datasources.GetLogsRequest + 11, // 19: datasources.Datasources.GetTraces:input_type -> datasources.GetTracesRequest + 2, // 20: datasources.Datasources.GetDatasources:output_type -> datasources.GetDatasourcesResponse + 4, // 21: datasources.Datasources.GetDatasource:output_type -> datasources.GetDatasourceResponse + 6, // 22: datasources.Datasources.GetVariables:output_type -> datasources.GetVariablesResponse + 8, // 23: datasources.Datasources.GetMetrics:output_type -> datasources.GetMetricsResponse + 10, // 24: datasources.Datasources.GetLogs:output_type -> datasources.GetLogsResponse + 12, // 25: datasources.Datasources.GetTraces:output_type -> datasources.GetTracesResponse + 20, // [20:26] is the sub-list for method output_type + 14, // [14:20] is the sub-list for method input_type + 14, // [14:14] is the sub-list for extension type_name + 14, // [14:14] is the sub-list for extension extendee + 0, // [0:14] is the sub-list for field type_name } func init() { file_datasources_proto_init() } @@ -1296,6 +1437,18 @@ func file_datasources_proto_init() { return nil } } + file_datasources_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DatasourceLogsBucket); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } type x struct{} out := protoimpl.TypeBuilder{ @@ -1303,7 +1456,7 @@ func file_datasources_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_datasources_proto_rawDesc, NumEnums: 0, - NumMessages: 16, + NumMessages: 17, NumExtensions: 0, 
NumServices: 1, }, diff --git a/pkg/generated/proto/datasources_deepcopy.gen.go b/pkg/generated/proto/datasources_deepcopy.gen.go index 782c70cc4..d7e8fa566 100644 --- a/pkg/generated/proto/datasources_deepcopy.gen.go +++ b/pkg/generated/proto/datasources_deepcopy.gen.go @@ -349,3 +349,24 @@ func (in *DatasourceMetricsData) DeepCopy() *DatasourceMetricsData { func (in *DatasourceMetricsData) DeepCopyInterface() interface{} { return in.DeepCopy() } + +// DeepCopyInto supports using DatasourceLogsBucket within kubernetes types, where deepcopy-gen is used. +func (in *DatasourceLogsBucket) DeepCopyInto(out *DatasourceLogsBucket) { + p := proto.Clone(in).(*DatasourceLogsBucket) + *out = *p +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasourceLogsBucket. Required by controller-gen. +func (in *DatasourceLogsBucket) DeepCopy() *DatasourceLogsBucket { + if in == nil { + return nil + } + out := new(DatasourceLogsBucket) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInterface is an autogenerated deepcopy function, copying the receiver, creating a new DatasourceLogsBucket. Required by controller-gen. +func (in *DatasourceLogsBucket) DeepCopyInterface() interface{} { + return in.DeepCopy() +} diff --git a/proto/application.proto b/proto/application.proto index 6ced1d32f..2405ea385 100644 --- a/proto/application.proto +++ b/proto/application.proto @@ -83,6 +83,21 @@ message ApplicationMetricsQuery { string label = 2; } -message ApplicationLogs {} +// ApplicationLogs defines the structure of the logs section of an application. To get the logs of an application we +// need a datasource field with the name of the datasource. This field will be set to the configured logs datasource for +// a cluster, when the user doesn't provide the field. The second field is a list of queries, for the application. 
+message ApplicationLogs { + string datasource = 1; + repeated ApplicationLogsQuery queries = 2; +} + +// ApplicationLogsQuery represents a single query for an application. A query is identified by a name, a query and a +// list of fields, which should be shown in the results table. If the fields list is empty, we show the complete +// document in the table. +message ApplicationLogsQuery { + string name = 1; + string query = 2; + repeated string fields = 3; +} message ApplicationTraces {} diff --git a/proto/datasources.proto b/proto/datasources.proto index 2ba306020..9bbae5edf 100644 --- a/proto/datasources.proto +++ b/proto/datasources.proto @@ -79,12 +79,25 @@ message GetMetricsResponse { repeated string interpolatedQueries = 2; } +// GetLogsRequest is the structure of a call to get the logs for a query. It must contain the name of the datasource, +// an optional scroll id, which can be used for pagination, the datasource options and a query. message GetLogsRequest { string name = 1; - DatasourceOptions options = 2; + string scrollID = 2; + DatasourceOptions options = 3; + application.ApplicationLogsQuery query = 4; } -message GetLogsResponse {} +// GetLogsResponse is the response for a GetLogs call. It contains the hits (number of documentes), the time, which was +// needed to execute the query and a scroll id for pagination. It also contains all logs as single string, which must +// be parsed in the frontend and a list of buckets, for the logs chart. +message GetLogsResponse { + int64 hits = 1; + int64 took = 2; + string scrollID = 3; + string logs = 4; + repeated DatasourceLogsBucket buckets = 5; +} message GetTracesRequest { string name = 1; @@ -117,3 +130,10 @@ message DatasourceMetricsData { int64 x = 1; double y = 2; } + +// DatasourceLogsBucket is one bucket for the logs bar chart. Each bucket must contain a timestamp (x value) and the +// number of log lines for this timestamp (y value). +message DatasourceLogsBucket { + int64 x = 1; + int64 y = 2; +}