diff --git a/parseable-backstage-plugin/plugins/parseable-logstream/src/api/ParseableClient.ts b/parseable-backstage-plugin/plugins/parseable-logstream/src/api/ParseableClient.ts index adbc7ca..b2157f2 100644 --- a/parseable-backstage-plugin/plugins/parseable-logstream/src/api/ParseableClient.ts +++ b/parseable-backstage-plugin/plugins/parseable-logstream/src/api/ParseableClient.ts @@ -190,7 +190,7 @@ export class ParseableClient { } /** - * Get logs for a specific dataset + * Get logs for a specific dataset using PostgreSQL syntax */ async getLogs( baseUrl: string, @@ -212,24 +212,61 @@ export class ParseableClient { startTime = fiveMinutesAgo.toISOString(); } - // Build the SQL query - let sqlQuery = `select * from ${dataset}`; + // We'll use ISO format timestamps directly in the SQL query + // but keep the original ISO strings for the request body + + // Build the SQL query with proper PostgreSQL syntax + let sqlQuery = `SELECT * FROM ${dataset}`; + + // Add WHERE clause for search query if provided + const whereConditions = []; + + // Add search query condition if provided if (query && query.trim() !== '') { - sqlQuery += ` where ${query}`; + // If the query is a simple text search, wrap it in a LIKE clause + if (!query.includes('=') && !query.includes('<') && !query.includes('>') && + !query.toLowerCase().includes(' and ') && !query.toLowerCase().includes(' or ')) { + // Search in all fields using ILIKE for case-insensitive search + whereConditions.push(`body ILIKE '%${query}%'`); + } else { + // User provided a more complex query, use it as is + whereConditions.push(query); + } + } + + // Add time range conditions + if (startTime) { + whereConditions.push(`p_timestamp >= '${startTime}'`); } + if (endTime) { + whereConditions.push(`p_timestamp <= '${endTime}'`); + } + + // Combine all WHERE conditions + if (whereConditions.length > 0) { + sqlQuery += ` WHERE ${whereConditions.join(' AND ')}`; + } + + // Add ORDER BY to get newest logs first + sqlQuery += ` ORDER BY p_timestamp DESC`; + // Add limit to the query if specified if (limit && limit > 0) { - sqlQuery += ` limit ${limit}`; + sqlQuery += ` LIMIT ${limit}`; } const requestBody = { query: sqlQuery, - startTime: startTime, - endTime: endTime, + streamName: dataset, + startTime: startTime || '', + endTime: endTime || '' }; + console.log('Request body:', JSON.stringify(requestBody, null, 2)); + try { + console.log('Executing query:', sqlQuery); const response = await this.fetchApi.fetch(`${baseUrl}/api/v1/query`, { method: 'POST', @@ -254,6 +291,44 @@ export class ParseableClient { } } + /** + * Get logs directly from the logstream API + * This is a simpler approach that doesn't use the query API + */ + async getLogsByStream( + baseUrl: string, + dataset: string, + limit: number = 100 + ): Promise { + const headers = await this.getAuthHeader(baseUrl); + + try { + // Use the logstream API endpoint directly + const url = `${baseUrl}/api/v1/logstream/${dataset}/logs?limit=${limit}`; + + console.log('Fetching logs from logstream API:', url); + + const response = await this.fetchApi.fetch(url, { + headers, + }); + + if (!response.ok) { + if (response.status === 401 || response.status === 403) { + throw new Error('Authentication failed. Please check your Parseable credentials.'); + } + throw new Error(`Failed to fetch logs: ${response.statusText}`); + } + + const data = await response.json(); + return Array.isArray(data) ? 
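
Note on the query builder above: `getLogs` interpolates the raw search text, the timestamps, and the dataset name directly into the SQL string it sends to `/api/v1/query`. A minimal hardening sketch, assuming Parseable accepts standard single-quoted SQL literals (both helper names are hypothetical, not part of this change):

```ts
// Hypothetical hardening helpers -- sketch only, not part of this PR.

/** Escape a value for use inside a single-quoted SQL string literal. */
function escapeSqlLiteral(value: string): string {
  // Standard SQL escaping: double any embedded single quotes.
  return value.replace(/'/g, "''");
}

/** Reject dataset names that cannot safely be used as an identifier. */
function assertValidStreamName(name: string): string {
  if (!/^[A-Za-z_][A-Za-z0-9_-]*$/.test(name)) {
    throw new Error(`Invalid stream name: ${name}`);
  }
  return name;
}

// Usage sketch inside getLogs:
//   let sqlQuery = `SELECT * FROM ${assertValidStreamName(dataset)}`;
//   whereConditions.push(`body ILIKE '%${escapeSqlLiteral(query)}%'`);
```
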
data.map(entry => LogEntrySchema.parse(entry)) : []; + } catch (e) { + if (e instanceof z.ZodError) { + throw new Error(`Invalid log format from Parseable API: ${e.message}`); + } + throw e; + } + } + /** * Export logs to CSV */ diff --git a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/EntityParseableLogstreamContent.tsx b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/EntityParseableLogstreamContent.tsx index e43e3b0..1305840 100644 --- a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/EntityParseableLogstreamContent.tsx +++ b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/EntityParseableLogstreamContent.tsx @@ -1,6 +1,6 @@ import { Grid } from '@material-ui/core'; import { LogStreamCard } from './LogStreamCard'; - +import React from 'react'; /** * Component to render Parseable logstream content on an entity page */ diff --git a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/LogStreamCard.tsx b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/LogStreamCard.tsx index 9a94218..626fee1 100644 --- a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/LogStreamCard.tsx +++ b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/LogStreamCard.tsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, useRef } from 'react'; import { useApi } from '@backstage/core-plugin-api'; import { useEntity } from '@backstage/plugin-catalog-react'; import { @@ -87,8 +87,22 @@ export const LogStreamCard = ({ title = 'Parseable Logs' }: LogStreamCardProps) const [selectedDataset, setSelectedDataset] = useState(''); const [logs, setLogs] = useState([]); + const [error, setError] = useState(null); const [isLiveTail, setIsLiveTail] = useState(false); - const [error, setError] = useState(undefined); + + // Create a ref for the log container to handle auto-scrolling + const logContainerRef = useRef(null); + + // Auto-scroll to bottom when logs update during live tail + useEffect(() => { + if (isLiveTail && logContainerRef.current) { + // Safely access scrollHeight with null check + const container = logContainerRef.current; + if (container) { + container.scrollTop = container.scrollHeight; + } + } + }, [logs, isLiveTail]); const baseUrl = entity.metadata.annotations?.['parseable.io/base-url'] || ''; @@ -110,7 +124,7 @@ export const LogStreamCard = ({ title = 'Parseable Logs' }: LogStreamCardProps) try { const logData = await parseableClient.getLogs(baseUrl, selectedDataset); setLogs(logData); - setError(undefined); + setError(null); } catch (err) { setError(err instanceof Error ? 
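
The `getLogsByStream` method above validates every row with `LogEntrySchema`, whose definition is outside this diff. Assuming Parseable rows are loosely shaped JSON, a permissive schema along these lines would match how the entries are used here — a sketch of one plausible definition, not the plugin's actual schema:

```ts
import { z } from 'zod';

// Assumed shape: known timestamp fields are optional strings,
// everything else passes through untyped.
const LogEntrySchema = z
  .object({
    p_timestamp: z.string().optional(),
    event_time: z.string().optional(),
  })
  .catchall(z.unknown());

type LogEntry = z.infer<typeof LogEntrySchema>;
```
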
err : new Error(String(err))); setIsLiveTail(false); @@ -159,12 +173,30 @@ export const LogStreamCard = ({ title = 'Parseable Logs' }: LogStreamCardProps) // Render log content with proper formatting const renderLogContent = (log: LogEntry) => { - // Convert log entry to string representation - const logString = JSON.stringify(log, null, 2); + // Convert log entry to string representation with safe handling of circular references + let logString = ''; + try { + // Use a replacer function to handle circular references + const seen = new WeakSet(); + logString = JSON.stringify(log, (_key, value) => { + // Handle objects to avoid circular references + if (typeof value === 'object' && value !== null) { + if (seen.has(value)) { + return '[Circular Reference]'; + } + seen.add(value); + } + return value; + }, 2); + } catch (error) { + // Fallback if JSON.stringify fails + const err = error instanceof Error ? error : new Error(String(error)); + logString = `[Error stringifying log: ${err.message}]`; + } return (
-      {logString}
+      <pre>
+        {logString}
+      </pre>
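
The WeakSet replacer above is useful beyond `renderLogContent`. Extracted as a standalone helper it would look roughly like this (hypothetical name, same logic as the inline version):

```ts
// Sketch: the circular-reference-safe stringifier from renderLogContent,
// pulled out for reuse. Not part of this PR.
function safeStringify(value: unknown, indent: number = 2): string {
  const seen = new WeakSet<object>();
  try {
    return JSON.stringify(
      value,
      (_key, v) => {
        if (typeof v === 'object' && v !== null) {
          if (seen.has(v)) return '[Circular Reference]';
          seen.add(v);
        }
        return v;
      },
      indent,
    );
  } catch (e) {
    const err = e instanceof Error ? e : new Error(String(e));
    return `[Error stringifying log: ${err.message}]`;
  }
}
```
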
); }; @@ -259,7 +291,7 @@ export const LogStreamCard = ({ title = 'Parseable Logs' }: LogStreamCardProps) )} {selectedDataset ? ( - + {logs.length === 0 ? ( No logs found for the selected dataset diff --git a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/ParseableLogstreamPage.tsx b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/ParseableLogstreamPage.tsx index 6438d4c..6013a8a 100644 --- a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/ParseableLogstreamPage.tsx +++ b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/ParseableLogstreamPage.tsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, useCallback } from 'react'; import { useApi } from '@backstage/core-plugin-api'; import { useEntity } from '@backstage/plugin-catalog-react'; import { @@ -15,7 +15,6 @@ import { Button, FormControl, InputLabel, - MenuItem, Select, makeStyles, TextField, @@ -23,15 +22,21 @@ import { Typography, IconButton, Tooltip, + MenuItem, + DialogTitle, + DialogContent, + DialogActions, + Dialog, } from '@material-ui/core'; +import CloseIcon from '@material-ui/icons/Close'; import SearchIcon from '@material-ui/icons/Search'; import PlayArrowIcon from '@material-ui/icons/PlayArrow'; import PauseIcon from '@material-ui/icons/Pause'; import GetAppIcon from '@material-ui/icons/GetApp'; +import CheckIcon from '@material-ui/icons/Check'; import { useAsync } from 'react-use'; import { parseableApiRef } from '../api'; -import type { LogEntry, ParseableSchemaResponse } from '../api/ParseableClient'; -import { error } from 'console'; +import type { LogEntry } from '../api/ParseableClient'; const useStyles = makeStyles(theme => ({ root: { @@ -83,6 +88,7 @@ const useStyles = makeStyles(theme => ({ display: 'flex', gap: theme.spacing(2), alignItems: 'center', + marginBottom: theme.spacing(2), }, schemaCard: { marginBottom: theme.spacing(2), @@ -93,6 +99,27 @@ const useStyles = makeStyles(theme => ({ error: { color: theme.palette.error.main, }, + buttonGroup: { + display: 'flex', + gap: theme.spacing(1), + alignItems: 'center', + marginTop: theme.spacing(1), + marginBottom: theme.spacing(1), + }, + actionButton: { + borderRadius: 20, + boxShadow: 'none', + textTransform: 'none', + fontWeight: 500, + padding: theme.spacing(0.5, 2), + }, + iconButton: { + padding: theme.spacing(1), + }, + searchFieldEnhanced: { + marginBottom: theme.spacing(1), + width: '100%', + }, warn: { color: theme.palette.warning.main, }, @@ -114,9 +141,12 @@ export const ParseableLogstreamPage = () => { const [startDate, setStartDate] = useState(''); const [endDate, setEndDate] = useState(''); const [baseUrl, setBaseUrl] = useState('https://demo.parseable.com'); - const [schema, setSchema] = useState(null); - const [schemaLoading, setSchemaLoading] = useState(false); - const [schemaError, setSchemaError] = useState(null); + + // Dialog state for showing full body content + const [toastOpen, setToastOpen] = useState(false); + const [toastContent, setToastContent] = useState(''); + const [copyFeedback, setCopyFeedback] = useState(''); + // Try to get entity context, but don't fail if not available const entityContext = (() => { @@ -155,46 +185,45 @@ export const ParseableLogstreamPage = () => { ); // Fetch logs when dataset is selected or during live tail - useEffect(() => { - let intervalId: NodeJS.Timeout; + // Define fetchLogs as a callback that always uses the latest state values + const fetchLogs = 
useCallback(async () => { + if (!baseUrl || !selectedDataset) return; - const fetchLogs = async () => { - if (!baseUrl || !selectedDataset) return; + try { + setLogsLoading(true); + console.log('Fetching logs with query:', searchQuery, 'for dataset:', selectedDataset); - try { - setLogsLoading(true); - // Build query string with search terms - let query = searchQuery; - - // Use the updated getLogs method with time range parameters - const logData = await parseableClient.getLogs( - baseUrl, - selectedDataset, - 100, - query, - startDate || undefined, - endDate || undefined - ); - - setLogs(logData); - setError(undefined); - setLogsLoading(false); - } catch (err) { - console.error('Error fetching logs:', err); - setError(err instanceof Error ? err : new Error(String(err))); - setIsLiveTail(false); - setLogsLoading(false); - } - }; - - // Initial fetch - if (selectedDataset) { - fetchLogs(); + // Use the updated getLogs method with time range parameters + const logData = await parseableClient.getLogs( + baseUrl, + selectedDataset, + 100, + searchQuery, + startDate || undefined, + endDate || undefined + ); + + console.log('Fetched logs:', logData.length); + setLogs(logData); + setError(undefined); + setLogsLoading(false); + } catch (err) { + console.error('Error fetching logs:', err); + setError(err instanceof Error ? err : new Error(String(err))); + setIsLiveTail(false); + setLogsLoading(false); } + }, [baseUrl, selectedDataset, searchQuery, startDate, endDate, parseableClient]); - // Set up live tail if enabled - if (isLiveTail && selectedDataset) { - intervalId = setInterval(fetchLogs, 3000); + useEffect(() => { + let intervalId: NodeJS.Timeout; + + // Only set up live tail if enabled - no initial fetch when dataset changes + if (isLiveTail && selectedDataset && baseUrl) { + // Initial fetch for live tail + fetchLogs(); + // Then set interval + intervalId = setInterval(fetchLogs, 600000); // 10 minutes } return () => { @@ -202,7 +231,7 @@ export const ParseableLogstreamPage = () => { clearInterval(intervalId); } }; - }, [baseUrl, selectedDataset, isLiveTail, searchQuery, startDate, endDate, parseableClient]); + }, [baseUrl, selectedDataset, isLiveTail]); const handleDatasetChange = (event: React.ChangeEvent<{ value: unknown }>) => { const newDataset = event.target.value as string; @@ -210,24 +239,44 @@ export const ParseableLogstreamPage = () => { setLogs([]); setError(undefined); - // Fetch schema for the selected dataset + // Fetch logs with a default query when dataset is selected if (newDataset && baseUrl) { - setSchemaLoading(true); - setSchema(null); - setSchemaError(null); + // Use a default time range (last 30 minutes) + const now = new Date(); + const thirtyMinutesAgo = new Date(now.getTime() - 30 * 60 * 1000); + + // Format dates for display in the UI + const formattedEndDate = now.toISOString(); + const formattedStartDate = thirtyMinutesAgo.toISOString(); + + // Update the date pickers to reflect the default range + setStartDate(formattedStartDate); + setEndDate(formattedEndDate); + + // Temporarily set loading state + setLogsLoading(true); + + console.log('Fetching default logs for dataset:', newDataset); + console.log('Time range:', formattedStartDate, 'to', formattedEndDate); - parseableClient.getSchema(baseUrl, newDataset) - .then(response => { - setSchema(response); - setSchemaLoading(false); - }) - .catch(err => { - console.error('Error fetching schema:', err); - setSchemaError(err instanceof Error ? 
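
Two things stand out in the live-tail effect above: `fetchLogs` is memoized with `useCallback` but omitted from the effect's dependency array, so the interval can keep invoking a stale closure, and 600000 ms is ten minutes — unusually slow for a feature labeled "live tail". A hedged alternative (the 5-second cadence is an assumption, not a stated requirement):

```ts
useEffect(() => {
  if (!isLiveTail || !selectedDataset || !baseUrl) return undefined;

  fetchLogs(); // prime immediately when live tail turns on
  const intervalId = setInterval(fetchLogs, 5000); // assumed 5s cadence

  return () => clearInterval(intervalId);
  // Listing fetchLogs restarts the interval whenever its inputs
  // (dataset, query, time range) change.
}, [isLiveTail, selectedDataset, baseUrl, fetchLogs]);
```
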
err : new Error(String(err))); - setSchemaLoading(false); - }); - } else { - setSchema(null); + // Fetch logs with default parameters + parseableClient.getLogs( + baseUrl, + newDataset, + 100, // Default limit + '', // Empty query string + formattedStartDate, + formattedEndDate + ).then(logData => { + console.log('Fetched initial logs for dataset:', logData.length); + setLogs(logData); + setError(undefined); + setLogsLoading(false); + }).catch(err => { + console.error('Error fetching initial logs:', err); + setError(err instanceof Error ? err : new Error(String(err))); + setLogsLoading(false); + }); } }; @@ -236,8 +285,43 @@ export const ParseableLogstreamPage = () => { }; const handleSearch = () => { - // Trigger a search with the current query - setIsLiveTail(false); // Stop live tail when searching + setIsLiveTail(false); + // Explicitly fetch logs when search button is clicked + fetchLogs(); + }; + + // State for export functionality + const [isExporting, setIsExporting] = useState(false); + + // Handle CSV export + const handleExportCsv = async () => { + if (!baseUrl || !selectedDataset) return; + + try { + setIsExporting(true); + + // Build query string with time range and search terms + let query = searchQuery; + + if (startDate && endDate) { + const timeFilter = `timestamp >= "${startDate}" AND timestamp <= "${endDate}"`; + query = query ? `${query} AND ${timeFilter}` : timeFilter; + } + + const blob = await parseableClient.exportToCsv(baseUrl, selectedDataset, query); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = `${selectedDataset}-logs.csv`; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + } catch (err) { + setError(err instanceof Error ? 
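
Also note that the CSV export handler filters on `timestamp` with double-quoted literals, while `getLogs` filters on `p_timestamp` with single-quoted ones, so the exported CSV may not match what the table shows. A shared builder would keep the two in sync (sketch; the helper name is made up):

```ts
// Sketch: one source of truth for the time filter (hypothetical helper).
function buildTimeConditions(startTime?: string, endTime?: string): string[] {
  const conditions: string[] = [];
  if (startTime) conditions.push(`p_timestamp >= '${startTime}'`);
  if (endTime) conditions.push(`p_timestamp <= '${endTime}'`);
  return conditions;
}

// getLogs:         whereConditions.push(...buildTimeConditions(startTime, endTime));
// handleExportCsv: const timeFilter = buildTimeConditions(startDate, endDate).join(' AND ');
```
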
err : new Error(String(err))); + } finally { + setIsExporting(false); + } }; // Helper function to determine log level color based on log properties @@ -262,6 +346,101 @@ export const ParseableLogstreamPage = () => { return 'inherit'; }; + + // Format timestamp to be more readable + const formatTimestamp = (timestamp: string): string => { + if (!timestamp) return ''; + + try { + // Try to parse the timestamp + const date = new Date(timestamp); + + // Check if date is valid + if (isNaN(date.getTime())) { + return timestamp; // Return original if parsing failed + } + + // Format as a readable date and time + return date.toLocaleString(); + } catch { + return timestamp; // Return original on any error + } + }; + + // Extract all unique fields from logs to create dynamic columns + const extractLogFields = (logs: LogEntry[]): string[] => { + const fieldsSet = new Set(); + + logs.forEach(log => { + Object.keys(log).forEach(key => { + // Skip timestamp fields as they'll be handled separately + if (!['p_timestamp', 'event_time', 'timestamp', 'datetime'].includes(key)) { + fieldsSet.add(key); + } + }); + }); + + return Array.from(fieldsSet); + }; + + // Safely prepare log data for rendering to avoid 'in' operator errors + const prepareLogsForRendering = (logs: LogEntry[]): Record[] => { + return logs.map(log => { + const preparedLog: Record = {}; + Object.entries(log).forEach(([key, value]) => { + // Ensure all values are strings or primitives, not complex objects + if (value === null || value === undefined) { + preparedLog[key] = ''; + } else if (typeof value === 'object') { + preparedLog[key] = JSON.stringify(value); + } else { + preparedLog[key] = String(value); + } + }); + // Add levelColor separately since it's used for styling + if (log.levelColor) { + preparedLog.levelColor = log.levelColor; + } + return preparedLog; + }); + }; + + // Format body column content for better readability + const formatBodyContent = (value: any): string => { + if (!value) return ''; + + if (typeof value === 'object') { + try { + return JSON.stringify(value, null, 2); + } catch (e) { + return String(value); + } + } + + return String(value); + }; + + // Helper function to render truncated text with 'See more' button + const renderTruncatedText = (text: string) => { + return ( +
+      <span>{text.substring(0, 50)}...</span>
+      <Button
+        size="small"
+        color="primary"
+        onClick={() => {
+          setToastContent(text);
+          setToastOpen(true);
+        }}
+      >
+        See more
+      </Button>
+ ); + }; if (logsLoading) { return ; @@ -345,86 +524,80 @@ export const ParseableLogstreamPage = () => {
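
For reference, the table helpers defined above behave like this on a made-up entry (illustrative values only):

```ts
const sample = [
  { p_timestamp: '2024-01-01T00:00:00Z', level: 'error', body: { msg: 'boom' } },
];

extractLogFields(sample);
// -> ['level', 'body']   (timestamp-style keys are skipped)

prepareLogsForRendering(sample);
// -> [{ p_timestamp: '2024-01-01T00:00:00Z', level: 'error', body: '{"msg":"boom"}' }]
```
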
-            <TextField
-              label="Start Date"
-              type="datetime-local"
-              value={startDate}
-              onChange={(e) => setStartDate(e.target.value)}
-              InputLabelProps={{ shrink: true }}
-              disabled={isLiveTail}
-            />
-            <TextField
-              label="End Date"
-              type="datetime-local"
-              value={endDate}
-              onChange={(e) => setEndDate(e.target.value)}
-              InputLabelProps={{ shrink: true }}
-              disabled={isLiveTail}
-            />
-            <TextField
-              label="Search Query"
-              value={searchQuery}
-              onChange={(e) => setSearchQuery(e.target.value)}
-              onKeyPress={(e) => e.key === 'Enter' && handleSearch()}
-              disabled={isLiveTail}
-            />
+            <TextField
+              label="Start Date"
+              type="datetime-local"
+              value={startDate}
+              onChange={(e) => setStartDate(e.target.value)}
+              InputLabelProps={{ shrink: true }}
+              disabled={isLiveTail}
+              size="small"
+            />
+            <TextField
+              label="End Date"
+              type="datetime-local"
+              value={endDate}
+              onChange={(e) => setEndDate(e.target.value)}
+              InputLabelProps={{ shrink: true }}
+              disabled={isLiveTail}
+              size="small"
+            />
+            <TextField
+              label="Search Query"
+              value={searchQuery}
+              onChange={(e) => setSearchQuery(e.target.value)}
+              onKeyPress={(e) => e.key === 'Enter' && handleSearch()}
+              disabled={isLiveTail}
+              placeholder="Simple text or SQL WHERE clause"
+              helperText="Example: level='error' OR status>=400"
+              fullWidth
+              size="small"
+              variant="outlined"
+              margin="normal"
+            />
+            <IconButton onClick={handleSearch} disabled={isLiveTail}>
+              <SearchIcon />
+            </IconButton>
- - {isLiveTail ? : } - + {/* Wrapper to handle disabled tooltip */} + + {isLiveTail ? : } + + {selectedDataset && ( )}
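
The `{/* Wrapper to handle disabled tooltip */}` comment refers to a Material-UI quirk: `Tooltip` attaches its listeners to the child element, and a disabled button fires no pointer events, so the tooltip never appears. Wrapping the button in a `span` keeps an active event target. A minimal standalone illustration (component and prop names are illustrative):

```tsx
import React from 'react';
import { Tooltip, IconButton } from '@material-ui/core';
import PlayArrowIcon from '@material-ui/icons/PlayArrow';

export const LiveTailToggle = (props: { disabled: boolean; onToggle: () => void }) => (
  <Tooltip title={props.disabled ? 'Select a dataset first' : 'Toggle live tail'}>
    {/* The span stays interactive even while the button is disabled */}
    <span>
      <IconButton disabled={props.disabled} onClick={props.onToggle}>
        <PlayArrowIcon />
      </IconButton>
    </span>
  </Tooltip>
);
```
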
@@ -435,27 +608,7 @@ export const ParseableLogstreamPage = () => { - {selectedDataset && ( - - {schemaLoading && } - {schemaError && } - {schema && ( - - ({ - field, - type, - }))} - columns={[ - { title: 'Field', field: 'field' }, - { title: 'Type', field: 'type' }, - ]} - /> - - )} - - )} + {/* Schema table removed */} {logsLoading && } @@ -475,49 +628,242 @@ export const ParseableLogstreamPage = () => { {!logsLoading && !error && logs.length > 0 && (
{ - const level = log.level || log.severity || log.event_type || 'unknown'; - // Get the color for the log level - const levelColor = getLogLevelColor(log); + key={`logs-table-${selectedDataset}`} + options={{ + pageSize: 10, + pageSizeOptions: [10, 20, 50], + headerStyle: { + backgroundColor: '#e3f2fd', + color: '#000000', + fontWeight: 'bold', + }, + maxBodyHeight: '600px', + minBodyHeight: '400px', + padding: 'default', + tableLayout: 'auto', + search: true, + paging: true, + sorting: true, + columnsButton: true + }} + data={prepareLogsForRendering(logs).map((log, index) => { + // Create a data object with all fields from the log + const rowData: Record = { id: index }; + + // Add timestamp field + rowData.timestamp = formatTimestamp(String(log.p_timestamp || log.datetime || log.event_time || '')); - return { - id: index, - timestamp: log.p_timestamp || log.event_time || '', - level: level, - message: JSON.stringify(log, null, 2), - // Store the color as a string - levelColor: levelColor - }; + // Add all other fields from the log + Object.entries(log).forEach(([key, value]) => { + // Skip timestamp fields as they're already handled + if (!['p_timestamp', 'event_time', 'timestamp', 'datetime'].includes(key)) { + // Convert any object values to strings to avoid React errors + if (typeof value === 'object' && value !== null) { + // Special handling for body column + if (key === 'body') { + rowData[key] = formatBodyContent(value); + } else { + try { + const seen = new WeakSet(); + rowData[key] = JSON.stringify(value, (_k, v) => { + if (typeof v === 'object' && v !== null) { + if (seen.has(v)) return '[Circular]'; + seen.add(v); + } + return v; + }); + + // Truncate long JSON strings + if (rowData[key].length > 200) { + rowData[key] = rowData[key].substring(0, 200) + '...'; + } + } catch (error) { + rowData[key] = `[Error: ${error instanceof Error ? error.message : String(error)}]`; + } + } + } else { + rowData[key] = value === null ? '' : String(value); + } + } + }); + + // Add level color for styling + rowData.levelColor = getLogLevelColor(log); + + return rowData; })} - columns={[ - { title: 'Timestamp', field: 'timestamp' }, - { - title: 'Level', - field: 'level', - render: rowData => { - return ( - - {rowData.level} - - ); + components={{ + // Use a custom container to avoid the scrollWidth error + Container: props =>
+ }} + columns={(() => { + // Start with timestamp column + const columns: any[] = [ + { + title: 'Timestamp', + field: 'timestamp', + render: (rowData: Record) => {String(rowData.timestamp || '')} } - }, - { - title: 'Message', - field: 'message', - render: rowData => ( -
-                        {rowData.message}
-                      </pre>
- ) - }, - ]} + ]; + + // Add columns for all fields in the logs + if (logs.length > 0) { + const fields = extractLogFields(logs); + + // Add level/status/meta-state first if they exist + const priorityFields = ['level', 'meta-state', 'status', 'method', 'host', 'id']; + priorityFields.forEach(field => { + if (fields.includes(field)) { + columns.push({ + title: field.charAt(0).toUpperCase() + field.slice(1).replace(/-/g, ' '), + field: field, + render: (rowData: Record) => { + // Style level/status fields with colors + if (['level', 'meta-state', 'status'].includes(field)) { + return ( + = 400 ? '#d32f2f' : + field === 'level' || field === 'meta-state' ? rowData.levelColor : 'inherit', + fontWeight: 'medium' + }}> + {String(rowData[field] || '')} + + ); + } + return {String(rowData[field] || '')}; + } + }); + // Remove from fields array to avoid duplication + fields.splice(fields.indexOf(field), 1); + } + }); + + // Add remaining fields + fields.forEach(field => { + // Special handling for body column + if (field === 'body') { + columns.push({ + title: 'Body', + field: 'body', + render: (rowData: Record) => { + const bodyValue = rowData.body; + if (!bodyValue || typeof bodyValue !== 'string') { + return {String(bodyValue || '')}; + } + + // Try to parse as JSON if it looks like JSON + try { + if (bodyValue.startsWith('{') || bodyValue.startsWith('[')) { + const parsed = JSON.parse(bodyValue); + const displaySummary = typeof parsed === 'object' ? + Object.keys(parsed).slice(0, 3).map(k => k).join(', ') : + String(parsed).substring(0, 30); + + return ( +
+ + {displaySummary} + + +
+ ); + } + // If it's valid JSON but not an object/array that we handled above + // fall through to default handling below + } catch (e) { + console.debug('Failed to parse JSON body:', e); + // Parsing failed, fall through to default handling below + } + + // For non-JSON strings or parsing failures + return bodyValue.length > 50 ? renderTruncatedText(bodyValue) : {bodyValue}; + } + + }); + } else { + columns.push({ + title: field.charAt(0).toUpperCase() + field.slice(1).replace(/-/g, ' '), + field: field, + render: (rowData: Record) => {String(rowData[field] || '')} + }); + } + }); + } + + return columns; + })()} /> )} - + + {/* Dialog for displaying full body content */} + setToastOpen(false)} + maxWidth="md" + fullWidth + PaperProps={{ + style: { + maxHeight: '80vh', + backgroundColor: '#282828', // Dark background to match Backstage theme + color: '#fff', + }, + }} + > + + Log Body Content + setToastOpen(false)} + > + + + + +
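
`copyFeedback` and the `CheckIcon` import are introduced earlier in this file, but their wiring did not survive into this hunk. A hedged sketch of the kind of handler they imply (a reconstruction, not verbatim from the PR):

```ts
// Copy the dialog body and flash feedback via the copyFeedback state.
const handleCopy = async () => {
  try {
    await navigator.clipboard.writeText(toastContent);
    setCopyFeedback('Copied!');
    setTimeout(() => setCopyFeedback(''), 2000); // clear after 2s
  } catch {
    setCopyFeedback('Copy failed');
  }
};
```
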
+            {toastContent}
+          </pre>
+        </DialogContent>
+        <DialogActions>
+          <Button onClick={() => setToastOpen(false)} color="primary">
+            Close
+          </Button>
+        </DialogActions>
+      </Dialog>
+ ); }; diff --git a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/Router.tsx b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/Router.tsx index 3397899..03bda2c 100644 --- a/parseable-backstage-plugin/plugins/parseable-logstream/src/components/Router.tsx +++ b/parseable-backstage-plugin/plugins/parseable-logstream/src/components/Router.tsx @@ -3,6 +3,7 @@ import { ParseableLogstreamPage } from './ParseableLogstreamPage'; import { Entity } from '@backstage/catalog-model'; import { MissingAnnotationEmptyState } from '@backstage/core-components'; import { useEntity } from '@backstage/plugin-catalog-react'; +import React from 'react'; // Define the annotation constant directly here const PARSEABLE_ANNOTATION_BASE_URL = 'parseable.io/base-url';
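
Finally, plugins keyed off an annotation conventionally export an availability predicate so an app can gate the tab. Assuming the standard Backstage pattern, it would look like:

```ts
import { Entity } from '@backstage/catalog-model';

// Conventional availability check, e.g. for <EntitySwitch.Case if={...}>.
export const isParseableAvailable = (entity: Entity): boolean =>
  Boolean(entity.metadata.annotations?.[PARSEABLE_ANNOTATION_BASE_URL]);
```
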