
Commit

Adding compression details card
Signed-off-by: Arun Kumar Mohan <amohan@redhat.com>
aruniiird committed Jul 19, 2021
1 parent 3c3240d commit f9ba66b
Showing 4 changed files with 98 additions and 2 deletions.
@@ -28,6 +28,11 @@
"Pool creation is not supported for OpenShift Container Storage's external mode.": "Pool creation is not supported for OpenShift Container Storage's external mode.",
"A BlockPool is a logical entity providing elastic capacity to applications and workloads. Pools provide a means of supporting policies for access data resilience and storage efficiency.": "A BlockPool is a logical entity providing elastic capacity to applications and workloads. Pools provide a means of supporting policies for access data resilience and storage efficiency.",
"BlockPool Creation Form": "BlockPool Creation Form",
"Compression status": "Compression status",
"Storage efficiency": "Storage efficiency",
"Compression eligibility": "Compression eligibility",
"Compression ratio": "Compression ratio",
"Compression savings": "Compression savings",
"Details": "Details",
"Replicas": "Replicas",
"Inventory": "Inventory",
@@ -249,7 +254,6 @@
"Many buckets have issues": "Many buckets have issues",
"Some buckets have issues": "Some buckets have issues",
"{{capacityRatio, number}}:1": "{{capacityRatio, number}}:1",
"Compression ratio": "Compression ratio",
"OpenShift Container Storage can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.": "OpenShift Container Storage can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.",
"Savings": "Savings",
"Savings shows the uncompressed and non-deduped data that would have been stored without those techniques.": "Savings shows the uncompressed and non-deduped data that would have been stored without those techniques.",
@@ -0,0 +1,80 @@
import * as React from 'react';
import { useTranslation } from 'react-i18next';
import { compose } from 'redux';

import { getInstantVectorStats } from '@console/internal/components/graphs/utils';

import DashboardCard from '@console/shared/src/components/dashboard/dashboard-card/DashboardCard';
import DashboardCardBody from '@console/shared/src/components/dashboard/dashboard-card/DashboardCardBody';
import DashboardCardHeader from '@console/shared/src/components/dashboard/dashboard-card/DashboardCardHeader';
import DashboardCardTitle from '@console/shared/src/components/dashboard/dashboard-card/DashboardCardTitle';
import DetailItem from '@console/shared/src/components/dashboard/details-card/DetailItem';
import DetailsBody from '@console/shared/src/components/dashboard/details-card/DetailsBody';
import { usePrometheusQueries } from '@console/shared/src/components/dashboard/utilization-card/prometheus-hook';
import {
humanizePercentage,
humanizeBinaryBytes,
humanizeNumber,
} from '@console/internal/components/utils';

import { BlockPoolDashboardContext } from './block-pool-dashboard-context';
import { getPoolQuery, StorageDashboardQuery } from '../../../queries/ceph-queries';

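// Collapse a Prometheus instant-vector response into a single number:
// getInstantVectorStats turns the response into data points, then the outer
// function picks the y value of the first (and only expected) sample.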
const parser = compose((val) => val?.[0]?.y, getInstantVectorStats);

export const CompressionDetailsCard: React.FC = () => {
const { t } = useTranslation();
const { obj } = React.useContext(BlockPoolDashboardContext);
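// Any compressionMode other than 'none' (Ceph also accepts 'passive', 'aggressive' and 'force') counts as enabled.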
const compressionMode = obj.spec?.compressionMode;
const compressionEnabled = compressionMode !== 'none';
const { name } = obj.metadata;

// Compression Metrics
const queries = React.useMemo(
() => [
getPoolQuery([name], StorageDashboardQuery.POOL_COMPRESSION_SAVINGS),
getPoolQuery([name], StorageDashboardQuery.POOL_COMPRESSION_ELIGIBILITY),
getPoolQuery([name], StorageDashboardQuery.POOL_COMPRESSION_RATIO),
],
[name],
);

const [values, loading] = usePrometheusQueries(queries, parser as any);
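// Results come back in the same order as the queries array above; default to 0 while loading.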
const poolCompressionSavings = (values?.[0] ?? 0) as number;
const poolCompressionEligibility = (values?.[1] ?? 0) as number;
const poolCompressionRatio = (values?.[2] ?? 0) as number;

return (
<DashboardCard data-test-id="compression-details-card">
<DashboardCardHeader>
<DashboardCardTitle>{t('ceph-storage-plugin~Compression')}</DashboardCardTitle>
</DashboardCardHeader>
<DashboardCardBody>
<DetailsBody>
<DetailItem isLoading={!obj} title={t('ceph-storage-plugin~Compression status')}>
{!compressionEnabled ? 'Disabled' : 'Enabled'}
</DetailItem>
{compressionEnabled && (
<>
<DetailItem isLoading={loading} title={t('ceph-storage-plugin~Storage efficiency')}>
{}
</DetailItem>
<DetailItem
isLoading={loading}
title={t('ceph-storage-plugin~Compression eligibility')}
>
{humanizePercentage(poolCompressionEligibility).string}
</DetailItem>
<DetailItem isLoading={loading} title={t('ceph-storage-plugin~Compression ratio')}>
{humanizeNumber(poolCompressionRatio).string}
</DetailItem>
<DetailItem isLoading={loading} title={t('ceph-storage-plugin~Compression savings')}>
{humanizeBinaryBytes(poolCompressionSavings).string}
</DetailItem>
</>
)}
</DetailsBody>
</DashboardCardBody>
</DashboardCard>
);
};
@@ -11,8 +11,13 @@ import { StatusCard } from './status-card';
import { RawCapacityCard } from './raw-capacity-card';
import { UtilizationCard } from './utilization-card';
import { MirroringCard } from './mirroring-card';
import { CompressionDetailsCard } from './compression-details-card';

const leftCards = [{ Card: DetailsCard }, { Card: InventoryCard }];
const leftCards = [
{ Card: DetailsCard },
{ Card: InventoryCard },
{ Card: CompressionDetailsCard },
];
const mainCards = [{ Card: StatusCard }, { Card: RawCapacityCard }, { Card: UtilizationCard }];
const rightCards = [{ Card: MirroringCard }];

@@ -32,6 +32,10 @@ export enum StorageDashboardQuery {
POOL_MAX_CAPACITY_AVAILABLE = 'POOL_MAX_CAPACITY_AVAILABLE',
POOL_UTILIZATION_IOPS_QUERY = 'POOL_UTILIZATION_IOPS_QUERY',
POOL_UTILIZATION_THROUGHPUT_QUERY = 'POOL_UTILIZATION_THROUGHPUT_QUERY',
// Pool Compression Details
POOL_COMPRESSION_SAVINGS = 'POOL_COMPRESSION_SAVINGS',
POOL_COMPRESSION_RATIO = 'POOL_COMPRESSION_RATIO',
POOL_COMPRESSION_ELIGIBILITY = 'POOL_COMPRESSION_ELIGIBILITY',
// Capacity Info Card
RAW_CAPACITY_TOTAL = 'RAW_TOTAL_CAPACITY',
RAW_CAPACITY_USED = 'RAW_CAPACITY_USED',
@@ -226,6 +230,9 @@ export const getPoolQuery = (poolNames: string[], queryName: string) => {
[StorageDashboardQuery.POOL_MAX_CAPACITY_AVAILABLE]: `ceph_pool_max_avail * on (pool_id) group_left(name)ceph_pool_metadata{name=~'${names}'}`,
[StorageDashboardQuery.POOL_UTILIZATION_IOPS_QUERY]: `(rate(ceph_pool_wr[1m]) + rate(ceph_pool_rd[1m])) * on (pool_id) group_left(name)ceph_pool_metadata{name=~'${names}'}`,
[StorageDashboardQuery.POOL_UTILIZATION_THROUGHPUT_QUERY]: `(rate(ceph_pool_wr_bytes[1m]) + rate(ceph_pool_rd_bytes[1m])) * on (pool_id) group_left(name)ceph_pool_metadata{name=~'${names}'}`,
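// Compression details: savings is the space reclaimed by compression (uncompressed minus
// compressed size), eligibility is the share of raw stored data that went through compression,
// and ratio is the uncompressed-to-compressed size of the compressed portion.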
[StorageDashboardQuery.POOL_COMPRESSION_SAVINGS]: `(ceph_pool_compress_under_bytes - ceph_pool_compress_bytes_used) * on (pool_id) group_left(name)ceph_pool_metadata{name=~'${names}'}`,
[StorageDashboardQuery.POOL_COMPRESSION_ELIGIBILITY]: `(((ceph_pool_compress_under_bytes > 0) / ceph_pool_stored_raw) * 100) * on (pool_id) group_left(name)ceph_pool_metadata{name=~'${names}'}`,
[StorageDashboardQuery.POOL_COMPRESSION_RATIO]: `((ceph_pool_compress_under_bytes / ceph_pool_compress_bytes_used > 0) and on(pool_id) (((ceph_pool_compress_under_bytes > 0) / ceph_pool_stored_raw) * 100 > 0.5)) * on (pool_id) group_left(name)ceph_pool_metadata{name=~'${names}'}`,
};
return queries[queryName];
};
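
For reference, the three compression figures are plain arithmetic over the Ceph pool metrics used in the PromQL above. The sketch below is illustrative only; the variable names and sample values are hypothetical and simply mirror ceph_pool_compress_under_bytes, ceph_pool_compress_bytes_used and ceph_pool_stored_raw.

// Hypothetical per-pool samples (bytes) illustrating the arithmetic encoded in the queries above.
const compressUnderBytes = 10 * 1024 ** 3; // uncompressed size of the data that was compressed
const compressBytesUsed = 4 * 1024 ** 3; // bytes actually stored for that data after compression
const storedRaw = 40 * 1024 ** 3; // total raw bytes stored in the pool

const savings = compressUnderBytes - compressBytesUsed; // 6 GiB reclaimed
const eligibilityPercent = (compressUnderBytes / storedRaw) * 100; // 25% of stored data was compressible
const ratio = compressUnderBytes / compressBytesUsed; // 2.5:1 compression ratio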
