Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Rebranding OCS to ODF #9604

Merged
merged 1 commit into from Jul 23, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -8,10 +8,10 @@ import { poolData } from '../mocks/storage-pool';

export const poolMessage = {
PROGRESS:
'The creation of an OCS storage cluster is still in progress or have failed, please try again after the StorageCluster is ready to use.',
'The creation of a StorageCluster is still in progress or has failed. Try again after the StorageCuster is ready to use.',
POOL_START: 'Pool "foo" creation in progress',
POOL_TIMEOUT:
'Pool "foo" creation timed out. Please check if ocs-operator and rook operator are running',
'Pool "foo" creation timed out. Please check if odf operator and rook operator are running',
POOL_DUPLICATED: 'Pool "foo" already exists',
POOL_CREATED: 'Pool "foo" was successfully created',
};
Expand Down
Expand Up @@ -29,7 +29,7 @@
"OpenShift Container Storage's StorageCluster is not available. Try again after the StorageCluster is ready to use.": "OpenShift Container Storage's StorageCluster is not available. Try again after the StorageCluster is ready to use.",
"Create BlockPool": "Create BlockPool",
"Close": "Close",
"Pool creation is not supported for OpenShift Container Storage's external mode.": "Pool creation is not supported for OpenShift Container Storage's external mode.",
"Pool creation is not supported for OpenShift Data Foundation's external RHCS StorageSystem.": "Pool creation is not supported for OpenShift Data Foundation's external RHCS StorageSystem.",
"A BlockPool is a logical entity providing elastic capacity to applications and workloads. Pools provide a means of supporting policies for access data resilience and storage efficiency.": "A BlockPool is a logical entity providing elastic capacity to applications and workloads. Pools provide a means of supporting policies for access data resilience and storage efficiency.",
"BlockPool Creation Form": "BlockPool Creation Form",
"Name": "Name",
Expand All @@ -38,7 +38,7 @@
"Region": "Region",
"BackingStore Table": "BackingStore Table",
"Each BackingStore can be used for one tier at a time. Selecting a BackingStore in one tier will remove the resource from the second tier option and vice versa.": "Each BackingStore can be used for one tier at a time. Selecting a BackingStore in one tier will remove the resource from the second tier option and vice versa.",
"Bucket created for OpenShift Container Storage's Service": "Bucket created for OpenShift Container Storage's Service",
"Bucket created for OpenShift Data Foundation's Service": "Bucket created for OpenShift Data Foundation's Service",
"Tier 1 - BackingStores": "Tier 1 - BackingStores",
"Create BackingStore ": "Create BackingStore ",
"Tier-1-Table": "Tier-1-Table",
Expand Down Expand Up @@ -270,11 +270,11 @@
"Some buckets have issues": "Some buckets have issues",
"{{capacityRatio, number}}:1": "{{capacityRatio, number}}:1",
"Compression ratio": "Compression ratio",
"OpenShift Container Storage can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.": "OpenShift Container Storage can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.",
"OpenShift Data Foundation can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.": "OpenShift Data Foundation can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.",
"Savings": "Savings",
"Savings shows the uncompressed and non-deduped data that would have been stored without those techniques.": "Savings shows the uncompressed and non-deduped data that would have been stored without those techniques.",
"Storage Efficiency": "Storage Efficiency",
"OpenShift Container Storage Overview": "OpenShift Container Storage Overview",
"OpenShift Data Foundation Overview": "OpenShift Data Foundation Overview",
"Storage Classes": "Storage Classes",
"Pods": "Pods",
"{{metricType}}": "{{metricType}}",
Expand All @@ -285,8 +285,8 @@
"Utilization": "Utilization",
"Used Capacity": "Used Capacity",
"Requested capacity": "Requested capacity",
"Expanding OpenShift Container Storage's Cluster": "Expanding OpenShift Container Storage's Cluster",
"Upgrading OpenShift Container Storage's Operator": "Upgrading OpenShift Container Storage's Operator",
"Expanding StorageCluster": "Expanding StorageCluster",
"Upgrading OpenShift Data Foundation's Operator": "Upgrading OpenShift Data Foundation's Operator",
"Used Capacity Breakdown": "Used Capacity Breakdown",
"This card shows the used capacity for different Kubernetes resources. The figures shown represent the Usable storage, meaning that data replication is not taken into consideration.": "This card shows the used capacity for different Kubernetes resources. The figures shown represent the Usable storage, meaning that data replication is not taken into consideration.",
"Service name": "Service name",
Expand All @@ -309,7 +309,7 @@
"Throughput": "Throughput",
"Recovery": "Recovery",
"Disk State": "Disk State",
"OpenShift Container Storage's Status": "OpenShift Container Storage's Status",
"OpenShift Data Foundation status": "OpenShift Data Foundation status",
"Model": "Model",
"Capacity": "Capacity",
"Filesystem": "Filesystem",
Expand Down Expand Up @@ -347,7 +347,7 @@
"A PEM-encoded CA certificate file used to verify the Vault server's SSL certificate.": "A PEM-encoded CA certificate file used to verify the Vault server's SSL certificate.",
"A PEM-encoded client certificate. This certificate is used for TLS communication with the Vault server.": "A PEM-encoded client certificate. This certificate is used for TLS communication with the Vault server.",
"An unencrypted, PEM-encoded private key which corresponds to the matching client certificate provided with VAULT_CLIENT_CERT.": "An unencrypted, PEM-encoded private key which corresponds to the matching client certificate provided with VAULT_CLIENT_CERT.",
"The name to use as the SNI host when OpenShift Container Storage connecting via TLS to the Vault server": "The name to use as the SNI host when OpenShift Container Storage connecting via TLS to the Vault server",
"The name to use as the SNI host when OpenShift Data Foundation connecting via TLS to the Vault server": "The name to use as the SNI host when OpenShift Data Foundation connecting via TLS to the Vault server",
"Key Management Service Advanced Settings": "Key Management Service Advanced Settings",
"Backend Path": "Backend Path",
"path/": "path/",
Expand Down Expand Up @@ -480,7 +480,8 @@
"Connect to external cluster": "Connect to external cluster",
"Download <1>{{SCRIPT_NAME}}</1> script and run on the RHCS cluster, then upload the results (JSON) in the External cluster metadata field.": "Download <1>{{SCRIPT_NAME}}</1> script and run on the RHCS cluster, then upload the results (JSON) in the External cluster metadata field.",
"Download Script": "Download Script",
"A bucket will be created to provide the OpenShift Container Storage's Service.": "A bucket will be created to provide the OpenShift Container Storage's Service.",
"A bucket will be created to provide the OpenShift Data Foundation's Service.": "A bucket will be created to provide the OpenShift Data Foundation's Service.",
"Bucket created for OpenShift Container Storage's Service": "Bucket created for OpenShift Container Storage's Service",
"Create External StorageCluster": "Create External StorageCluster",
"External cluster metadata": "External cluster metadata",
"Upload JSON File": "Upload JSON File",
Expand Down Expand Up @@ -550,11 +551,11 @@
"The last saved values will be updated": "The last saved values will be updated",
"Enable Thick Provisioning": "Enable Thick Provisioning",
"By enabling thick-provisioning, volumes will allocate the requested capacity upon volume creation. Volume creation will be slower when thick-provisioning is enabled.": "By enabling thick-provisioning, volumes will allocate the requested capacity upon volume creation. Volume creation will be slower when thick-provisioning is enabled.",
"Storage status represents the health status of Openshift Container Storage's StorageCluster.": "Storage status represents the health status of Openshift Container Storage's StorageCluster.",
"Storage status represents the health status of Openshift Data Foundation's StorageCluster.": "Storage status represents the health status of OpenShift Data Foundation's StorageCluster.",
"Health": "Health",
"Openshift Container Storage": "Openshift Container Storage",
"Openshift Data Foundation": "OpenShift Data Foundation",
"Standard": "Standard",
"Data will be consumed by a Multi-cloud object gateway, deduped, compressed, and encrypted. The encrypted chunks would be saved on the selected BackingStores. Best used when the applications would always use the OpenShift Container Storage endpoints to access the data.": "Data will be consumed by a Multi-cloud object gateway, deduped, compressed, and encrypted. The encrypted chunks would be saved on the selected BackingStores. Best used when the applications would always use the OpenShift Container Storage endpoints to access the data.",
"Data will be consumed by a Multi-cloud object gateway, deduped, compressed, and encrypted. The encrypted chunks would be saved on the selected BackingStores. Best used when the applications would always use the OpenShift Data Foundation endpoints to access the data.": "Data will be consumed by a Multi-cloud object gateway, deduped, compressed, and encrypted. The encrypted chunks would be saved on the selected BackingStores. Best used when the applications would always use the OpenShift Data Foundation endpoints to access the data.",
"Data is stored on the NamespaceStores without performing de-duplication, compression, or encryption. BucketClasses of namespace type allow connecting to existing data and serving from them. These are best used for existing data or when other applications (and cloud-native services) need to access the data from outside OpenShift Container Storage.": "Data is stored on the NamespaceStores without performing de-duplication, compression, or encryption. BucketClasses of namespace type allow connecting to existing data and serving from them. These are best used for existing data or when other applications (and cloud-native services) need to access the data from outside OpenShift Container Storage.",
"Single NamespaceStore": "Single NamespaceStore",
"Multi NamespaceStores": "Multi NamespaceStores",
Expand All @@ -580,7 +581,7 @@
"Pool {{name}} creation in progress": "Pool {{name}} creation in progress",
"Pool {{name}} was successfully created": "Pool {{name}} was successfully created",
"An error occurred. Pool {{name}} was not created": "An error occurred. Pool {{name}} was not created",
"Pool {{name}} creation timed out. Please check if ocs-operator and rook operator are running": "Pool {{name}} creation timed out. Please check if ocs-operator and rook operator are running",
"Pool {{name}} creation timed out. Please check if odf operator and rook operator are running": "Pool {{name}} creation timed out. Please check if odf operator and rook operator are running",
"The creation of a StorageCluster is still in progress or has failed. Try again after the StorageCuster is ready to use.": "The creation of a StorageCluster is still in progress or has failed. Try again after the StorageCluster is ready to use.",
"Pool management tasks are not supported for default pool and OpenShift Container Storage's external mode.": "Pool management tasks are not supported for default pool and OpenShift Container Storage's external mode.",
"Pool {{name}} was created with errors.": "Pool {{name}} was created with errors.",
Expand Down
Expand Up @@ -89,7 +89,7 @@ const CreateBlockPool: React.FC<CreateBlockPoolProps> = ({ match }) => {
>
<strong>
{t(
"ceph-storage-plugin~Pool creation is not supported for OpenShift Container Storage's external mode.",
"ceph-storage-plugin~Pool creation is not supported for OpenShift Data Foundation's external RHCS StorageSystem.",
)}
</strong>
</Modal>
Expand Down
Expand Up @@ -190,7 +190,7 @@ const BackingStoreSelection: React.FC<BackingStoreSelectionProps> = (props) => {
'ceph-storage-plugin~Each BackingStore can be used for one tier at a time. Selecting a BackingStore in one tier will remove the resource from the second tier option and vice versa.',
)}
aria-label={t(
"ceph-storage-plugin~Bucket created for OpenShift Container Storage's Service",
"ceph-storage-plugin~Bucket created for OpenShift Data Foundation's Service",
)}
isInline
/>
Expand Down
Expand Up @@ -255,7 +255,7 @@ const CreateBucketClass: React.FC<CreateBCProps> = ({ match }) => {
name: _.get(
clusterServiceVersion,
'spec.displayName',
'Openshift Container Storage Operator',
'Openshift Data Foundation Operator',
),
path: resourcePathFromModel(ClusterServiceVersionModel, appName, ns),
},
Expand Down
Expand Up @@ -83,7 +83,7 @@ const StorageEfficiencyCard: React.FC<DashboardItemProps> = ({
error: !!compressionQueryResultError || !compressionRatio || Number(compressionRatio) === 1,
title: t('ceph-storage-plugin~Compression ratio'),
infoText: t(
'ceph-storage-plugin~OpenShift Container Storage can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.',
'ceph-storage-plugin~OpenShift Data Foundation can be configured to use compression. The efficiency rate reflects the actual compression ratio when using such a configuration.',
),
getStats: compressionStats,
};
Expand Down
Expand Up @@ -24,7 +24,7 @@ import { HorizontalNav, PageHeading, LoadingBox } from '@console/internal/compon

const OCSDashboardsPage: React.FC<DashboardsPageProps> = ({ match, kindsInFlight, k8sModels }) => {
const { t } = useTranslation();
const title = t('ceph-storage-plugin~OpenShift Container Storage Overview');
const title = t('ceph-storage-plugin~OpenShift Data Foundation Overview');
const tabExtensions = useExtensions<DashboardsTab>(isDashboardsTab);
const cardExtensions = useExtensions<DashboardsCard>(isDashboardsCard);
const dynamicTabExtensions = useExtensions<DynamicDashboardsTab>(isDynamicDashboardsTab);
Expand Down
Expand Up @@ -6,11 +6,7 @@ import { K8sResourceKind } from '@console/internal/module/k8s';
// Activity-card item rendered while a StorageCluster expansion is in progress.
export const ClusterExpandActivity: React.FC = () => {
  const { t } = useTranslation();

  // Single return: the earlier pre-rebrand return block ("Expanding OpenShift
  // Container Storage's Cluster") was unreachable diff residue and is removed.
  return <ActivityItem>{t('ceph-storage-plugin~Expanding StorageCluster')}</ActivityItem>;
};

export const isClusterExpandActivity = (storageCluster: K8sResourceKind): boolean =>
Expand Down
Expand Up @@ -12,7 +12,7 @@ export const OCSUpgradeActivity: React.FC = () => {

return (
<ActivityItem>
{t("ceph-storage-plugin~Upgrading OpenShift Container Storage's Operator")}
{t("ceph-storage-plugin~Upgrading OpenShift Data Foundation's Operator")}
</ActivityItem>
);
};
Expand Up @@ -247,7 +247,7 @@ const OCSDisksList: React.FC<TableProps> = React.memo((props) => {
props: { className: tableColumnClasses[1] },
},
{
title: t("ceph-storage-plugin~OpenShift Container Storage's Status"),
title: t('ceph-storage-plugin~OpenShift Data Foundation status'),
sortField: '',
transforms: [],
props: { className: tableColumnClasses[1] },
Expand Down
Expand Up @@ -73,7 +73,7 @@ export const AdvancedKMSModal = withHandlePromise((props: AdvancedKMSModalProps)
);

const vaultTLSTooltip = t(
`ceph-storage-plugin~The name to use as the SNI host when OpenShift Container Storage connecting via TLS to the Vault server`,
`ceph-storage-plugin~The name to use as the SNI host when OpenShift Data Foundation connecting via TLS to the Vault server`,
);

const submit = (event: React.FormEvent<EventTarget>) => {
Expand Down
Expand Up @@ -22,7 +22,7 @@ const CreateNamespaceStore: React.FC<CreateNamespaceStoreProps> = ({ match }) =>
<BreadCrumbs
breadcrumbs={[
{
name: 'Openshift Container Storage',
name: 'Openshift Data Foundation',
path: resourcePathFromModel(ClusterServiceVersionModel, appName, ns),
},
{ name: t('ceph-storage-plugin~Create NamespaceStore '), path: match.url },
Expand Down
Expand Up @@ -185,7 +185,7 @@ const CreateExternalCluster = withHandlePromise((props: CreateExternalClusterPro
className="co-alert"
variant="info"
title={t(
"ceph-storage-plugin~A bucket will be created to provide the OpenShift Container Storage's Service.",
"ceph-storage-plugin~A bucket will be created to provide the OpenShift Data Foundation's Service.",
)}
role="alert"
aria-label={t(
Expand Down
Expand Up @@ -28,14 +28,14 @@ export const StoragePopover: React.FC<StoragePopoverProps> = ({ ceph }) => {
return (
<>
{t(
"ceph-storage-plugin~Storage status represents the health status of Openshift Container Storage's StorageCluster.",
"ceph-storage-plugin~Storage status represents the health status of Openshift Data Foundation's StorageCluster.",
)}
<StatusPopupSection
firstColumn={t('ceph-storage-plugin~Provider')}
secondColumn={t('ceph-storage-plugin~Health')}
>
<Status key="ocs" value={value} icon={icon}>
<Link to="/ocs-dashboards">{t('ceph-storage-plugin~Openshift Container Storage')}</Link>
<Link to="/ocs-dashboards">{t('ceph-storage-plugin~Openshift Data Foundation')}</Link>
</Status>
</StatusPopupSection>
</>
Expand Down
Expand Up @@ -19,7 +19,7 @@ export const bucketClassTypeRadios = (t: TFunction) => [
value: BucketClassType.STANDARD,
label: t('ceph-storage-plugin~Standard'),
description: t(
'ceph-storage-plugin~Data will be consumed by a Multi-cloud object gateway, deduped, compressed, and encrypted. The encrypted chunks would be saved on the selected BackingStores. Best used when the applications would always use the OpenShift Container Storage endpoints to access the data.',
'ceph-storage-plugin~Data will be consumed by a Multi-cloud object gateway, deduped, compressed, and encrypted. The encrypted chunks would be saved on the selected BackingStores. Best used when the applications would always use the OpenShift Data Foundation endpoints to access the data.',
),
},
{
Expand Down
Expand Up @@ -57,7 +57,7 @@ export const PROGRESS_STATUS = (t: TFunction, poolName: string): ProgressStatusP
name: POOL_PROGRESS.TIMEOUT,
icon: DisconnectedIcon,
desc: t(
'ceph-storage-plugin~Pool {{name}} creation timed out. Please check if ocs-operator and rook operator are running',
'ceph-storage-plugin~Pool {{name}} creation timed out. Please check if odf operator and rook operator are running',
{ name: poolName },
),
className: '',
Expand Down