Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

mgr/dashboard: list services and daemons #33531

Merged
merged 3 commits into from Mar 5, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 0 additions & 3 deletions qa/tasks/mgr/dashboard/test_orchestrator.py
Expand Up @@ -57,7 +57,6 @@ class OrchestratorControllerTest(DashboardTestCase):

URL_STATUS = '/api/orchestrator/status'
URL_INVENTORY = '/api/orchestrator/inventory'
URL_SERVICE = '/api/orchestrator/service'
URL_OSD = '/api/orchestrator/osd'


Expand Down Expand Up @@ -110,8 +109,6 @@ def test_access_permissions(self):
self.assertStatus(200)
self._get(self.URL_INVENTORY)
self.assertStatus(403)
self._get(self.URL_SERVICE)
self.assertStatus(403)

def test_status_get(self):
data = self._get(self.URL_STATUS)
Expand Down
4 changes: 2 additions & 2 deletions qa/tasks/mgr/test_orchestrator_cli.py
Expand Up @@ -63,12 +63,12 @@ def test_device_ls_json(self):

def test_ps(self):
    """Plain-text `orch ps` output must mention the mgr daemon.

    Daemon types are reported by their short name (e.g. "mgr"), not the
    legacy "ceph-mgr" package-style name, so assert only on the short form.
    """
    ret = self._orch_cmd("ps")
    self.assertIn("mgr", ret)

def test_ps_json(self):
    """JSON `orch ps` output must parse to a list and mention the mgr daemon."""
    ret = self._orch_cmd("ps", "--format", "json")
    # The raw command output must be a JSON array of daemon descriptions.
    self.assertIsInstance(json.loads(ret), list)
    # Short daemon-type name, consistent with the plain-text test above.
    self.assertIn("mgr", ret)


def test_service_action(self):
Expand Down
18 changes: 11 additions & 7 deletions src/pybind/mgr/dashboard/controllers/host.py
Expand Up @@ -2,10 +2,7 @@
from __future__ import absolute_import
import copy

try:
from typing import List
except ImportError:
pass
from typing import List

from mgr_util import merge_dicts
from orchestrator import HostSpec
Expand Down Expand Up @@ -44,9 +41,9 @@ def merge_hosts_by_hostname(ceph_hosts, orch_hosts):

# Hosts only in Orchestrator
orch_sources = {'ceph': False, 'orchestrator': True}
orch_hosts = [dict(hostname=hostname, ceph_version='', services=[], sources=orch_sources)
for hostname in orch_hostnames]
_ceph_hosts.extend(orch_hosts)
_orch_hosts = [dict(hostname=hostname, ceph_version='', services=[], sources=orch_sources)
for hostname in orch_hostnames]
_ceph_hosts.extend(_orch_hosts)
return _ceph_hosts


Expand Down Expand Up @@ -119,3 +116,10 @@ def devices(self, hostname):
def smart(self, hostname):
# type: (str) -> dict
return CephService.get_smart_data_by_host(hostname)

@RESTController.Resource('GET')
@raise_if_no_orchestrator
def daemons(self, hostname: str) -> List[dict]:
    """Return all orchestrator-managed daemons running on the given host.

    :param hostname: host to query; passed as the second argument to
        ``list_daemons`` (the first argument, the service name, is left
        unfiltered so every daemon type on the host is returned).
    :return: list of JSON-serializable daemon descriptions.
    """
    orch = OrchClient.instance()
    daemons = orch.services.list_daemons(None, hostname)
    return [d.to_json() for d in daemons]
11 changes: 1 addition & 10 deletions src/pybind/mgr/dashboard/controllers/orchestrator.py
Expand Up @@ -36,7 +36,7 @@ def get_device_osd_map():
}
:rtype: dict
"""
result = {}
result: dict = {}
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is PY3 type hinting syntax. Can we use that or should we use # type: dict instead? This may be a concern when backporting Octopus PRs to a PY2 based release.

@sebastian-philipp What do you think?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for bringing this up. Orchestrator integration will not be backported, but considering we are encouraged to add typing info in Dashboard (are we?), we should have a guideline to follow.

PY3 type hinting syntax will make backport a nightmare.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In today's standup we agree:

  • Use the PY3-only syntax if the code is not going to be backported.
  • Stick to the comment style (# type: xxxx) if the code is potentially going to be backported.
  • If you don't know, stick to the comment style.

for osd_id, osd_metadata in mgr.get('osd_metadata').items():
hostname = osd_metadata.get('hostname')
devices = osd_metadata.get('devices')
Expand Down Expand Up @@ -123,15 +123,6 @@ def list(self, hostname=None):
return inventory_hosts


@ApiController('/orchestrator/service', Scope.HOSTS)
class OrchestratorService(RESTController):
    # NOTE(review): this PR deletes this endpoint in favour of the
    # dedicated '/service' controller; shown here as diff context only.

    @raise_if_no_orchestrator
    def list(self, hostname=None):
        # Return every known service, optionally restricted to one host.
        orch = OrchClient.instance()
        return [service.to_json() for service in orch.services.list(None, None, hostname)]


@ApiController('/orchestrator/osd', Scope.OSD)
class OrchestratorOsd(RESTController):

Expand Down
31 changes: 31 additions & 0 deletions src/pybind/mgr/dashboard/controllers/service.py
@@ -0,0 +1,31 @@
from typing import List, Optional
import cherrypy

from . import ApiController, RESTController
from .orchestrator import raise_if_no_orchestrator
from ..security import Scope
from ..services.orchestrator import OrchClient


@ApiController('/service', Scope.HOSTS)
class Service(RESTController):
    """REST API for orchestrator-managed Ceph services and their daemons."""

    @raise_if_no_orchestrator
    def list(self, service_name: Optional[str] = None) -> List[dict]:
        """List all known services, optionally filtered by service name.

        :param service_name: if given, restrict the listing to this service.
        :return: list of JSON-serializable service descriptions.
        """
        orch = OrchClient.instance()
        return [service.to_json() for service in orch.services.list(service_name)]

    @raise_if_no_orchestrator
    def get(self, service_name: str) -> dict:
        """Return the description of a single service.

        :raises cherrypy.HTTPError: 404 if no service matches ``service_name``.
        """
        orch = OrchClient.instance()
        services = orch.services.get(service_name)
        if not services:
            raise cherrypy.HTTPError(404, 'Service {} not found'.format(service_name))
        # orch.services.get() returns a list; exactly one entry is expected.
        return services[0].to_json()

    @RESTController.Resource('GET')
    @raise_if_no_orchestrator
    def daemons(self, service_name: str) -> List[dict]:
        """List the daemons belonging to the given service."""
        orch = OrchClient.instance()
        daemons = orch.services.list_daemons(service_name)
        return [d.to_json() for d in daemons]
Expand Up @@ -47,6 +47,8 @@ import { RulesListComponent } from './prometheus/rules-list/rules-list.component
import { SilenceFormComponent } from './prometheus/silence-form/silence-form.component';
import { SilenceListComponent } from './prometheus/silence-list/silence-list.component';
import { SilenceMatcherModalComponent } from './prometheus/silence-matcher-modal/silence-matcher-modal.component';
import { ServiceDaemonListComponent } from './services/service-daemon-list/service-daemon-list.component';
import { ServiceDetailsComponent } from './services/service-details/service-details.component';
import { ServicesComponent } from './services/services.component';

@NgModule({
Expand Down Expand Up @@ -116,7 +118,9 @@ import { ServicesComponent } from './services/services.component';
RulesListComponent,
ActiveAlertListComponent,
MonitoringListComponent,
HostFormComponent
HostFormComponent,
ServiceDetailsComponent,
ServiceDaemonListComponent
]
})
export class ClusterModule {}
Expand Up @@ -9,12 +9,10 @@
<cd-inventory [hostname]="selectedHostname"></cd-inventory>
</tab>
<tab i18n-heading
heading="Services"
heading="Daemons"
*ngIf="permissions.hosts.read">
<cd-services
[hostname]="selectedHostname"
[hiddenColumns]="['nodename']">
</cd-services>
<cd-service-daemon-list [hostname]="selectedHostname">
</cd-service-daemon-list>
</tab>
<tab i18n-heading
heading="Performance Details"
Expand Down
Expand Up @@ -6,11 +6,9 @@ import { NgBootstrapFormValidationModule } from 'ng-bootstrap-form-validation';
import { BsDropdownModule } from 'ngx-bootstrap/dropdown';
import { TabsModule } from 'ngx-bootstrap/tabs';
import { ToastrModule } from 'ngx-toastr';
import { of } from 'rxjs';

import { configureTestBed, i18nProviders } from '../../../../../testing/unit-test-helper';
import { CoreModule } from '../../../../core/core.module';
import { OrchestratorService } from '../../../../shared/api/orchestrator.service';
import { CdTableSelection } from '../../../../shared/models/cd-table-selection';
import { Permissions } from '../../../../shared/models/permissions';
import { SharedModule } from '../../../../shared/shared.module';
Expand Down Expand Up @@ -47,10 +45,6 @@ describe('HostDetailsComponent', () => {
hosts: ['read'],
grafana: ['read']
});
const orchService = TestBed.get(OrchestratorService);
spyOn(orchService, 'status').and.returnValue(of({ available: true }));
spyOn(orchService, 'inventoryDeviceList').and.returnValue(of([]));
spyOn(orchService, 'serviceList').and.returnValue(of([]));
});

it('should create', () => {
Expand All @@ -73,7 +67,7 @@ describe('HostDetailsComponent', () => {
'Devices',
'Device health',
'Inventory',
'Services',
'Daemons',
'Performance Details'
]);
});
Expand Down
@@ -0,0 +1,6 @@
<!-- Daemon table: rows are loaded on demand through getDaemons($event);
     autoReload="0" disables periodic polling. -->
<cd-table [data]="daemons"
[columns]="columns"
columnMode="flex"
autoReload="0"
(fetchData)="getDaemons($event)">
</cd-table>
@@ -0,0 +1,114 @@
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { ComponentFixture, TestBed } from '@angular/core/testing';

import * as _ from 'lodash';
import { of } from 'rxjs';

import { configureTestBed, i18nProviders } from '../../../../../testing/unit-test-helper';
import { CoreModule } from '../../../../core/core.module';
import { CephServiceService } from '../../../../shared/api/ceph-service.service';
import { HostService } from '../../../../shared/api/host.service';
import { CdTableFetchDataContext } from '../../../../shared/models/cd-table-fetch-data-context';
import { SharedModule } from '../../../../shared/shared.module';
import { CephModule } from '../../../ceph.module';
import { ServiceDaemonListComponent } from './service-daemon-list.component';

describe('ServiceDaemonListComponent', () => {
  let component: ServiceDaemonListComponent;
  let fixture: ComponentFixture<ServiceDaemonListComponent>;

  // Static fixture: three OSD daemons on host 'osd0' and one MON on 'mon0'.
  const daemons = [
    {
      hostname: 'osd0',
      container_id: '003c10beafc8c27b635bcdfed1ed832e4c1005be89bb1bb05ad4cc6c2b98e41b',
      container_image_id: 'e70344c77bcbf3ee389b9bf5128f635cf95f3d59e005c5d8e67fc19bcc74ed23',
      container_image_name: 'docker.io/ceph/daemon-base:latest-master-devel',
      daemon_id: '3',
      daemon_type: 'osd',
      version: '15.1.0-1174-g16a11f7',
      status: 1,
      status_desc: 'running',
      last_refresh: '2020-02-25T04:33:26.465699'
    },
    {
      hostname: 'osd0',
      container_id: 'baeec41a01374b3ed41016d542d19aef4a70d69c27274f271e26381a0cc58e7a',
      container_image_id: 'e70344c77bcbf3ee389b9bf5128f635cf95f3d59e005c5d8e67fc19bcc74ed23',
      container_image_name: 'docker.io/ceph/daemon-base:latest-master-devel',
      daemon_id: '4',
      daemon_type: 'osd',
      version: '15.1.0-1174-g16a11f7',
      status: 1,
      status_desc: 'running',
      last_refresh: '2020-02-25T04:33:26.465822'
    },
    {
      hostname: 'osd0',
      container_id: '8483de277e365bea4365cee9e1f26606be85c471e4da5d51f57e4b85a42c616e',
      container_image_id: 'e70344c77bcbf3ee389b9bf5128f635cf95f3d59e005c5d8e67fc19bcc74ed23',
      container_image_name: 'docker.io/ceph/daemon-base:latest-master-devel',
      daemon_id: '5',
      daemon_type: 'osd',
      version: '15.1.0-1174-g16a11f7',
      status: 1,
      status_desc: 'running',
      last_refresh: '2020-02-25T04:33:26.465886'
    },
    {
      hostname: 'mon0',
      container_id: '6ca0574f47e300a6979eaf4e7c283a8c4325c2235ae60358482fc4cd58844a21',
      container_image_id: 'e70344c77bcbf3ee389b9bf5128f635cf95f3d59e005c5d8e67fc19bcc74ed23',
      container_image_name: 'docker.io/ceph/daemon-base:latest-master-devel',
      daemon_id: 'a',
      daemon_type: 'mon',
      version: '15.1.0-1174-g16a11f7',
      status: 1,
      status_desc: 'running',
      last_refresh: '2020-02-25T04:33:26.465886'
    }
  ];

  // Mimic the host API: filter the fixture by hostname, or return all.
  const getDaemonsByHostname = (hostname?: string) => {
    return hostname ? _.filter(daemons, { hostname: hostname }) : daemons;
  };

  // Mimic the service API: filter the fixture by daemon type, or return all.
  const getDaemonsByServiceName = (serviceName?: string) => {
    return serviceName ? _.filter(daemons, { daemon_type: serviceName }) : daemons;
  };

  configureTestBed({
    imports: [HttpClientTestingModule, CephModule, CoreModule, SharedModule],
    declarations: [],
    providers: [i18nProviders]
  });

  beforeEach(() => {
    fixture = TestBed.createComponent(ServiceDaemonListComponent);
    component = fixture.componentInstance;
    // Stub both backends so the component's current inputs drive the result.
    const hostService = TestBed.get(HostService);
    const cephServiceService = TestBed.get(CephServiceService);
    spyOn(hostService, 'getDaemons').and.callFake(() =>
      of(getDaemonsByHostname(component.hostname))
    );
    spyOn(cephServiceService, 'getDaemons').and.callFake(() =>
      of(getDaemonsByServiceName(component.serviceName))
    );
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });

  it('should list daemons by host', () => {
    component.hostname = 'mon0';
    component.getDaemons(new CdTableFetchDataContext(() => {}));
    expect(component.daemons.length).toBe(1); // only the single MON on mon0
  });

  it('should list daemons by service', () => {
    component.serviceName = 'osd';
    component.getDaemons(new CdTableFetchDataContext(() => {}));
    expect(component.daemons.length).toBe(3); // the three OSDs in the fixture
  });
});