Skip to content
This repository has been archived by the owner on Apr 7, 2022. It is now read-only.

Commit

Permalink
Merge pull request #1456 from akrzos/perf_ui_improvements
Browse files Browse the repository at this point in the history
Perf ui improvements
  • Loading branch information
seandst committed Jan 8, 2015
2 parents da4b934 + a265015 commit c96c32a
Show file tree
Hide file tree
Showing 12 changed files with 1,167 additions and 439 deletions.
417 changes: 0 additions & 417 deletions cfme/tests/perf/test_page_render_times_n_queries.py

This file was deleted.

55 changes: 55 additions & 0 deletions cfme/tests/perf/test_ui_automate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
from cfme.fixtures import pytest_selenium as sel
from utils.conf import ui_bench_tests
from utils.pagestats import analyze_page_stat
from utils.pagestats import navigate_accordions
from utils.pagestats import pages_to_csv
from utils.pagestats import pages_to_statistics_csv
from utils.pagestats import perf_click
from utils.pagestats import standup_perf_ui
from collections import OrderedDict
import pytest
import re

# Request-log lines aggregated for the Automate Explorer test: POSTs that
# select datastore tree nodes (aem-/aei- ids, presumably method and
# instance nodes — confirm against the UI tree ids).
explorer_filters = [
    re.compile(r'^POST \"\/miq_ae_class\/tree_select\/\?id\=' + node + r'[A-Za-z0-9\-\_]*\"$')
    for node in (r'aem\-', r'aei\-')]

# Request-log lines aggregated for the Automate Customization test: POSTs
# selecting odg- tree nodes (presumably dialog-group nodes — confirm
# against the customization accordion tree ids).
customization_filters = [
    re.compile(r'^POST \"\/miq_ae_customization\/tree_select\/\?id\=odg\-[A-Za-z0-9\-\_]*\"$')]


@pytest.mark.perf_ui_automate
def test_perf_ui_automate_explorer(ssh_client, soft_assert):
    """Benchmark the Automate Explorer page and its Datastore accordion,
    then dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Time the initial navigation to the explorer page itself.
    landing = perf_click(worker_pid, log_tail, True, sel.force_navigate,
        'automate_explorer')
    pages.extend(analyze_page_stat(landing, soft_assert))

    # Walk the accordion tree, limited by the configured page-check count.
    accordions = OrderedDict([('Datastore', 'datastore')])
    check_limit = ui_bench_tests['page_check']['automate']['explorer']
    pages.extend(navigate_accordions(accordions, 'automate_explorer', check_limit,
        worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_automate_explorer.csv')
    pages_to_statistics_csv(pages, explorer_filters, 'statistics.csv')


@pytest.mark.perf_ui_automate
def test_perf_ui_automate_customization(ssh_client, soft_assert):
    """Benchmark the Automate Customization page and each of its
    accordions, then dump timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Time the initial navigation to the customization page itself.
    landing = perf_click(worker_pid, log_tail, True, sel.force_navigate,
        'automate_customization')
    pages.extend(analyze_page_stat(landing, soft_assert))

    # Walk each accordion, limited by the configured page-check count.
    accordions = OrderedDict([
        ('Provisioning Dialogs', 'provisioning_dialogs'),
        ('Service Dialogs', 'service_dialogs'),
        ('Buttons', 'buttons'),
        ('Import/Export', 'import_export')])
    check_limit = ui_bench_tests['page_check']['automate']['customization']
    pages.extend(navigate_accordions(accordions, 'automate_customization', check_limit,
        worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_automate_customization.csv')
    pages_to_statistics_csv(pages, customization_filters, 'statistics.csv')
141 changes: 141 additions & 0 deletions cfme/tests/perf/test_ui_cloud.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-
from cfme.cloud.provider import get_all_providers as get_all_cloud_provs
from cfme.fixtures import pytest_selenium as sel
from utils.conf import ui_bench_tests
from utils.pagestats import analyze_page_stat
from utils.pagestats import navigate_accordions
from utils.pagestats import navigate_quadicons
from utils.pagestats import navigate_split_table
from utils.pagestats import pages_to_csv
from utils.pagestats import pages_to_statistics_csv
from utils.pagestats import perf_click
from utils.pagestats import standup_perf_ui
from collections import OrderedDict
import pytest
import re

# Request-log patterns used when aggregating per-page statistics; each
# list is consumed by the matching test below.  All of the show routes
# share the same trailing "<record id>" tail, factored out here.
_SHOW_TAIL = r'[A-Za-z0-9]*\"$'

cloud_provider_filters = [
    re.compile(r'^GET \"\/ems_cloud\/show\/' + _SHOW_TAIL)]

availability_zones_filters = [
    re.compile(r'^GET \"\/availability_zone\/show\/' + _SHOW_TAIL)]

tenants_filters = [
    re.compile(r'^GET \"\/cloud_tenant\/show\/' + _SHOW_TAIL)]

flavors_filters = [
    re.compile(r'^GET \"\/flavor\/show\/' + _SHOW_TAIL)]

security_groups_filters = [
    re.compile(r'^GET \"\/security_group\/show\/' + _SHOW_TAIL)]

# VM explorer tree clicks: v-/t-/ms- node id prefixes.
vm_cloud_filters = [
    re.compile(r'^POST \"\/vm_cloud\/tree_select\/\?id\=' + tag + r'\-' + _SHOW_TAIL)
    for tag in ('v', 't', 'ms')]


@pytest.mark.perf_ui_cloud
@pytest.mark.usefixtures("setup_cloud_providers")
def test_perf_ui_cloud_providers(ssh_client, soft_assert):
    """Benchmark clicking through every cloud provider quadicon, then
    dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Optional navigation limit from the benchmark config; 0 means no limit
    # is configured for this category.
    cloud_checks = ui_bench_tests['page_check']['cloud']
    nav_limit = cloud_checks['providers'] if 'providers' in cloud_checks else 0

    pages.extend(navigate_quadicons(get_all_cloud_provs(), 'cloud_prov', 'clouds_providers',
        nav_limit, worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_cloud_providers.csv')
    pages_to_statistics_csv(pages, cloud_provider_filters, 'statistics.csv')


@pytest.mark.perf_ui_cloud
@pytest.mark.usefixtures("setup_cloud_providers")
def test_perf_ui_cloud_availability_zones(ssh_client, soft_assert):
    """Benchmark paging through the Availability Zones split table, then
    dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Optional navigation limit from the benchmark config; 0 means no limit.
    cloud_checks = ui_bench_tests['page_check']['cloud']
    nav_limit = cloud_checks['availability_zones'] if 'availability_zones' in cloud_checks else 0

    # Page object imported inside the test, matching the file's convention.
    from cfme.cloud.availability_zone import list_page as az_pages

    pages.extend(navigate_split_table(az_pages.zone_table, 'clouds_availability_zones',
        nav_limit, worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_cloud_availability_zones.csv')
    pages_to_statistics_csv(pages, availability_zones_filters, 'statistics.csv')


@pytest.mark.perf_ui_cloud
@pytest.mark.usefixtures("setup_cloud_providers")
def test_perf_ui_cloud_tenants(ssh_client, soft_assert):
    """Benchmark paging through the cloud Tenants split table, then dump
    per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Optional navigation limit from the benchmark config; 0 means no limit.
    cloud_checks = ui_bench_tests['page_check']['cloud']
    nav_limit = cloud_checks['tenants'] if 'tenants' in cloud_checks else 0

    # Page object imported inside the test, matching the file's convention.
    from cfme.cloud.tenant import list_page as tenant_pages

    pages.extend(navigate_split_table(tenant_pages.tenant_table, 'clouds_tenants',
        nav_limit, worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_cloud_tenants.csv')
    pages_to_statistics_csv(pages, tenants_filters, 'statistics.csv')


@pytest.mark.perf_ui_cloud
@pytest.mark.usefixtures("setup_cloud_providers")
def test_perf_ui_cloud_flavors(ssh_client, soft_assert):
    """Benchmark paging through the Flavors split table, then dump
    per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Optional navigation limit from the benchmark config; 0 means no limit.
    cloud_checks = ui_bench_tests['page_check']['cloud']
    nav_limit = cloud_checks['flavors'] if 'flavors' in cloud_checks else 0

    # Page object imported inside the test, matching the file's convention.
    from cfme.cloud.flavor import list_page as flavor_pages

    pages.extend(navigate_split_table(flavor_pages.flavor_table, 'clouds_flavors',
        nav_limit, worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_cloud_flavors.csv')
    pages_to_statistics_csv(pages, flavors_filters, 'statistics.csv')


@pytest.mark.perf_ui_cloud
@pytest.mark.usefixtures("setup_cloud_providers")
def test_perf_ui_cloud_security_groups(ssh_client, soft_assert):
    """Benchmark paging through the Security Groups split table, then
    dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Optional navigation limit from the benchmark config; 0 means no limit.
    cloud_checks = ui_bench_tests['page_check']['cloud']
    nav_limit = cloud_checks['security_groups'] if 'security_groups' in cloud_checks else 0

    # Page object imported inside the test, matching the file's convention.
    from cfme.cloud.security_group import list_page as sg_pages

    pages.extend(navigate_split_table(sg_pages.security_group_table, 'clouds_security_groups',
        nav_limit, worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_cloud_security_groups.csv')
    pages_to_statistics_csv(pages, security_groups_filters, 'statistics.csv')


@pytest.mark.perf_ui_cloud
@pytest.mark.usefixtures("setup_cloud_providers")
def test_perf_ui_cloud_vm_explorer(ssh_client, soft_assert):
    """Benchmark the cloud Instances explorer and its four accordions,
    then dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Time the initial navigation to the explorer page itself.
    landing = perf_click(worker_pid, log_tail, True, sel.force_navigate, 'clouds_instances')
    pages.extend(analyze_page_stat(landing, soft_assert))

    # Walk each accordion, limited by the configured page-check count.
    accordions = OrderedDict([
        ('Instances by Provider', 'instances_by_prov'),
        ('Images by Provider', 'images_by_prov'),
        ('Instances', 'instances'),
        ('Images', 'images')])
    check_limit = ui_bench_tests['page_check']['cloud']['vm_explorer']
    pages.extend(navigate_accordions(accordions, 'clouds_instances', check_limit,
        worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_cloud_vm_explorer.csv')
    pages_to_statistics_csv(pages, vm_cloud_filters, 'statistics.csv')
35 changes: 35 additions & 0 deletions cfme/tests/perf/test_ui_configure.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
from cfme.fixtures import pytest_selenium as sel
from utils.conf import ui_bench_tests
from utils.pagestats import analyze_page_stat
from utils.pagestats import navigate_accordions
from utils.pagestats import pages_to_csv
from utils.pagestats import pages_to_statistics_csv
from utils.pagestats import perf_click
from utils.pagestats import standup_perf_ui
from collections import OrderedDict
import pytest
import re

# Request-log lines aggregated for the Configuration test: POSTs selecting
# ops tree nodes with u-/g-/ti- id prefixes (presumably users, groups and
# a third node type — confirm against the ops accordion tree ids).
configuration_filters = [
    re.compile(r'^POST \"\/ops\/tree_select\/\?id\=' + tag + r'\-[0-9\-\_]*\"$')
    for tag in ('u', 'g', 'ti')]


@pytest.mark.perf_ui_configure
def test_perf_ui_configure_configuration(ssh_client, soft_assert):
    """Benchmark the Configure/Configuration page and its accordions,
    then dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Time the initial navigation to the configuration page itself.
    landing = perf_click(worker_pid, log_tail, True, sel.force_navigate, 'configuration')
    pages.extend(analyze_page_stat(landing, soft_assert))

    # Walk each accordion, limited by the configured page-check count.
    # ('Database', 'database') is deliberately excluded: reading the
    # database tree takes almost 17 minutes.
    accordions = OrderedDict([
        ('Settings', 'settings'),
        ('Access Control', 'access_control'),
        ('Diagnostics', 'diagnostics')])
    check_limit = ui_bench_tests['page_check']['configure']['configuration']
    pages.extend(navigate_accordions(accordions, 'configuration', check_limit,
        worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_configure_configuration.csv')
    pages_to_statistics_csv(pages, configuration_filters, 'statistics.csv')
37 changes: 37 additions & 0 deletions cfme/tests/perf/test_ui_control.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
from cfme.fixtures import pytest_selenium as sel
from utils.conf import ui_bench_tests
from utils.pagestats import analyze_page_stat
from utils.pagestats import navigate_accordions
from utils.pagestats import pages_to_csv
from utils.pagestats import pages_to_statistics_csv
from utils.pagestats import perf_click
from utils.pagestats import standup_perf_ui
from collections import OrderedDict
import pytest
import re

# Request-log lines aggregated for the Control Explorer test: POSTs that
# select policy tree nodes.  The xx- prefixed ids carry alphanumeric
# suffixes; the ev-/a-/al- ids are numeric.
explorer_filters = [
    re.compile(r'^POST \"\/miq_policy\/tree_select\/\?id\=xx\-' + kind + r'[A-Za-z0-9\-\_]*\"$')
    for kind in ('compliance', 'control')
] + [
    re.compile(r'^POST \"\/miq_policy\/tree_select\/\?id\=' + tag + r'\-[0-9]*\"$')
    for tag in ('ev', 'a', 'al')]


@pytest.mark.perf_ui_control
def test_perf_ui_control_explorer(ssh_client, soft_assert):
    """Benchmark the Control Explorer page and each of its accordions,
    then dump per-page timings and aggregated statistics to CSV."""
    pages, worker_pid, log_tail = standup_perf_ui(ssh_client, soft_assert)

    # Time the initial navigation to the explorer page itself.
    landing = perf_click(worker_pid, log_tail, True, sel.force_navigate, 'control_explorer')
    pages.extend(analyze_page_stat(landing, soft_assert))

    # Walk each accordion, limited by the configured page-check count.
    accordions = OrderedDict([
        ('Policy Profiles', 'policy_profiles'),
        ('Policies', 'policies'),
        ('Events', 'events'),
        ('Conditions', 'conditions'),
        ('Actions', 'actions'),
        ('Alert Profiles', 'alert_profiles'),
        ('Alerts', 'alerts')])
    check_limit = ui_bench_tests['page_check']['control']['explorer']
    pages.extend(navigate_accordions(accordions, 'control_explorer', check_limit,
        worker_pid, log_tail, soft_assert))

    pages_to_csv(pages, 'perf_ui_control_explorer.csv')
    pages_to_statistics_csv(pages, explorer_filters, 'statistics.csv')

0 comments on commit c96c32a

Please sign in to comment.