Use the BulkRequest method for displaying statistics (#6135)
* Use the BulkRequest method for displaying statistics

* Reduce Matomo requests to one and fix tests

Co-authored-by: willy <willy.konguem@gmail.com>
Situphen and firm1 committed Jun 13, 2021
1 parent e3fde7b commit 836d6b8
Showing 2 changed files with 104 additions and 74 deletions.
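For context: Matomo's API.getBulkRequest bundles several API calls into a single HTTP request by passing each sub-request, URL-encoded, as a urls[N] parameter, and it answers with a JSON array holding one result per sub-request, in order. A minimal sketch of the payload this commit builds, where the endpoint URL, site id, and dates are placeholder assumptions:

import urllib.parse

import requests

# Shared top-level parameters, matching the data_request dict in the diff below.
payload = {"module": "API", "method": "API.getBulkRequest", "format": "json", "filter_limit": -1}

sub_requests = [
    # Page statistics are looked up directly by pageUrl...
    {"method": "Actions.getPageUrl", "idSite": 1, "period": "day", "date": "2021-06-01,2021-06-07", "pageUrl": "https://zestedesavoir.com/"},
    # ...while Referrers.* methods are restricted to the page via a segment.
    {"method": "Referrers.getWebsites", "idSite": 1, "period": "day", "date": "2021-06-01,2021-06-07", "segment": "pageUrl==" + urllib.parse.quote_plus("https://zestedesavoir.com/")},
]
for index, params in enumerate(sub_requests):
    payload[f"urls[{index}]"] = urllib.parse.urlencode(params)

# One POST instead of len(sub_requests) separate calls; entry i of the
# returned JSON array is the result of urls[i].
results = requests.post("https://matomo.example.com/index.php", data=payload).json()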
81 changes: 58 additions & 23 deletions zds/tutorialv2/tests/tests_views/tests_stats.py
@@ -41,7 +41,18 @@ def setUp(self):
             self.user_author, self.user_staff, self.nb_part, self.nb_chapter, self.nb_section
         )
 
+    def count_urls(self):
+        cpt = 1
+        if self.published.content.load_version().has_extracts():
+            return 1
+        for child in self.published.content.load_version().children:
+            cpt += 1
+            if not child.has_extracts():
+                cpt += len(child.children)
+        return cpt
+
     def _mock_response(self, start_date=None, end_date=None, duration=7, status=200, raise_for_status=None):
+        methods = ["Referrers.getReferrerType", "Referrers.getWebsites", "Referrers.getKeywords", "Actions.getPageUrl"]
 
         if end_date is None:
             end_date = datetime.datetime.today()
@@ -56,29 +67,53 @@ def _mock_response(self, start_date=None, end_date=None, duration=7, status=200,
         # set status code and content
         mock_resp.status_code = status
         # add json data if provided
-        json_data = {}
-        for single_date in daterange(start_date, end_date):
-            fuzzy_item = {
-                "label": r"\/index",
-                "nb_visits": randint(0, 1000),
-                "nb_uniq_visitors": randint(0, 1000),
-                "nb_hits": randint(0, 1000),
-                "sum_time_spent": randint(0, 1000),
-                "nb_hits_following_search": randint(0, 1000),
-                "entry_nb_uniq_visitors": randint(0, 1000),
-                "entry_nb_visits": randint(0, 1000),
-                "entry_nb_actions": randint(0, 1000),
-                "entry_sum_visit_length": randint(0, 1000),
-                "entry_bounce_count": randint(0, 1000),
-                "exit_nb_uniq_visitors": randint(0, 1000),
-                "exit_nb_visits": randint(0, 1000),
-                "avg_time_on_page": randint(0, 1000),
-                "bounce_rate": f"{randint(0, 1000)}\u00a0%",
-                "exit_rate": f"{randint(0, 1000)}\u00a0%",
-                "url": r"https:\/\/zestedesavoir.com",
-            }
-            json_data[single_date.strftime("%Y-%m-%d")] = [fuzzy_item]
-        mock_resp.json = mock.Mock(return_value=json_data)
+        response_data = []
+        count_urls = self.count_urls()
+        for method in methods:
+            for counter in range(count_urls):
+                json_data = {}
+                for single_date in daterange(start_date, end_date):
+                    if method == "Actions.getPageUrl":
+                        fuzzy_item = {
+                            "label": r"\/index",
+                            "nb_visits": randint(0, 1000),
+                            "nb_uniq_visitors": randint(0, 1000),
+                            "nb_hits": randint(0, 1000),
+                            "sum_time_spent": randint(0, 1000),
+                            "nb_hits_following_search": randint(0, 1000),
+                            "entry_nb_uniq_visitors": randint(0, 1000),
+                            "entry_nb_visits": randint(0, 1000),
+                            "entry_nb_actions": randint(0, 1000),
+                            "entry_sum_visit_length": randint(0, 1000),
+                            "entry_bounce_count": randint(0, 1000),
+                            "exit_nb_uniq_visitors": randint(0, 1000),
+                            "exit_nb_visits": randint(0, 1000),
+                            "avg_time_on_page": randint(0, 1000),
+                            "bounce_rate": f"{randint(0, 1000)}\u00a0%",
+                            "exit_rate": f"{randint(0, 1000)}\u00a0%",
+                            "url": r"https:\/\/zestedesavoir.com",
+                        }
+                    else:
+                        fuzzy_item = {
+                            "label": "Reseaux sociaux",
+                            "nb_uniq_visitors": randint(0, 1000),
+                            "nb_visits": randint(0, 1000),
+                            "nb_actions": randint(0, 1000),
+                            "nb_users": randint(0, 1000),
+                            "max_actions": randint(0, 1000),
+                            "sum_visit_length": randint(0, 1000),
+                            "bounce_count": randint(0, 1000),
+                            "nb_visits_converted": randint(0, 1000),
+                            "goals": {},
+                            "nb_conversions": randint(0, 1000),
+                            "revenue": 0,
+                            "segment": "referrerType==social",
+                            "referrer_type": 7,
+                            "idsubdatatable": 7,
+                        }
+                    json_data[single_date.strftime("%Y-%m-%d")] = [fuzzy_item]
+                response_data.append(json_data)
+        mock_resp.json = mock.Mock(return_value=response_data)
         return mock_resp
 
     @mock.patch("requests.post")
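The mock mirrors the bulk contract: a flat list with one per-day payload for every (method, url) pair, methods-major, so its length must be len(methods) * count_urls(). A sanity check one could add in a test (hypothetical, not part of this commit):

# 4 Matomo methods are queried for each of the content's pages.
mock_resp = self._mock_response()
self.assertEqual(len(mock_resp.json()), 4 * self.count_urls())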
97 changes: 46 additions & 51 deletions zds/tutorialv2/views/statistics.py
@@ -1,3 +1,4 @@
+import itertools
 import logging
 import urllib.parse
 from datetime import timedelta, datetime, date
@@ -58,62 +59,46 @@ def get_content_urls(self):
                 urls.append(NamedUrl(subchild.title, subchild.get_absolute_url_online(), 2))
         return urls
 
-    def get_all_refs(self, url, start, end, method):
+    def get_all_statistics(self, urls, start, end, methods):
         date_ranges = "{},{}".format(start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d"))
-        absolute_url = f"{self.request.scheme}://{self.request.get_host()}{url.url}"
-        param_url = f"pageUrl=={urllib.parse.quote_plus(absolute_url)}"
 
-        data_request = {
-            "module": "API",
-            "method": method,
-            "format": "json",
-            "idSite": self.matomo_site_id,
-            "date": date_ranges,
-            "period": "day",
-            "segment": ",".join([param_url]),
-        }
+        data_request = {"module": "API", "method": "API.getBulkRequest", "format": "json", "filter_limit": -1}
+        data_structured = {}
 
-        try:
-            response_matomo = requests.post(url=self.matomo_api_url, data=data_request)
-            data = response_matomo.json()
-            if data.get("result", "") == "error":
-                data = {}
-                self.logger.error(data.get("message", "Something failed with Matomo reporting system"))
-                messages.error(self.request, data.get("message", _(f"Impossible de récupérer les référents du site.")))
-        except Exception:
-            data = {}
-            self.logger.exception(f"Something failed with Matomo reporting system")
-            messages.error(self.request, _(f"Impossible de récupérer les référents du site."))
+        for method in methods:
+            data_structured[method] = []
+
+        for index, method_url in enumerate(itertools.product(methods, urls)):
+            method = method_url[0]
+            url = method_url[1]
+            absolute_url = f"{self.request.scheme}://{self.request.get_host()}{url.url}"
+            param_url = f"pageUrl=={urllib.parse.quote_plus(absolute_url)}"
 
-        return data
+            request_params = {"method": method, "idSite": self.matomo_site_id, "date": date_ranges, "period": "day"}
+            if method.startswith("Referrers"):  # referrers requests use a segment to define the url
+                request_params["segment"] = ",".join([param_url])
+            elif method == "Actions.getPageUrl":
+                request_params["pageUrl"] = absolute_url
 
-    def get_all_stats(self, url, start, end):
-        date_ranges = "{},{}".format(start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d"))
-        absolute_url = f"{self.request.scheme}://{self.request.get_host()}{url.url}"
-
-        data_request = {
-            "module": "API",
-            "method": "Actions.getPageUrl",
-            "format": "json",
-            "idSite": self.matomo_site_id,
-            "date": date_ranges,
-            "period": "day",
-            "pageUrl": absolute_url,
-        }
+            data_request.update({f"urls[{index}]": urllib.parse.urlencode(request_params)})
 
         try:
             response_matomo = requests.post(url=self.matomo_api_url, data=data_request)
             data = response_matomo.json()
-            if data.get("result", "") == "error":
+            if isinstance(data, dict) and data.get("result", "") == "error":
                 data = {}
-                self.logger.error(data.get("message", "Something failed with Matomo reporting system"))
+                self.logger.error(data.get("message", "Something failed with Matomo reporting system."))
                 messages.error(
                     self.request, data.get("message", _(f"Impossible de récupérer les statistiques du site."))
                 )
-
+            for index, method_url in enumerate(itertools.product(methods, urls)):
+                method = method_url[0]
+                data_structured[method].append(data[index])
+
+            return data_structured
         except Exception:
             data = {}
-            self.logger.exception(f"Something failed with Matomo reporting system")
+            self.logger.exception(f"Something failed with Matomo reporting system.")
             messages.error(self.request, _(f"Impossible de récupérer les statistiques du site."))
 
         return data
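get_all_statistics relies on itertools.product(methods, urls) iterating deterministically, methods-major: the same product() call orders both the urls[N] parameters and the unpacking loop, so data[index] always lands in the right bucket. A minimal standalone sketch of that unpacking, with placeholder URLs and data:

import itertools

methods = ["Referrers.getReferrerType", "Referrers.getWebsites", "Referrers.getKeywords", "Actions.getPageUrl"]
urls = ["/tutoriel/1/a/", "/tutoriel/1/b/"]  # hypothetical page URLs
data = [{"2021-06-01": []} for _ in range(len(methods) * len(urls))]  # stands in for Matomo's JSON array

# Same iteration order as when the bulk request was built, so each entry
# maps back to exactly one (method, url) pair.
data_structured = {method: [] for method in methods}
for index, (method, url) in enumerate(itertools.product(methods, urls)):
    data_structured[method].append(data[index])

assert all(len(results) == len(urls) for results in data_structured.values())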
@@ -229,27 +214,37 @@ def get_context_data(self, **kwargs):
         keywords = {}
         report_field = [("nb_uniq_visitors", False), ("nb_hits", False), ("avg_time_on_page", True)]
 
-        for url in urls:
-            all_stats = self.get_all_stats(url, start_date, end_date)
-            cumul_stats = self.get_cumulative(all_stats)
-            all_referrers = self.get_all_refs(url, start_date, end_date, "Referrers.getWebsites")
-            all_type_referrers = self.get_all_refs(url, start_date, end_date, "Referrers.getReferrerType")
-            all_keywords = self.get_all_refs(url, start_date, end_date, "Referrers.getKeywords")
+        # Each function sends only one bulk request for all the urls
+        # Each variable is a list of dictionaries (one for each url)
+        all = self.get_all_statistics(
+            urls,
+            start_date,
+            end_date,
+            ["Referrers.getReferrerType", "Referrers.getWebsites", "Referrers.getKeywords", "Actions.getPageUrl"],
+        )
+
+        all_stats = all["Actions.getPageUrl"]
+        all_ref_websites = all["Referrers.getWebsites"]
+        all_ref_types = all["Referrers.getReferrerType"]
+        all_ref_keyword = all["Referrers.getKeywords"]
+
+        for index, url in enumerate(urls):
+            cumul_stats = self.get_cumulative(all_stats[index])
             reports[url] = {}
             cumulative_stats[url] = {}
 
             for item, is_avg in report_field:
-                reports[url][item] = self.get_stat_metrics(all_stats, item)
+                reports[url][item] = self.get_stat_metrics(all_stats[index], item)
                 if is_avg:
                     cumulative_stats[url][item] = 0
                     if cumul_stats.get("total") > 0:
                         cumulative_stats[url][item] = cumul_stats.get(item, 0) / cumul_stats.get("total")
                 else:
                     cumulative_stats[url][item] = cumul_stats.get(item, 0)
 
-            referrers = self.merge_ref_to_data(referrers, self.get_ref_metrics(all_referrers))
-            type_referrers = self.merge_ref_to_data(type_referrers, self.get_ref_metrics(all_type_referrers))
-            keywords = self.merge_ref_to_data(keywords, self.get_ref_metrics(all_keywords))
+            referrers = self.merge_ref_to_data(referrers, self.get_ref_metrics(all_ref_websites[index]))
+            type_referrers = self.merge_ref_to_data(type_referrers, self.get_ref_metrics(all_ref_types[index]))
+            keywords = self.merge_ref_to_data(keywords, self.get_ref_metrics(all_ref_keyword[index]))
 
         if display_mode.lower() == "global":
             reports = {NamedUrl(display_mode, "", 0): self.merge_report_to_global(reports, report_field)}
