4 changes: 2 additions & 2 deletions .pylintrc
@@ -298,7 +298,7 @@ ignored-parents=
max-args=10

# Maximum number of attributes for a class (see R0902).
max-attributes=7
max-attributes=10

# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
@@ -470,7 +470,7 @@ notes-rgx=
[REFACTORING]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5
max-nested-blocks=6

# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
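Both tweaks relax pylint's design checkers: R0902 (too-many-instance-attributes) now tolerates up to 10 attributes per class instead of 7, and the nested-block limit behind R1702 rises from 5 to 6. A minimal, hypothetical sketch (not part of this PR) of a class that the old threshold would flag but the new one accepts:

# Hypothetical illustration only: a class with 8 instance attributes.
# Under the old max-attributes=7 pylint emits R0902 (too-many-instance-attributes);
# with max-attributes=10 it passes cleanly.
class CollectorSettings:
    def __init__(self) -> None:
        self.token = ""
        self.api_url = ""
        self.verify_tls = True
        self.timeout = 30
        self.retries = 3
        self.page_size = 100
        self.parents: list[str] = []
        self.sub_issues: list[str] = []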
33 changes: 33 additions & 0 deletions integration_test.py
@@ -0,0 +1,33 @@
"""
This script demonstrates how to use the BulkSubIssueCollector to find sub-issues
"""

import os
import urllib3

from release_notes_generator.data.utils.bulk_sub_issue_collector import CollectorConfig, BulkSubIssueCollector

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

class MissingTokenError(ValueError):
"""Raised when GITHUB_TOKEN environment variable is not set."""
pass

token = os.getenv("GITHUB_TOKEN")
if token is None:
raise MissingTokenError("GITHUB_TOKEN environment variable is not set")

# WARNING: TLS verification is disabled for testing purposes only.
# Do not use this configuration in production.
cfg = CollectorConfig(verify_tls=False)

collector = BulkSubIssueCollector(token, cfg=cfg)

new_parents = [
"absa-group/AUL#2960",
]

while new_parents:
new_parents = collector.scan_sub_issues_for_parents(new_parents)
print("New parents found:", new_parents)
print("Collected sub-issues so far:", collector.parents_sub_issues)
Empty file.
@@ -20,6 +20,10 @@
from copy import deepcopy
from typing import Optional

from github.Issue import Issue
from github.PullRequest import PullRequest
from github.Repository import Repository

from release_notes_generator.action_inputs import ActionInputs
from release_notes_generator.model.mined_data import MinedData

@@ -68,39 +72,43 @@ def filter(self, data: MinedData) -> MinedData:
if data.release is not None:
logger.info("Starting issue, prs and commit reduction by the latest release since time.")

issues_list = self._filter_issues(data)
logger.debug("Count of issues reduced from %d to %d", len(data.issues), len(issues_list))
issues_dict = self._filter_issues(data)
logger.debug("Count of issues reduced from %d to %d", len(data.issues), len(issues_dict))

# filter out merged PRs and commits before the date
pulls_seen: set[int] = set()
pulls_list: list = []
for pull in data.pull_requests:
pulls_dict: dict[PullRequest, Repository] = {}
for pull, repo in data.pull_requests.items():
if (pull.merged_at is not None and pull.merged_at >= data.since) or (
pull.closed_at is not None and pull.closed_at >= data.since
):
if pull.number not in pulls_seen:
pulls_seen.add(pull.number)
pulls_list.append(pull)
logger.debug("Count of pulls reduced from %d to %d", len(data.pull_requests), len(pulls_list))
pulls_dict[pull] = repo
logger.debug(
"Count of pulls reduced from %d to %d", len(data.pull_requests.items()), len(pulls_dict.items())
)

commits_list = list(filter(lambda commit: commit.commit.author.date > data.since, data.commits))
logger.debug("Count of commits reduced from %d to %d", len(data.commits), len(commits_list))
commits_dict = {
commit: repo for commit, repo in data.commits.items() if commit.commit.author.date > data.since
}
logger.debug("Count of commits reduced from %d to %d", len(data.commits.items()), len(commits_dict.items()))

md.issues = issues_list
md.pull_requests = pulls_list
md.commits = commits_list
md.issues = issues_dict
md.pull_requests = pulls_dict
md.commits = commits_dict

logger.debug(
"Input data. Issues: %d, Pull Requests: %d, Commits: %d",
len(data.issues),
len(data.pull_requests),
len(data.commits),
len(data.issues.items()),
len(data.pull_requests.items()),
len(data.commits.items()),
)
logger.debug(
"Filtered data. Issues: %d, Pull Requests: %d, Commits: %d",
len(md.issues),
len(md.pull_requests),
len(md.commits),
len(md.issues.items()),
len(md.pull_requests.items()),
len(md.commits.items()),
)
else:
md.issues = deepcopy(data.issues)
@@ -109,12 +117,15 @@ def filter(self, data: MinedData) -> MinedData:

return md

def _filter_issues(self, data: MinedData) -> list:
def _filter_issues(self, data: MinedData) -> dict[Issue, Repository]:
"""
Filter issues based on the selected filtering type - default or hierarchy.

@param data: The mined data containing issues.
@return: The filtered list of issues.
Parameters:
data (MinedData): The mined data to filter.

Returns:
dict[Issue, Repository]: The filtered issues.
"""
if ActionInputs.get_hierarchy():
logger.debug("Used hierarchy issue filtering logic.")
@@ -124,20 +135,24 @@ def _filter_issues_default(data: MinedData) -> list:
return self._filter_issues_default(data)

@staticmethod
def _filter_issues_default(data: MinedData) -> list:
def _filter_issues_default(data: MinedData) -> dict[Issue, Repository]:
"""
Default filtering for issues: filter out closed issues before the release date.

Parameters:
data (MinedData): The mined data containing issues and release information.

Returns:
list: The filtered list of issues.
dict[Issue, Repository]: The filtered issues.
"""
return [issue for issue in data.issues if (issue.closed_at is None) or (issue.closed_at >= data.since)]
return {
issue: repo
for issue, repo in data.issues.items()
if (issue.closed_at is None) or (issue.closed_at >= data.since)
}

@staticmethod
def _filter_issues_issue_hierarchy(data: MinedData) -> list:
def _filter_issues_issue_hierarchy(data: MinedData) -> dict[Issue, Repository]:
"""
Hierarchy filtering for issues: include issues closed since the release date
or still open at generation time.
@@ -146,14 +161,10 @@ def _filter_issues_issue_hierarchy(data: MinedData) -> list:
data (MinedData): The mined data containing issues and release information.

Returns:
list: The filtered list of issues.
dict[Issue, Repository]: The filtered issues.
"""
return list(
filter(
lambda issue: (
(issue.closed_at is not None and issue.closed_at >= data.since) # closed after the release
or (issue.state == "open") # still open
),
data.issues,
)
)
return {
issue: repo
for issue, repo in data.issues.items()
if ((issue.closed_at is not None and issue.closed_at >= data.since) or (issue.state == "open"))
}
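The wider refactor in this file switches MinedData's issues, pull_requests and commits from lists to mappings of each object to its source Repository, so every filter now rebuilds a dict instead of a list. A self-contained sketch of the dict-comprehension pattern used by _filter_issues_default, with a hypothetical FakeIssue stand-in for the real github.Issue type:

from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Optional

@dataclass(frozen=True)
class FakeIssue:  # hypothetical stand-in for github.Issue.Issue, used only for illustration
    number: int
    state: str
    closed_at: Optional[datetime]

since = datetime(2024, 1, 1, tzinfo=timezone.utc)

# In the real code the values are Repository objects; plain strings stand in here.
issues: dict[FakeIssue, str] = {
    FakeIssue(1, "closed", datetime(2023, 12, 1, tzinfo=timezone.utc)): "org/repo-a",
    FakeIssue(2, "closed", datetime(2024, 2, 1, tzinfo=timezone.utc)): "org/repo-a",
    FakeIssue(3, "open", None): "org/repo-b",
}

# Mirrors _filter_issues_default: keep issues that are still open or were closed after the release date.
filtered = {
    issue: repo
    for issue, repo in issues.items()
    if issue.closed_at is None or issue.closed_at >= since
}

assert {issue.number for issue in filtered} == {2, 3}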