
Commit 99efda2

Feature/176 introduce bulk logic for getting sub issues (#177)
Added bulk logic for getting sub-issues for all present issues.
1 parent 82ea74b commit 99efda2

20 files changed, +785 −443 lines

.pylintrc

Lines changed: 2 additions & 2 deletions

@@ -298,7 +298,7 @@ ignored-parents=
 max-args=10
 
 # Maximum number of attributes for a class (see R0902).
-max-attributes=7
+max-attributes=10
 
 # Maximum number of boolean expressions in an if statement (see R0916).
 max-bool-expr=5
@@ -470,7 +470,7 @@ notes-rgx=
 [REFACTORING]
 
 # Maximum number of nested blocks for function / method body
-max-nested-blocks=5
+max-nested-blocks=6
 
 # Complete name of functions that never returns. When checking for
 # inconsistent-return-statements if a never returning function is called then
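The two raised limits map to pylint's R0902 (too-many-instance-attributes, per the comment above) and, I believe, R1702 (too-many-nested-blocks). A hypothetical illustration, not taken from this repository, of the nesting depth the new max-nested-blocks=6 setting tolerates:

# Hypothetical example: six nested blocks inside one function body, which the
# old max-nested-blocks=5 would flag and the new limit of 6 should accept.
def deeply_nested(values: list[int]) -> int:
    total = 0
    for value in values:                    # nesting level 1
        if value > 0:                       # 2
            if value % 2 == 0:              # 3
                if value % 3 == 0:          # 4
                    if value % 5 == 0:      # 5
                        if value % 7 == 0:  # 6
                            total += value
    return total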

integration_test.py

Lines changed: 33 additions & 0 deletions

"""
This script demonstrates how to use the BulkSubIssueCollector to find sub-issues
"""

import os
import urllib3

from release_notes_generator.data.utils.bulk_sub_issue_collector import CollectorConfig, BulkSubIssueCollector

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class MissingTokenError(ValueError):
    """Raised when GITHUB_TOKEN environment variable is not set."""
    pass


token = os.getenv("GITHUB_TOKEN")
if token is None:
    raise MissingTokenError("GITHUB_TOKEN environment variable is not set")

# WARNING: TLS verification is disabled for testing purposes only.
# Do not use this configuration in production.
cfg = CollectorConfig(verify_tls=False)

collector = BulkSubIssueCollector(token, cfg=cfg)

new_parents = [
    "absa-group/AUL#2960",
]

while new_parents:
    new_parents = collector.scan_sub_issues_for_parents(new_parents)
    print("New parents found:", new_parents)
    print("Collected sub-issues so far:", collector.parents_sub_issues)

release_notes_generator/data/__init__.py

Whitespace-only changes.

Lines changed: 45 additions & 34 deletions

@@ -20,6 +20,10 @@
 from copy import deepcopy
 from typing import Optional
 
+from github.Issue import Issue
+from github.PullRequest import PullRequest
+from github.Repository import Repository
+
 from release_notes_generator.action_inputs import ActionInputs
 from release_notes_generator.model.mined_data import MinedData
 
@@ -68,39 +72,43 @@ def filter(self, data: MinedData) -> MinedData:
         if data.release is not None:
             logger.info("Starting issue, prs and commit reduction by the latest release since time.")
 
-            issues_list = self._filter_issues(data)
-            logger.debug("Count of issues reduced from %d to %d", len(data.issues), len(issues_list))
+            issues_dict = self._filter_issues(data)
+            logger.debug("Count of issues reduced from %d to %d", len(data.issues), len(issues_dict))
 
             # filter out merged PRs and commits before the date
             pulls_seen: set[int] = set()
-            pulls_list: list = []
-            for pull in data.pull_requests:
+            pulls_dict: dict[PullRequest, Repository] = {}
+            for pull, repo in data.pull_requests.items():
                 if (pull.merged_at is not None and pull.merged_at >= data.since) or (
                     pull.closed_at is not None and pull.closed_at >= data.since
                 ):
                     if pull.number not in pulls_seen:
                         pulls_seen.add(pull.number)
-                        pulls_list.append(pull)
-            logger.debug("Count of pulls reduced from %d to %d", len(data.pull_requests), len(pulls_list))
+                        pulls_dict[pull] = repo
+            logger.debug(
+                "Count of pulls reduced from %d to %d", len(data.pull_requests.items()), len(pulls_dict.items())
+            )
 
-            commits_list = list(filter(lambda commit: commit.commit.author.date > data.since, data.commits))
-            logger.debug("Count of commits reduced from %d to %d", len(data.commits), len(commits_list))
+            commits_dict = {
+                commit: repo for commit, repo in data.commits.items() if commit.commit.author.date > data.since
+            }
+            logger.debug("Count of commits reduced from %d to %d", len(data.commits.items()), len(commits_dict.items()))
 
-            md.issues = issues_list
-            md.pull_requests = pulls_list
-            md.commits = commits_list
+            md.issues = issues_dict
+            md.pull_requests = pulls_dict
+            md.commits = commits_dict
 
             logger.debug(
                 "Input data. Issues: %d, Pull Requests: %d, Commits: %d",
-                len(data.issues),
-                len(data.pull_requests),
-                len(data.commits),
+                len(data.issues.items()),
+                len(data.pull_requests.items()),
+                len(data.commits.items()),
             )
             logger.debug(
                 "Filtered data. Issues: %d, Pull Requests: %d, Commits: %d",
-                len(md.issues),
-                len(md.pull_requests),
-                len(md.commits),
+                len(md.issues.items()),
+                len(md.pull_requests.items()),
+                len(md.commits.items()),
             )
         else:
             md.issues = deepcopy(data.issues)
@@ -109,12 +117,15 @@ def filter(self, data: MinedData) -> MinedData:
 
         return md
 
-    def _filter_issues(self, data: MinedData) -> list:
+    def _filter_issues(self, data: MinedData) -> dict[Issue, Repository]:
         """
         Filter issues based on the selected filtering type - default or hierarchy.
 
-        @param data: The mined data containing issues.
-        @return: The filtered list of issues.
+        Parameters:
+            data (MinedData): The mined data to filter.
+
+        Returns:
+            dict[Issue, Repository]: The filtered issues.
         """
         if ActionInputs.get_hierarchy():
             logger.debug("Used hierarchy issue filtering logic.")
@@ -124,20 +135,24 @@ def _filter_issues(self, data: MinedData) -> list:
         return self._filter_issues_default(data)
 
     @staticmethod
-    def _filter_issues_default(data: MinedData) -> list:
+    def _filter_issues_default(data: MinedData) -> dict[Issue, Repository]:
         """
         Default filtering for issues: filter out closed issues before the release date.
 
         Parameters:
            data (MinedData): The mined data containing issues and release information.
 
         Returns:
-            list: The filtered list of issues.
+            dict[Issue, Repository]: The filtered issues.
         """
-        return [issue for issue in data.issues if (issue.closed_at is None) or (issue.closed_at >= data.since)]
+        return {
+            issue: repo
+            for issue, repo in data.issues.items()
+            if (issue.closed_at is None) or (issue.closed_at >= data.since)
+        }
 
     @staticmethod
-    def _filter_issues_issue_hierarchy(data: MinedData) -> list:
+    def _filter_issues_issue_hierarchy(data: MinedData) -> dict[Issue, Repository]:
         """
         Hierarchy filtering for issues: include issues closed since the release date
         or still open at generation time.
@@ -146,14 +161,10 @@ def _filter_issues_issue_hierarchy(data: MinedData) -> list:
            data (MinedData): The mined data containing issues and release information.
 
         Returns:
-            list: The filtered list of issues.
+            dict[Issue, Repository]: The filtered issues.
         """
-        return list(
-            filter(
-                lambda issue: (
-                    (issue.closed_at is not None and issue.closed_at >= data.since)  # closed after the release
-                    or (issue.state == "open")  # still open
-                ),
-                data.issues,
-            )
-        )
+        return {
+            issue: repo
+            for issue, repo in data.issues.items()
+            if ((issue.closed_at is not None and issue.closed_at >= data.since) or (issue.state == "open"))
+        }
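For orientation, the diff above moves the MinedData collections (issues, pull_requests, commits) from flat lists to dicts keyed by the GitHub object and valued by the Repository it was mined from. A minimal sketch of iterating that shape, assuming standard PyGithub attributes (full_name, number, state); the summarize_issues helper is hypothetical and not part of the commit:

from release_notes_generator.model.mined_data import MinedData


def summarize_issues(data: MinedData) -> None:
    """Print one line per filtered issue together with its source repository."""
    # Assumes data.issues is dict[Issue, Repository], as introduced in this commit.
    for issue, repo in data.issues.items():
        print(f"{repo.full_name}#{issue.number} ({issue.state})")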
