From 30ba233d4cee945756ed7344e7ddb3a90d2ae608 Mon Sep 17 00:00:00 2001
From: Simon Sawicki
Date: Sun, 17 Sep 2023 13:22:04 +0200
Subject: [PATCH] [devscripts] `make_changelog`: Fix changelog grouping and add
 networking group (#8124)

Authored by: Grub4K
---
 devscripts/changelog_override.json | 21 ++++++-
 devscripts/make_changelog.py       | 96 ++++++++++++++++--------------
 2 files changed, 71 insertions(+), 46 deletions(-)

diff --git a/devscripts/changelog_override.json b/devscripts/changelog_override.json
index d03db3f23..e7f453acf 100644
--- a/devscripts/changelog_override.json
+++ b/devscripts/changelog_override.json
@@ -68,6 +68,25 @@
     {
         "action": "change",
         "when": "b03fa7834579a01cc5fba48c0e73488a16683d48",
-        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b"
+        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b",
+        "authors": ["pukkandan"]
+    },
+    {
+        "action": "change",
+        "when": "fcd6a76adc49d5cd8783985c7ce35384b72e545f",
+        "short": "[test] Add tests for socks proxies (#7908)",
+        "authors": ["coletdjnz"]
+    },
+    {
+        "action": "change",
+        "when": "4bf912282a34b58b6b35d8f7e6be535770c89c76",
+        "short": "[rh:urllib] Remove dot segments during URL normalization (#7662)",
+        "authors": ["coletdjnz"]
+    },
+    {
+        "action": "change",
+        "when": "59e92b1f1833440bb2190f847eb735cf0f90bc85",
+        "short": "[rh:urllib] Simplify gzip decoding (#7611)",
+        "authors": ["Grub4K"]
     }
 ]
diff --git a/devscripts/make_changelog.py b/devscripts/make_changelog.py
index 84f72d52f..ac68dcd19 100644
--- a/devscripts/make_changelog.py
+++ b/devscripts/make_changelog.py
@@ -31,35 +31,27 @@ class CommitGroup(enum.Enum):
     EXTRACTOR = 'Extractor'
     DOWNLOADER = 'Downloader'
     POSTPROCESSOR = 'Postprocessor'
+    NETWORKING = 'Networking'
     MISC = 'Misc.'
 
-    @classmethod
-    @property
-    def ignorable_prefixes(cls):
-        return ('core', 'downloader', 'extractor', 'misc', 'postprocessor', 'upstream')
-
     @classmethod
     @lru_cache
-    def commit_lookup(cls):
+    def subgroup_lookup(cls):
         return {
             name: group
             for group, names in {
-                cls.PRIORITY: {'priority'},
                 cls.CORE: {
                     'aes',
                     'cache',
                     'compat_utils',
                     'compat',
                     'cookies',
-                    'core',
                     'dependencies',
                     'formats',
                     'jsinterp',
-                    'networking',
                     'outtmpl',
                     'plugins',
                     'update',
-                    'upstream',
                     'utils',
                 },
                 cls.MISC: {
@@ -67,23 +59,40 @@ def commit_lookup(cls):
                     'cleanup',
                     'devscripts',
                     'docs',
-                    'misc',
                     'test',
                 },
-                cls.EXTRACTOR: {'extractor', 'ie'},
-                cls.DOWNLOADER: {'downloader', 'fd'},
-                cls.POSTPROCESSOR: {'postprocessor', 'pp'},
+                cls.NETWORKING: {
+                    'rh',
+                },
             }.items()
             for name in names
         }
 
     @classmethod
-    def get(cls, value):
-        result = cls.commit_lookup().get(value)
-        if result:
-            logger.debug(f'Mapped {value!r} => {result.name}')
+    @lru_cache
+    def group_lookup(cls):
+        result = {
+            'fd': cls.DOWNLOADER,
+            'ie': cls.EXTRACTOR,
+            'pp': cls.POSTPROCESSOR,
+            'upstream': cls.CORE,
+        }
+        result.update({item.name.lower(): item for item in iter(cls)})
         return result
 
+    @classmethod
+    def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
+        group, _, subgroup = (group.strip().lower() for group in value.partition('/'))
+
+        result = cls.group_lookup().get(group)
+        if not result:
+            if subgroup:
+                return None, value
+            subgroup = group
+            result = cls.subgroup_lookup().get(subgroup)
+
+        return result, subgroup or None
+
 
 @dataclass
 class Commit:
@@ -198,19 +207,23 @@ def _prepare_cleanup_misc_items(self, items):
         for commit_infos in cleanup_misc_items.values():
             sorted_items.append(CommitInfo(
                 'cleanup', ('Miscellaneous',), ', '.join(
-                    self._format_message_link(None, info.commit.hash).strip()
+                    self._format_message_link(None, info.commit.hash)
                     for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')),
                 [], Commit(None, '', commit_infos[0].commit.authors), []))
 
         return sorted_items
 
-    def format_single_change(self, info):
-        message = self._format_message_link(info.message, info.commit.hash)
+    def format_single_change(self, info: CommitInfo):
+        message, sep, rest = info.message.partition('\n')
+        if '[' not in message:
+            # If the message doesn't already contain markdown links, try to add a link to the commit
+            message = self._format_message_link(message, info.commit.hash)
+
         if info.issues:
-            message = message.replace('\n', f' ({self._format_issues(info.issues)})\n', 1)
+            message = f'{message} ({self._format_issues(info.issues)})'
 
         if info.commit.authors:
-            message = message.replace('\n', f' by {self._format_authors(info.commit.authors)}\n', 1)
+            message = f'{message} by {self._format_authors(info.commit.authors)}'
 
         if info.fixes:
             fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)
@@ -219,16 +232,14 @@ def format_single_change(self, info):
             if authors != info.commit.authors:
                 fix_message = f'{fix_message} by {self._format_authors(authors)}'
 
-            message = message.replace('\n', f' (With fixes in {fix_message})\n', 1)
+            message = f'{message} (With fixes in {fix_message})'
 
-        return message[:-1]
+        return message if not sep else f'{message}{sep}{rest}'
 
     def _format_message_link(self, message, hash):
         assert message or hash, 'Improperly defined commit message or override'
         message = message if message else hash[:HASH_LENGTH]
-        if not hash:
-            return f'{message}\n'
-        return f'[{message}\n'.replace('\n', f']({self.repo_url}/commit/{hash})\n', 1)
+        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message
 
     def _format_issues(self, issues):
         return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)
@@ -318,7 +329,7 @@ def _get_commits_and_fixes(self, default_author):
         for commitish, revert_commit in reverts.items():
             reverted = commits.pop(commitish, None)
             if reverted:
-                logger.debug(f'{commit} fully reverted {reverted}')
+                logger.debug(f'{commitish} fully reverted {reverted}')
             else:
                 commits[revert_commit.hash] = revert_commit
 
@@ -337,7 +348,7 @@ def apply_overrides(self, overrides):
         for override in overrides:
             when = override.get('when')
             if when and when not in self and when != self._start:
-                logger.debug(f'Ignored {when!r}, not in commits {self._start!r}')
+                logger.debug(f'Ignored {when!r} override')
                 continue
 
             override_hash = override.get('hash') or when
@@ -365,7 +376,7 @@ def groups(self):
         for commit in self:
             upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short)
             if upstream_re:
-                commit.short = f'[core/upstream] Merged with youtube-dl {upstream_re.group(1)}'
+                commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}'
 
             match = self.MESSAGE_RE.fullmatch(commit.short)
             if not match:
@@ -410,25 +421,20 @@ def details_from_prefix(prefix):
         if not prefix:
             return CommitGroup.CORE, None, ()
 
-        prefix, _, details = prefix.partition('/')
-        prefix = prefix.strip()
-        details = details.strip()
+        prefix, *sub_details = prefix.split(':')
 
-        group = CommitGroup.get(prefix.lower())
-        if group is CommitGroup.PRIORITY:
-            prefix, _, details = details.partition('/')
+        group, details = CommitGroup.get(prefix)
+        if group is CommitGroup.PRIORITY and details:
+            details = details.partition('/')[2].strip()
 
-        if not details and prefix and prefix not in CommitGroup.ignorable_prefixes:
-            logger.debug(f'Replaced details with {prefix!r}')
-            details = prefix or None
+        if details and '/' in details:
+            logger.error(f'Prefix is overnested, using first part: {prefix}')
+            details = details.partition('/')[0].strip()
 
         if details == 'common':
             details = None
-
-        if details:
-            details, *sub_details = details.split(':')
-        else:
-            sub_details = []
+        elif group is CommitGroup.NETWORKING and details == 'rh':
+            details = 'Request Handler'
 
         return group, details, sub_details
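
A minimal sketch of how the reworked prefix lookup is expected to behave after this patch. It assumes the snippet is run from the repository root so that `devscripts` is importable; the expected tuples are inferred from the diff above, not captured from an actual run.

    from devscripts.make_changelog import CommitGroup

    # Known group aliases resolve through group_lookup(); no subgroup is attached.
    assert CommitGroup.get('ie') == (CommitGroup.EXTRACTOR, None)
    assert CommitGroup.get('ie/twitter') == (CommitGroup.EXTRACTOR, 'twitter')

    # 'rh' is not a group alias, so it falls through to subgroup_lookup()
    # and ends up in the new NETWORKING group.
    assert CommitGroup.get('rh') == (CommitGroup.NETWORKING, 'rh')

    # An unknown group with an explicit subgroup is returned unmapped.
    assert CommitGroup.get('unknown/sub') == (None, 'unknown/sub')

Combined with the `details_from_prefix` changes, a prefix such as `[rh:urllib]` should therefore end up under the new Networking group with its detail rendered as 'Request Handler'.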