From 8c7a6dbc165986bd494142125ddd88dfe683bdc4 Mon Sep 17 00:00:00 2001 From: Can Bulut Bayburt Date: Wed, 22 Nov 2023 21:39:45 +0100 Subject: [PATCH] Add GH action for enhanced changelog validation --- .github/workflows/changelogs.yml | 29 +- .github/workflows/changelogs/changelogs.py | 636 ++++++++++++++++++ .../workflows/changelogs/test_changelogs.py | 384 +++++++++++ .github/workflows/changelogs/trackers.xml | 343 ++++++++++ 4 files changed, 1380 insertions(+), 12 deletions(-) create mode 100644 .github/workflows/changelogs/changelogs.py create mode 100644 .github/workflows/changelogs/test_changelogs.py create mode 100644 .github/workflows/changelogs/trackers.xml diff --git a/.github/workflows/changelogs.yml b/.github/workflows/changelogs.yml index 6f6d1de8e50b..18c08dfbfdc2 100644 --- a/.github/workflows/changelogs.yml +++ b/.github/workflows/changelogs.yml @@ -1,7 +1,7 @@ name: Changelogs on: - pull_request_target: + pull_request: types: [opened, edited, synchronize, reopened, closed] # Only execute on the main branches (feature branches are excluded) branches: @@ -44,7 +44,7 @@ on: jobs: changelog_test: - name: Test changelog entries + name: Changelog tests runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -65,22 +65,27 @@ jobs: echo " - $file" done echo - echo "See https://github.com/uyuni-project/uyuni/wiki/Contributing for a guide to writing checklogs." + echo "See https://github.com/uyuni-project/uyuni/wiki/Contributing for a guide to writing changelogs." exit 1 - id: changelogs - name: Get modified changelog files + name: Get modified files if: "!contains(github.event.pull_request.body, '[x] No changelog needed')" uses: Ana06/get-changed-files@v2.2.0 - with: - filter: '*.changes.*' - - name: Fail if no changelog entries are added - if: steps.changelogs.conclusion == 'success' && steps.changelogs.outputs.added_modified == '' + - name: Test changelog entries + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BZ_TOKEN: ${{ secrets.BUGZILLA_TOKEN }} run: | - echo "No changelog entry found. Please add the required changelog entries." - echo "See https://github.com/uyuni-project/uyuni/wiki/Contributing for a guide to writing checklogs." 
- exit 1 + pip install python-bugzilla~=3.2.0 + # Tracker file can be retrieved from the OBS/IBS API + # by calling 'osc api /issue_trackers/' (with authentication) + python .github/workflows/changelogs/changelogs.py \ + --tracker-file .github/workflows/changelogs/trackers.xml \ + --git-repo uyuni-project/uyuni \ + --pr-number ${{ github.event.pull_request.number }} \ + ${{ steps.changelogs.outputs.all }} - # warns the user if they merged the PR, but the changelog test failed + # Warns the user if they merged the PR, but the changelog test failed warn_user_if_merged: name: Warn user if merged if: always() && github.event.action == 'closed' && github.event.pull_request.merged == true && needs.changelog_test.result == 'failure' diff --git a/.github/workflows/changelogs/changelogs.py b/.github/workflows/changelogs/changelogs.py new file mode 100644 index 000000000000..d51ae487f236 --- /dev/null +++ b/.github/workflows/changelogs/changelogs.py @@ -0,0 +1,636 @@ +#!/usr/bin/env python + +import logging +import argparse +import sys +import os +import re +import linecache +import functools +import bugzilla +import xmlrpc +import requests +import xml.etree.ElementTree as ET + +DEFAULT_LINE_LENGTH = 67 +DEFAULT_GIT_REPO = "uyuni-project/uyuni" +DEFAULT_BUGZILLA_URI = "bugzilla.suse.com" + +class RegexRules: + """Contains the regex rules for all the changelog checks + + Can additionally contain dynamic regex rules that are acquired from a + 'tracker file'. This is an XML file that contains definitions of all the + tracker IDs that OBS/IBS supports. The contents of the file can be + retrieved from the '/issue-trackers' endpoint of the IBS/OBS APIs. + """ + + MULTIW = re.compile(r"\S[ \t]{2,}[^ ]") + TRAILINGW = re.compile(r"[ \t]$") + WRONG_CAP_START = re.compile(r"^\W*[a-z]") + WRONG_CAP_AFTER = re.compile(r"[:.] 
*[a-z]") + WRONG_SPACING = re.compile(r"([.,;:])[^ \n]") + TRACKER_LIKE = re.compile(r".{2,5}#\d+") + + def __init__(self, tracker_filename: str = None): + trackers = {} + if tracker_filename: + try: + logging.info(f"Parsing tracker file: {tracker_filename}") + tree = ET.parse(tracker_filename) + except FileNotFoundError as e: + raise Exception(f"{e.strerror}: '{e.filename}'") + except ET.ParseError as e: + raise Exception(f"Error parsing '{tracker_filename}': {e.msg}") + + for tracker in tree.getroot(): + try: + # Every element should + # contain 'name' and 'regex' as children + name = tracker.find('name').text + regex = tracker.find('regex').text + except AttributeError: + raise Exception(f"Error parsing '{tracker_filename}': not a tracker XML file") + trackers[name] = regex + + logging.info(f"Found {len(trackers.keys())} tracker definition(s)") + + self.trackers = trackers + +class IssueType: + """Contains the issue messages as static strings""" + + LINE_TOO_LONG = "Line exceeds {} characters" + EMPTY_LINE = "Empty line" + WRONG_INDENT = "Wrong indentation" + WRONG_START = "Entries must start with '- ' characters" + MULTI_WHITESPACE = "Multiple whitespaces" + TRAIL_WHITESPACE = "Trailing whitespaces" + MISSING_CHLOG = "Changelog not added" + WRONG_CHLOG = "Changelog added without changes" + EMPTY_CHLOG = "No changelog entries found" + MISSING_NEWLINE = "Missing newline at the end" + WRONG_CAP = "Wrong capitalization" + WRONG_SPACING = "Wrong spacing" + WRONG_TRACKER = "{} is not mentioned in PR title or in commit messages" + MISSING_TRACKER = "{} is not mentioned in any changelog entries" + MISTYPED_TRACKER = "Possibly a mistyped tracker" + BUG_NOT_FOUND = "Bug #{} does not exist at Bugzilla" + BUG_NOT_AUTHORIZED = "Not authorized to access bug #{} at Bugzilla" + INVALID_BUG = "Some error occurred when accessing bug #{} at Bugzilla: {}" + INVALID_PRODUCT = "Bug #{} does not belong to SUSE Manager" + +class Entry: + """Class that represents a single changelog entry + + Each Entry contains the entry text, its file, the beginning and ending + lines, and any extracted tracker information mentioned in the entry + text. + + The ending line is 'None' if the entry consists of a single line. + """ + + def __init__(self, entry: str, file: str, line: int, end_line: int = None, trackers = {}): + self.entry = entry + self.file = file + self.line = line + self.end_line = end_line + self.trackers = trackers + +class Issue: + """Class that represents a single validation issue + + Each Issue contains a message describing the issue, the file and the + package the issue is found in, the beginning and ending lines in the + file, and a 'severe' flag that denotes whether the issues is severe + (causing a validation failure) or not. + + The ending line is 'None' if the entry consists of a single line. + + The Issue class overrides the '__str__' method to pretty-print itself. + The actual print format depends on the environment this program runs in. + When run as a GitHub action (GITHUB_ACTION environment variable is set), + special GitHub workflow commands are prepended to the messages. This + lets GitHub UI to display the issue messages in a richer way. 
+ + See: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions + """ + + def __init__(self, msg, file: str = None, line: int = None, end_line: int = None, + package: str = None, severe: bool = True): + self.msg = msg + self.severe = severe + self.set_details(file, line, end_line, package) + + def set_details(self, file: str = None, line: int = None, end_line: int = None, + package: str = None) -> None: + self.file = file + # TODO: Pinpoint the column number for issues where applicable + self.line = line + self.end_line = end_line + self.package = package + + def get_message_header(self) -> str: + """Return a message prefix for the issue + + The prefix depends on the issue type, the environment, and the + additional information available. + """ + + # Prepend special workflow tags if running as a GitHub action + if os.getenv("GITHUB_ACTION"): + msg = "::error" if self.severe else "::warning" + if self.file: + msg += " file={}".format(self.file) + if self.line: + msg += ",line={}".format(self.line) + if self.end_line: + msg += ",endLine={}".format(self.end_line) + + return msg + "::" + else: + return "ERROR: " if self.severe else "WARNING: " + + def __str__(self): + out = self.get_message_header() + out += self.msg + + if self.package: + out += " for package " + self.package + if self.file: + out += " in file " + self.file + if self.line: + out += "#L" + str(self.line) + if self.end_line: + out += "-" + str(self.end_line) + return out + +class ChangelogValidator: + """Class that handles the changelog validation + + Validates the changelog entries by enforcing various rules, such as; + - Basic typos, spacing, capitalization and line length issues + - Ensuring the changelogs are added for correct packages + + Additionally, the following checks are made if a tracker file is + provided; + - Correct spelling of tracker IDs defined by IBS/OBS + - Correct matching of tracker IDs between the PR title, comment + messages and the changelog entries (only if a valid PR number is + provided) + - For Bugzilla trackers, the bug exists in the specified Bugzilla + host + - For Bugzilla trackers, the bug is reported for the "SUSE Manager" + product (passes with a warning if the check fails) + + If a tracker file and a PR number is provided, the following environment + variables must be also set: + + GH_TOKEN: A GitHub access token with the basic privileges + BZ_TOKEN: A valid Bugzilla API key + + The trackers XML file (trackers.xml): + This is an XML document that contains definitions and formats of all + the various types of tracker IDs recognized by IBS/OBS. The document is + provided by the OBS/IBS APIs. + + It can be obtained by sending a request to the '/issue-trackers' + endpoint of the OBS/IBS API: + + `osc api /issue-trackers` + """ + + def __init__(self, spacewalk_root: str, git_repo: str, pr_number: int, max_line_length: int, + regex_rules: type[RegexRules]): + if pr_number and not os.getenv("GH_TOKEN"): + raise Exception("GitHub API key not set. Please set it in 'GH_TOKEN' environment variable.") + + self.spacewalk_root = spacewalk_root + self.git_repo = git_repo + self.pr_number = pr_number + self.max_line_length = max_line_length + self.regex = regex_rules + if regex_rules.trackers: + self.bzapi = self.get_bugzilla_api() + + def get_bugzilla_api(self) -> type[bugzilla.Bugzilla]: + """Initialize and authenticate the Bugzilla API""" + + api_key = os.getenv("BZ_TOKEN") + if not api_key: + raise Exception("Bugzilla API key not set. 
Please set it in 'BZ_TOKEN' environment variable.") + + uri = os.getenv("BUGZILLA_URI", DEFAULT_BUGZILLA_URI) + try: + logging.info(f"Initializing Bugzilla API at '{uri}'") + bzapi = bugzilla.Bugzilla(uri, api_key=api_key) + except requests.exceptions.ConnectionError as e: + raise ConnectionError(f"Cannot connect to the Bugzilla API at '{uri}'") + + try: + assert bzapi.logged_in, f"Cannot log into the Bugzilla API at '{uri}'" + except xmlrpc.client.Fault as f: + raise Exception(f"Cannot log in to the Bugzilla API at '{uri}': {f.faultString}") + + + return bzapi + + def get_modified_files_for_pkg(self, pkg_path: str, pkg_name: str, files: list[str]) -> dict[str, list[str]]: + """Return a dictionary of modified files in a package + + The files lists are split into 2 different groups in the dictionary: + + 'changes': The changelog files (packagename.changes.*) + 'files': The rest of the modified files in the package + """ + + pkg_files = [] + pkg_chlogs = [] + for f in files: + # Check if the file exists in a subdirectory of the base path of the package + if os.path.normpath(os.path.dirname(f)).startswith(os.path.normpath(pkg_path)): + if os.path.basename(f).startswith(pkg_name + ".changes."): + # Ignore if the change is a removal + if os.path.isfile(os.path.join(self.spacewalk_root, f)): + pkg_chlogs.append(f) + else: + pkg_files.append(f) + + return { "files": pkg_files, "changes": pkg_chlogs } + + def get_pkg_index(self, files: list[str]) -> dict[str, list[str]]: + """Index the list of modified files + + Parses the list of files and returns a dictionary in the following + format: + + ["pkg_name"] -> { + "files": list of changed files in the package, + "changes": list of modified changelog files in the package + } + + The actual package names and their base paths are read from the files in + the 'rel-eng/packages' directory. Each file in this directory defines + the name of a package (the name of the file), the current version of the + package, and its base path. + """ + + packages_dir = os.path.join(self.spacewalk_root, "rel-eng/packages") + pkg_idx = {} + + try: + pkg_names = os.listdir(packages_dir) + logging.debug(f"Found {len(pkg_names)} package(s) in 'rel-eng/packages'") + except FileNotFoundError: + raise Exception(f"Not an Uyuni repository. Consider using '--spacewalk-dir' option.") + + for pkg_name in pkg_names: + if pkg_name.startswith('.'): + # Skip hidden files in rel-eng/packages + continue + # Extract the package path from the file: + # Each file contains the package version and the + # package path, separated by a space character + pkg_path = linecache.getline(os.path.join(packages_dir, pkg_name), 1).rstrip().split(maxsplit=1)[1] + logging.debug(f"Package {pkg_name} is in path {pkg_path}") + + # Get the list of modified files and changelog files for the package + modified_files = self.get_modified_files_for_pkg(pkg_path, pkg_name, files) + if modified_files["files"] or modified_files["changes"]: + pkg_idx[pkg_name] = modified_files + + return pkg_idx + + def extract_trackers(self, text: str) -> dict[str, list[tuple[str, str]]]: + """Extract all the mentioned trackers in a body of text + + The trackers to be extracted are defined in the 'regex' object. 
+ + The method returns a dictionary of trackers in the following format: + + ["kind"] -> [(full_tracker_id, num_tracker_id)] + + Example of a tuple: ("bsc#1234567", "1234567") + """ + + trackers = {} + for kind, regex in self.regex.trackers.items(): + trackers_of_kind = [] + for match in re.finditer(regex, text): + # Match groups are defined by contract in the tracker file + # Group 1 should be the numeric ID of the tracker + trackers_of_kind.append(match.group(0, 1)) + + # Gather the unique trackers + trackers[kind] = list(set(trackers_of_kind)) + + return trackers + + def get_pr_trackers(self, git_repo: str, pr_number: int) -> dict[str, list[tuple[str, str]]]: + """Get all the trackers mentioned in a PR + + The trackers are extracted from the PR title and the commit messages. + """ + + logging.info(f"Requesting information for PR#{pr_number} at '{git_repo}'") + stream = os.popen(f'gh pr view -R {git_repo} {pr_number} --json title,commits -q ".title, .commits[].messageHeadline, .commits[].messageBody | select(length > 0)"') + commits = stream.read() + if stream.close(): + raise Exception("An error occurred when getting the PR information from the GitHub API.") + return self.extract_trackers(commits) + + def validate_chlog_entry(self, entry: type[Entry]) -> list[type[Issue]]: + """Validate a single changelog entry""" + + issues = [] + # Test capitalization + if re.search(self.regex.WRONG_CAP_START, entry.entry) or re.search(self.regex.WRONG_CAP_AFTER, entry.entry): + issues.append(Issue(IssueType.WRONG_CAP, entry.file, entry.line, entry.end_line)) + # Test spacing + if re.search(self.regex.WRONG_SPACING, entry.entry): + issues.append(Issue(IssueType.WRONG_SPACING, entry.file, entry.line, entry.end_line)) + + return issues + + def get_entry_obj(self, buffer: list[str], file: str, line_no: int) -> type[Entry]: + """Create an Entry object from a buffer of entry lines + + The elements in the 'buffer' list are separate lines of a single entry. 
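+
+        For example, the buffer ["- Fix the parser ", "for multiline entries"]
+        yields the entry text "Fix the parser for multiline entries".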
+ """ + + # Strip the '- ' characters in the beginning of the first line + msg = ''.join(buffer)[2:] + trackers = self.extract_trackers(msg) + return Entry(msg, file, line_no - len(buffer), line_no - 1 if len(buffer) > 1 else None, trackers) + + def validate_chlog_file(self, file: str) -> tuple[list[type[Issue]], list[type[Entry]]]: + """Validate a single changelog file""" + + logging.debug(f"Validating changelog file: {file}") + file_path = os.path.join(self.spacewalk_root, file) + + if os.path.getsize(file_path) == 0: + return ([Issue(IssueType.EMPTY_CHLOG, file)], []) + + f = open(file_path, "r") + issues = [] + entries = [] + entry_buf: list[str] = [] # List to buffer the lines in a single changelog entry + line_no = 0 + + for line in f: + line_no += 1 + + if not line.endswith("\n"): + issues.append(Issue(IssueType.MISSING_NEWLINE, file)) + + stripped_line = line.rstrip("\n") + if not stripped_line.strip(): + issues.append(Issue(IssueType.EMPTY_LINE, file, line_no)) + continue + + if re.search(r"^- ", stripped_line): + # Start of a new entry + if entry_buf: + # Wrap up the previous entry + entry = self.get_entry_obj(entry_buf, file, line_no) + issues.extend(self.validate_chlog_entry(entry)) + entries.append(entry) + entry_buf = [stripped_line] + else: + # First entry in the file + entry_buf.append(stripped_line) + else: + # Successive lines of the entry + if entry_buf: + if not re.search(r"^ [^ ]", stripped_line): + # Successive lines must be indented by two spaces + issues.append(Issue(IssueType.WRONG_INDENT, file, line_no)) + # Strip 1 whitespace from the left + # 2 (indentation) - 1 (a literal space after the last line) + entry_buf.append(stripped_line[1:]) + elif re.search(r"^\s+- ", stripped_line): + # No space allowed before the first entry line + issues.append(Issue(IssueType.WRONG_INDENT, file, line_no)) + else: + # All entries must start with '- ' characters + issues.append(Issue(IssueType.WRONG_START, file, line_no)) + + if len(stripped_line) > self.max_line_length: + issues.append(Issue(IssueType.LINE_TOO_LONG.format(self.max_line_length), file, line_no)) + if re.search(self.regex.MULTIW, stripped_line): + issues.append(Issue(IssueType.MULTI_WHITESPACE, file, line_no)) + if re.search(self.regex.TRAILINGW, stripped_line): + issues.append(Issue(IssueType.TRAIL_WHITESPACE, file, line_no)) + + # EOF + if entry_buf: + # Validate and append the last entry + entry = self.get_entry_obj(entry_buf, file, line_no + 1) + issues.extend(self.validate_chlog_entry(entry)) + entries.append(entry) + + return (issues, entries) + + def validate_bsc(self, entry: type[Entry]) -> list[type[Issue]]: + """Validate Bugzilla trackers against a Bugzilla host""" + + issues = [] + # 'bnc' is the name of the tracker as defined in the trackers file + if 'bnc' in entry.trackers: + for tracker, bug_id in entry.trackers['bnc']: + try: + bug = self.bzapi.getbug(bug_id) + logging.debug(f"Bug #{bug_id} belongs to product '{bug.product}'") + + if not bug.product.startswith("SUSE Manager"): + issues.append(Issue(IssueType.INVALID_PRODUCT.format(bug_id), entry.file, entry.line, \ + entry.end_line, severe=False)) + except xmlrpc.client.Fault as f: + if f.faultCode == 101: + # Bug not found + issues.append(Issue(IssueType.BUG_NOT_FOUND.format(bug_id), entry.file, entry.line, \ + entry.end_line, severe=True)) + elif f.faultCode == 102: + # Not authorized + issues.append(Issue(IssueType.BUG_NOT_AUTHORIZED.format(bug_id), entry.file, entry.line, \ + entry.end_line, severe=False)) + else: + # Any other fault + 
issues.append(Issue(IssueType.INVALID_BUG.format(bug_id, f.faultString), entry.file, entry.line, \ + entry.end_line, severe=False)) + return issues + + + def validate_trackers(self, entries: list[type[Entry]]) -> list[type[Issue]]: + """Validate the trackers mentioned in a list of entries + + Checks any possible typos and verifies Bugzilla trackers via the + Bugzilla API. + + If a PR number is provided, additionally match the mentioned trackers + against the PR's title and comment messages. + """ + + issues = [] + + if self.pr_number: + pr_validation = True + pr_trackers = self.get_pr_trackers(self.git_repo, self.pr_number) + else: + pr_validation = False + + all_trackers = {} + + for entry in entries: + # Check for mistyped trackers + # Count actual trackers in the entry + num_trackers = functools.reduce(lambda x, y: x + len(y), entry.trackers.values(), 0) + # Find all tracker-like words + if len(re.findall(self.regex.TRACKER_LIKE, entry.entry)) > num_trackers: + issues.append(Issue(IssueType.MISTYPED_TRACKER, entry.file, entry.line, entry.end_line, severe=False)) + + for kind, trackers in entry.trackers.items(): + # Collect all trackers in all entries of the changelog + if kind not in all_trackers: + all_trackers[kind] = entry.trackers[kind] + else: + all_trackers[kind].extend(entry.trackers[kind]) + + # Check if all the trackers mentioned in the + # changelog entry are also mentioned in the PR + if pr_validation: + for t in trackers: + if kind not in pr_trackers or t not in pr_trackers[kind]: + # Tracker not mentioned in the PR + issues.append(Issue(IssueType.WRONG_TRACKER.format(t[0]), entry.file, entry.line, \ + entry.end_line)) + + # Check Bugzilla trackers via the API + issues.extend(self.validate_bsc(entry)) + + # Check if all the trackers mentioned in the + # PR are also mentioned in the changelogs + if pr_validation: + for kind, trackers in pr_trackers.items(): + for t in trackers: + if kind not in all_trackers or t not in all_trackers[kind]: + issues.append(Issue(IssueType.MISSING_TRACKER.format(t[0]))) + + return issues + + def validate(self, file_list: list[str]) -> list[type[Issue]]: + """Validates changelogs in the list of files""" + + # Index the list of files by package + self.pkg_idx = self.get_pkg_index(file_list) + issues = [] + entries = [] + for pkg, files in self.pkg_idx.items(): + # General checks (package/changelog file mismatch) + if not files["files"]: + # Changelog added but no file is modified + issues.append(Issue(IssueType.WRONG_CHLOG, package=pkg)) + if not files["changes"]: + # Files are modified but no changelog file added + issues.append(Issue(IssueType.MISSING_CHLOG, package=pkg)) + + # Validate each changelog file and gather all the issues + for file in files["changes"]: + i, e = self.validate_chlog_file(file) + issues.extend(i) + entries.extend(e) + + # Validate all the mentioned trackers if the tracker file is provided + if self.regex.trackers: + issues.extend(self.validate_trackers(entries)) + + return issues + +def parse_args(): + parser = argparse.ArgumentParser(description="Validate changelog entries for Uyuni PRs", + epilog="Uyuni project: ") + + parser.add_argument("-v", "--verbose", + action="store_true", + help="enable verbose output") + + parser.add_argument("-l", "--line-length", + type=int, + default=DEFAULT_LINE_LENGTH, + help="maximum line length allowed in changelog files (default: 67)") + + parser.add_argument("-t", "--tracker-file", + help="tracker definitions XML document retrieved from the OBS/IBS API. 
Bypass tracker validation if not provided.") + + parser.add_argument("-d", "--spacewalk-dir", + default=".", + help="path to the local git repository root (default: current directory)") + + parser.add_argument("-p", "--pr-number", + type=int, + help="the ID of the pull request to be validated. Bypass PR validation if not provided.") + + parser.add_argument("-r", "--git-repo", + default=DEFAULT_GIT_REPO, + help=f"the Uyuni repository to validate the PR against (default: '{DEFAULT_GIT_REPO}')") + + parser.add_argument("-b", "--bugzilla-uri", + default=DEFAULT_BUGZILLA_URI, + help=f"the URI to the Bugzilla host to verify bug trackers (default: '{DEFAULT_BUGZILLA_URI}')") + + parser.add_argument("files", + metavar="FILE", + nargs="*", + help="the list of modified files in the pull request") + return parser.parse_args() + +def init_logging(verbose): + if verbose: + log_level = logging.DEBUG + else: + log_level = logging.INFO + + logging.basicConfig(level=log_level, format="%(levelname)s: %(message)s") + +def main(): + args = parse_args() + init_logging(args.verbose) + + is_gh_action = os.getenv("GITHUB_ACTION") + if is_gh_action: + logging.info("Running in GitHub actions environment") + + try: + logging.debug("Initializing the validator") + regexRules = RegexRules(args.tracker_file) + validator = ChangelogValidator(args.spacewalk_dir, args.git_repo, args.pr_number, args.line_length, regexRules) + + logging.debug(f"Validating {len(args.files)} file(s)") + issues = validator.validate(args.files) + logging.debug(f"Validation finished with {len(issues)} issue(s)") + except Exception as e: + print(e, file=sys.stderr) + return 2 + + is_fail = any([issue.severe for issue in issues]) + + if not issues: + logging.info("Changelog test passed") + return 0 + + logging.info("Changelog test {} with {} issue(s):".format("failed" if is_fail else "passed", len(issues))) + if not is_gh_action: + print("-" * 60) + for i in issues: print(i) + + if is_fail: + print() + print("{}See https://github.com/uyuni-project/uyuni/wiki/Contributing for a guide to writing changelogs." 
+ .format("::notice::" if is_gh_action else "")) + return 1 + + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/.github/workflows/changelogs/test_changelogs.py b/.github/workflows/changelogs/test_changelogs.py new file mode 100644 index 000000000000..e7f6ad1ab35c --- /dev/null +++ b/.github/workflows/changelogs/test_changelogs.py @@ -0,0 +1,384 @@ +import pytest +import os +import re +import io +import bugzilla +from bugzilla.bug import Bug +from changelogs import * + +def issues_to_str(issues, num_expected): + out = f"{len(issues)} issue(s) found ({num_expected} expected)\n" + out += "\n".join([str(i) for i in issues]) + return out + +@pytest.fixture +def tracker_filename(tmp_path): + p = tmp_path / "trackers.xml" + p.write_text(r""" + + + tckr + tckr#(\d{2}) + + + bnc + (?:bsc|bnc)#(\d+) + + + """) + return str(p) + +@pytest.fixture +def file_list(): + return [ + "pkg/path/myfile.txt", + "pkg/path/mypkg.changes.my.feature", + "pkg/other/path/file.txt", + "pkg/other/otherpkg.changes.my.feature" + ] + +@pytest.fixture +def base_path(tmp_path, file_list): + # Create a temp base dir + base_path = tmp_path / "base" + base_path.mkdir() + + # Create files in file_list + for file in file_list: + fp = base_path / file + fp.parent.mkdir(parents=True, exist_ok=True) + fp.touch() + + # Create rel-eng/packages files + pkg_dir = base_path / "rel-eng/packages" + pkg_dir.mkdir(parents=True) + + pkg_file = pkg_dir / "mypkg" + pkg_file.write_text("1.0.0 pkg/path/") + + pkg_file = pkg_dir / "otherpkg" + pkg_file.write_text("1.0.0 pkg/other/") + return base_path + +@pytest.fixture +def chlog_file(base_path): + file = base_path / "pkg/path/mypkg.changes.my.feature" + return file + +@pytest.fixture +def validator(monkeypatch, base_path): + return ChangelogValidator(base_path, None, None, DEFAULT_LINE_LENGTH, RegexRules(None)) + +@pytest.fixture +def validator_with_trackers(monkeypatch, tracker_filename, base_path): + monkeypatch.setenv("BZ_TOKEN", "my-bugzilla-token") + monkeypatch.setenv("GH_TOKEN", "my-github-token") + + # Mock GitHub API + def gh_api_call(api_cmd): + pr_data="""Title of my PR (tckr#99) +First commit message (tckr#99) +Second commit message +""" + if re.search(r"^gh pr view -R [^ ]+ 999 .*", api_cmd): + return io.StringIO(pr_data) + else: + raise Exception("An error occurred when getting the PR information from the GitHub API.") + + # Mock Bugzilla API + def getbug(self, bsc): + if bsc == "1000000": + return Bug(self, 1000000, {"product": "SUSE Manager 1.0"}) + elif bsc == "2000000": + return Bug(self, 2000000, {"product": "Not SUSE Manager"}) + elif bsc == "9999999": + raise xmlrpc.client.Fault(102, "Not authorized") + else: + raise xmlrpc.client.Fault(101, "Not found") + + monkeypatch.setattr(os, "popen", gh_api_call) + monkeypatch.setattr(bugzilla.Bugzilla, "getbug", getbug) + monkeypatch.setattr(ChangelogValidator, "get_bugzilla_api", lambda self: bugzilla.Bugzilla(url=None)) + + return ChangelogValidator(base_path, "test/repo", 999, DEFAULT_LINE_LENGTH, RegexRules(tracker_filename)) + +def test_regex_trackers(tracker_filename): + regex = RegexRules(tracker_filename) + assert "tckr" in regex.trackers + assert regex.trackers["tckr"] == r"tckr#(\d{2})" + +def test_issue_error_string(): + assert not os.getenv("GITHUB_ACTION") + issue = Issue(IssueType.EMPTY_LINE, package="mypackage") + assert str(issue) == f"ERROR: {IssueType.EMPTY_LINE} for package mypackage" + +def test_issue_warning_string_all_params(): + assert not os.getenv("GITHUB_ACTION") + issue = 
Issue(IssueType.WRONG_SPACING, file="myfile.txt", line=1, package="mypackage", severe=False) + assert str(issue) == f"WARNING: {IssueType.WRONG_SPACING} for package mypackage in file myfile.txt#L1" + +def test_issue_gh_action_string(monkeypatch): + monkeypatch.setenv("GITHUB_ACTION", "true") + issue = Issue(IssueType.WRONG_CAP, "myfile.txt", 3, 5) + assert str(issue) == f"::error file=myfile.txt,line=3,endLine=5::{IssueType.WRONG_CAP} in file myfile.txt#L3-5" + +def test_get_pkg_index(validator, file_list): + pkg_idx = validator.get_pkg_index(file_list) + assert "mypkg" in pkg_idx + assert "pkg/path/myfile.txt" in pkg_idx["mypkg"]["files"] + assert "pkg/path/mypkg.changes.my.feature" in pkg_idx["mypkg"]["changes"] + +def test_extract_trackers(validator_with_trackers): + trackers = validator_with_trackers.extract_trackers(""" + This is a tckr#23 tracker. + Repeat tckr#23. + And another tckr#24. + Not a valid tracker tckr#1, + Also not a valid tracker tkr#333. + """) + assert "tckr" in trackers + assert len(trackers["tckr"]) == 2 + assert ("tckr#23", "23") in trackers["tckr"] + assert ("tckr#24", "24") in trackers["tckr"] + assert ("tckr#1", "1") not in trackers["tckr"] + assert ("tkr#333", "333") not in trackers["tckr"] + +def test_get_entry_obj(validator): + buffer = ["- This is a changelog entry."] + + entry = validator.get_entry_obj(buffer, "myfile.changes", 5) + assert entry.entry == "This is a changelog entry." + assert entry.file == "myfile.changes" + assert entry.line == 4 + assert not entry.end_line + assert not entry.trackers + +def test_get_entry_obj_multiline(validator): + buffer = ["- This is a ", "multi line ", "changelog entry."] + + entry = validator.get_entry_obj(buffer, "myfile.changes", 5) + assert entry.entry == "This is a multi line changelog entry." 
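+    # get_entry_obj was called with line_no=5 for a 3-line buffer, so the
+    # entry spans lines 5-3=2 through 5-1=4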
+ assert entry.file == "myfile.changes" + assert entry.line == 2 + assert entry.end_line == 4 + assert not entry.trackers + +def test_get_entry_obj_with_tracker(validator_with_trackers): + buffer = ["- This is a changelog entry with a tracker (tckr#99)"] + + entry = validator_with_trackers.get_entry_obj(buffer, "myfile.changes", 1) + assert ("tckr#99", "99") in entry.trackers["tckr"] + +def test_get_entry_obj_with_multiple_trackers(validator_with_trackers): + buffer = ["- This is a changelog entry with trackers (tckr#01, tckr#02)"] + + entry = validator_with_trackers.get_entry_obj(buffer, "myfile.changes", 2) + assert entry.entry == "This is a changelog entry with trackers (tckr#01, tckr#02)" + assert entry.file == "myfile.changes" + assert entry.line == 1 + assert not entry.end_line + assert len(entry.trackers["tckr"]) == 2 + assert ("tckr#01", "01") in entry.trackers["tckr"] + assert ("tckr#02", "02") in entry.trackers["tckr"] + +def test_validate_chlog_file_valid(validator, chlog_file): + chlog_file.write_text("- This is a valid\n multiline changelog entry\n") + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + +def test_validate_chlog_file_multiple_entries(validator, chlog_file): + chlog_file.write_text("- This is a valid\n multiline changelog entry\n- This is a second entry\n") + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 2 + +def test_validate_chlog_file_multiple_entries_line_numbers(validator, chlog_file): + chlog_file.write_text("- This is a valid\n multiline changelog entry\n- This is a second entry\n- a multiline\n entry\n") + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert len(issues) == 1, issues_to_str(issues, 1) + assert len(entries) == 3 + issue = issues[0] + assert IssueType.WRONG_CAP in str(issue) + assert issue.line == 4 + assert issue.end_line == 5 + +def test_validate_chlog_file_empty_file(validator, chlog_file): + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert len(issues) == 1 + assert IssueType.EMPTY_CHLOG in str(issues[0]) + +def test_validate_chlog_file_multiple_issues_and_entries(validator, chlog_file): + content = """- This is a valid entry +- This is a valid + multiline entry +- This entry has an extra + whitespace at the second line +- Invalid entry with multiple trailing whitespaces at the end +- Invalid entry: wrong capitalization after the colon +""" + chlog_file.write_text(content) + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert len(issues) == 3, issues_to_str(issues, 3) + assert len(entries) == 5 + +# Tests for the basic rules + +@pytest.mark.parametrize("entry_text, issue_msg", [ + ("- This entry has\n trailing whitespaces \n", IssueType.TRAIL_WHITESPACE), + ("- This entry has an extra whitespace\n", IssueType.MULTI_WHITESPACE), + (" - This is an invalid changelog entry\n", IssueType.WRONG_INDENT), + ("- This is an invalid changelog entry\n This line has only 1 leading whitespace instead of 2\n", IssueType.WRONG_INDENT), + ("This changelog entry doesn't start with '- ' characters\n", IssueType.WRONG_START), + ("- This is an invalid changelog entry without a newline at the end", IssueType.MISSING_NEWLINE), + ("- This entry\n\n has an empty line in between\n", IssueType.EMPTY_LINE), + ("- this entry has wrong capitalization\n", IssueType.WRONG_CAP), + ("- This entry has wrong capitalization\n in 
the. second sentence\n", IssueType.WRONG_CAP), + ("- This entry has wrong capitalization: right here.\n", IssueType.WRONG_CAP), + ("- This entry has wrong capitalization.\n right here.\n", IssueType.WRONG_CAP), + ("- This entry does not have a space.After a full stop\n", IssueType.WRONG_SPACING), + ("- This entry does not have a space:After a colon\n", IssueType.WRONG_SPACING), + ("- This entry is" + " very" * 10 + " long\n", IssueType.LINE_TOO_LONG.format(DEFAULT_LINE_LENGTH)), +]) +def test_validate_chlog_file_rules(validator, chlog_file, entry_text, issue_msg): + chlog_file.write_text(entry_text) + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert len(issues) == 1, issues_to_str(issues, 1) + assert issue_msg in str(issues[0]) + +# Tests for tracker validation rules + +def test_validate_trackers(validator_with_trackers, chlog_file): + chlog_file.write_text("- This entry has a tracker (tckr#99)\n") + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + + issues = validator_with_trackers.validate_trackers(entries) + assert not issues, issues_to_str(issues, 0) + +def test_validate_trackers_mistyped(validator, chlog_file): + chlog_file.write_text("- This entry has a mistyped trackers (ckr#01, yckr#02)\n") + issues, entries = validator.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + + issues = validator.validate_trackers(entries) + assert len(issues) == 1, issues_to_str(issues, 1) + assert IssueType.MISTYPED_TRACKER in str(issues[0]) + +def test_validate_trackers_multiple(validator_with_trackers, chlog_file): + chlog_file.write_text("- This entry has trackers (tckr#01, tckr#02)\n- More trackers (tckr#02, tckr#03)\n") + # Disable PR validation + validator_with_trackers.pr_number = None + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 2 + assert len(entries[0].trackers["tckr"]) == 2 + assert ("tckr#01", "01") in entries[0].trackers["tckr"] + assert ("tckr#02", "02") in entries[0].trackers["tckr"] + assert len(entries[1].trackers["tckr"]) == 2 + assert ("tckr#02", "02") in entries[1].trackers["tckr"] + assert ("tckr#03", "03") in entries[1].trackers["tckr"] + + issues = validator_with_trackers.validate_trackers(entries) + assert not issues, issues_to_str(issues, 0) + +def test_validate_trackers_with_pr(validator_with_trackers, chlog_file): + chlog_file.write_text("- This entry has a tracker matching with the PR title (tckr#99)\n") + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + issues = validator_with_trackers.validate_trackers(entries) + assert not issues, issues_to_str(issues, 0) + +@pytest.mark.parametrize("entry_text, issue_msg", [ + # In the following cases, tckr#99 is mentioned in the PR title + ("- This entry doesn't have any trackers\n", IssueType.MISSING_TRACKER.format("tckr#99")), + ("- This entry has an additional tracker (tckr#99, tckr#00)\n", IssueType.WRONG_TRACKER.format("tckr#00")) +]) +def test_validate_trackers_tracker_mismatch(validator_with_trackers, chlog_file, entry_text, issue_msg): + # Set PR number to validate trackers against the PR + validator_with_trackers.pr_number = 999 + chlog_file.write_text(entry_text) + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not 
issues, issues_to_str(issues, 0) + issues = validator_with_trackers.validate_trackers(entries) + assert len(issues) == 1, issues_to_str(issues, 1) + assert issue_msg in str(issues[0]) + +def test_validate(validator_with_trackers, base_path, file_list): + for f in file_list: + if ".changes." in f: + p = base_path / f + p.write_text("- This is a valid changelog entry (tckr#99)\n") + + issues = validator_with_trackers.validate(file_list) + assert not issues, issues_to_str(issues, 0) + +def test_validate_no_changes_in_pkg(validator, chlog_file): + chlog_file.write_text("- This is a changelog entry.\n") + + issues = validator.validate(["pkg/path/mypkg.changes.my.feature"]) + assert len(issues) == 1, issues_to_str(issues, 1) + assert IssueType.WRONG_CHLOG in str(issues[0]) + +# Tests for changelogs for correct packages + +def test_validate_missing_chlog(validator, chlog_file): + issues = validator.validate(["pkg/path/myfile.txt"]) + assert len(issues) == 1, issues_to_str(issues, 1) + assert IssueType.MISSING_CHLOG in str(issues[0]) + +def test_validate_chlog_for_wrong_pkg(validator, chlog_file): + chlog_file.write_text("- This is a changelog entry.\n") + issues = validator.validate(["pkg/path/mypkg.changes.my.feature", "pkg/other/path/file.txt"]) + assert len(issues) == 2, issues_to_str(issues, 2) + assert any(IssueType.WRONG_CHLOG in str(issue) and "mypkg" in str(issue) for issue in issues) + assert any(IssueType.MISSING_CHLOG in str(issue) and "otherpkg" in str(issue) for issue in issues) + +def test_validate_change_in_subdir(validator, base_path): + chlog_file = base_path / "pkg/other/otherpkg.changes.my.feature" + chlog_file.write_text("- This is a changelog entry.\n") + issues = validator.validate(["pkg/other/otherpkg.changes.my.feature", "pkg/other/path/file.txt"]) + assert not issues, issues_to_str(issues, 0) + +# Tests for Bugzilla trackers + +def test_validate_bsc(validator_with_trackers, chlog_file): + chlog_file.write_text("- This is an entry with a valid BZ tracker (bsc#1000000)\n") + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + + issues = validator_with_trackers.validate_bsc(entries[0]) + assert not issues, issues_to_str(issues, 0) + +def test_validate_bsc_non_existent(validator_with_trackers, chlog_file): + chlog_file.write_text("- This is an entry with a non-existent BZ tracker (bsc#1234567)\n") + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + + issues = validator_with_trackers.validate_bsc(entries[0]) + assert len(issues) == 1, issues_to_str(issues, 1) + assert IssueType.BUG_NOT_FOUND.format("1234567") in str(issues[0]) + +def test_validate_bsc_not_authorized(validator_with_trackers, chlog_file): + chlog_file.write_text("- This is an entry with a private BZ tracker (bsc#9999999)\n") + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + + issues = validator_with_trackers.validate_bsc(entries[0]) + assert len(issues) == 1, issues_to_str(issues, 1) + assert IssueType.BUG_NOT_AUTHORIZED.format("9999999") in str(issues[0]) + +def test_validate_bsc_wrong_product(validator_with_trackers, chlog_file): + chlog_file.write_text("- An entry with a BZ tracker for another product (bsc#2000000)\n") + issues, entries = validator_with_trackers.validate_chlog_file(str(chlog_file)) + 
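+    # The entry itself is well-formed; only the Bugzilla product check below
+    # should flag it, and only as a non-severe warning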
assert not issues, issues_to_str(issues, 0) + assert len(entries) == 1 + + issues = validator_with_trackers.validate_bsc(entries[0]) + assert len(issues) == 1, issues_to_str(issues, 1) + assert IssueType.INVALID_PRODUCT.format("2000000") in str(issues[0]) diff --git a/.github/workflows/changelogs/trackers.xml b/.github/workflows/changelogs/trackers.xml new file mode 100644 index 000000000000..b25240e9471e --- /dev/null +++ b/.github/workflows/changelogs/trackers.xml @@ -0,0 +1,343 @@ + + + boost + trac + Boost Trac + https://svn.boost.org/trac/boost/ + https://svn.boost.org/trac/boost/ticket/@@@ + boost#(\d+) + + false + true + + + bco + bugzilla + Clutter Project Bugzilla + http://bugzilla.clutter-project.org/ + http://bugzilla.clutter-project.org/show_bug.cgi?id=@@@ + bco#(\d+) + + false + true + + + RT + other + CPAN Bugs + https://rt.cpan.org/ + http://rt.cpan.org/Public/Bug/Display.html?id=@@@ + RT#(\d+) + + false + true + + + deb + bugzilla + Debian Bugzilla + http://bugs.debian.org/ + http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=@@@ + deb#(\d+) + + false + true + + + fdo + bugzilla + Freedesktop.org Bugzilla + https://bugs.freedesktop.org/ + https://bugs.freedesktop.org/show_bug.cgi?id=@@@ + fdo#(\d+) + + false + true + + + GCC + bugzilla + GCC Bugzilla + http://gcc.gnu.org/bugzilla/ + http://gcc.gnu.org/bugzilla/show_bug.cgi?id=@@@ + GCC#(\d+) + + false + true + + + bgo + bugzilla + Gnome Bugzilla + https://bugzilla.gnome.org/ + https://bugzilla.gnome.org/show_bug.cgi?id=@@@ + bgo#(\d+) + + false + true + + + bio + bugzilla + Icculus.org Bugzilla + https://bugzilla.icculus.org/ + https://bugzilla.icculus.org/show_bug.cgi?id=@@@ + bio#(\d+) + + false + true + + + bko + bugzilla + Kernel.org Bugzilla + https://bugzilla.kernel.org/ + https://bugzilla.kernel.org/show_bug.cgi?id=@@@ + (?:Kernel|K|bko)#(\d+) + + false + true + + + kde + bugzilla + KDE Bugzilla + https://bugs.kde.org/ + https://bugs.kde.org/show_bug.cgi?id=@@@ + kde#(\d+) + + false + true + + + lp + launchpad + Launchpad.net Bugtracker + https://bugs.launchpad.net/bugs/ + https://bugs.launchpad.net/bugs/@@@ + b?lp#(\d+) + + false + true + + + Meego + bugzilla + Meego Bugs + https://bugs.meego.com/ + https://bugs.meego.com/show_bug.cgi?id=@@@ + Meego#(\d+) + + false + true + + + bmo + bugzilla + Mozilla Bugzilla + https://bugzilla.mozilla.org/ + https://bugzilla.mozilla.org/show_bug.cgi?id=@@@ + bmo#(\d+) + + false + true + + + bnc + bugzilla + openSUSE Bugzilla + https://apibugzilla.suse.com + https://bugzilla.opensuse.org/show_bug.cgi?id=@@@ + (?:bnc|BNC|bsc|BSC|boo|BOO)\s*[#:]\s*(\d+) + + true + true + + + ITS + other + OpenLDAP Issue Tracker + http://www.openldap.org/its/ + http://www.openldap.org/its/index.cgi/Contrib?id=@@@ + ITS#(\d+) + + false + true + + + i + bugzilla + OpenOffice.org Bugzilla + http://openoffice.org/bugzilla/ + http://openoffice.org/bugzilla/show_bug.cgi?id=@@@ + i#(\d+) + + false + true + + + fate + fate + openSUSE Feature Database + https://features.opensuse.org/ + https://features.opensuse.org/@@@ + (?:fate|Fate|FATE)\s*#\s*(\d+) + + false + true + + + rh + bugzilla + RedHat Bugzilla + https://bugzilla.redhat.com/ + https://bugzilla.redhat.com/show_bug.cgi?id=@@@ + rh#(\d+) + + false + true + + + bso + bugzilla + Samba Bugzilla + https://bugzilla.samba.org/ + https://bugzilla.samba.org/show_bug.cgi?id=@@@ + bso#(\d+) + + false + true + + + sf + sourceforge + SourceForge.net Tracker + http://sf.net/support/ + http://sf.net/support/tracker.php?aid=@@@ + sf#(\d+) + + false + true + + + 
Xamarin + bugzilla + Xamarin Bugzilla + http://bugzilla.xamarin.com/index.cgi + http://bugzilla.xamarin.com/show_bug.cgi?id=@@@ + (?:bxc|Xamarin)#(\d+) + + false + true + + + cve + cve + CVE Numbers + http://cve.mitre.org/ + http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-@@@ + (?:cve|CVE)-(\d\d\d\d-\d+) + + true + true + + + bxo + bugzilla + XFCE Bugzilla + https://bugzilla.xfce.org/ + https://bugzilla.xfce.org/show_bug.cgi?id=@@@ + bxo#(\d+) + + false + true + + + obs + github + OBS GitHub Issues + https://api.github.com/repos/openSUSE/open-build-service/issues + https://github.com/openSUSE/open-build-service/issues/@@@ + obs#(\d+) + + true + true + + + build + github + OBS build script Issues + https://api.github.com/repos/openSUSE/obs-build/issues + https://github.com/openSUSE/obs-build/issues/@@@ + build#(\d+) + + true + true + + + osc + github + OBS CLI Issues + https://api.github.com/repos/openSUSE/osc/issues + https://github.com/openSUSE/osc/issues/@@@ + osc#(\d+) + + true + true + + + poo + bugzilla + progress openSUSE issue + https://progress.opensuse.org/issues + https://progress.opensuse.org/issues/@@@ + poo#(\d+) + + false + true + + + lf + bugzilla + Linux Foundation Bugzilla + https://developerbugs.linuxfoundation.org + https://developerbugs.linuxfoundation.org/show_bug.cgi?id=@@@ + lf#(\d+) + + false + true + + + jsc + jira + SUSE Jira + https://jira.suse.com/ + https://jira.suse.com/browse/@@@ + ((?:OBS|SES|PM|PED|SCRD|SOC|SMO|SUMA|ECO|RMC|TEAM)-\d+|SLE-\d\d\d+) + + false + true + + + gh + github + Generic github tracker + https://www.github.com + https://github.com/@@@ + (?:gh|github)#([\w-]+\/[\w-]+#\d+) + + false + true + + + ijsc + jira + SUSE Jira Internal Tracker + https://jira.suse.com/ + https://jira.suse.com/browse/@@@ + ((?:MSC|PTF)-\d+) + + false + false + +
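For reference, a minimal sketch of driving the new validator from Python instead of
going through the workflow. It assumes an Uyuni checkout root as the working
directory, python-bugzilla installed, and changelogs.py importable as 'changelogs';
the wrapper element names in the inline tracker XML and the two file paths passed
to validate() are illustrative placeholders, not part of the patch.

import tempfile
from changelogs import (RegexRules, ChangelogValidator,
                        DEFAULT_GIT_REPO, DEFAULT_LINE_LENGTH)

# Minimal tracker file: RegexRules only reads the 'name' and 'regex' children of
# each element under the root (the wrapper tag names here are assumptions; the
# real file comes from 'osc api /issue_trackers/').
TRACKERS = """<issue-trackers>
  <issue-tracker>
    <name>bnc</name>
    <regex>(?:bsc|bnc)#(\\d+)</regex>
  </issue-tracker>
</issue-trackers>"""

with tempfile.NamedTemporaryFile("w", suffix=".xml", delete=False) as tf:
    tf.write(TRACKERS)
print(RegexRules(tf.name).trackers)   # {'bnc': '(?:bsc|bnc)#(\\d+)'}

# Without a tracker file and a PR number, the Bugzilla and PR cross-checks are
# skipped, so no GH_TOKEN/BZ_TOKEN is needed for a quick local run.
validator = ChangelogValidator(".", DEFAULT_GIT_REPO, None, DEFAULT_LINE_LENGTH,
                               RegexRules(None))
issues = validator.validate([
    "web/html/src/example.js",               # placeholder changed file
    "web/spacewalk-web.changes.my.feature",  # placeholder changelog file
])
for issue in issues:
    print(issue)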