diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..47f06a1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: +- package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 +- package-ecosystem: pip + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 diff --git a/.github/linters/.markdown-lint.yml b/.github/linters/.markdown-lint.yml new file mode 100644 index 0000000..a9b3063 --- /dev/null +++ b/.github/linters/.markdown-lint.yml @@ -0,0 +1,21 @@ +########################### +########################### +## Markdown Linter rules ## +########################### +########################### + +# Linter rules doc: +# - https://github.com/DavidAnson/markdownlint +# +# Note: +# To comment out a single error: +# +# any violations you want +# +# + +############### +# Rules by id # +############### +MD013: false # Line length is usually not important and the 80 char limit is way too small anyway +MD033: false # Inline HTML is important for multilines and checkboxes within markdown tables diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml new file mode 100644 index 0000000..f14fa0a --- /dev/null +++ b/.github/workflows/linter.yml @@ -0,0 +1,61 @@ +--- +########################### +########################### +## Linter GitHub Actions ## +########################### +########################### +name: Linter + +# +# Documentation: +# https://help.github.com/en/articles/workflow-syntax-for-github-actions +# + +#################################################################### +# Start the job on all pull requests that target the master branch # +#################################################################### +on: + pull_request: + branches: [master] + +############### +# Set the Job # +############### +permissions: + contents: read + +jobs: + linter: + # Name the Job + permissions: + contents: read # for actions/checkout to fetch code + statuses: write # for github/super-linter to mark status of each linter run + name: Lint Code Base + # Set the agent to run on + runs-on: ubuntu-latest + + ################## + # Load all steps # + ################## + steps: + ########################## + # Checkout the code base # + ########################## + - name: Checkout Code + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + with: + # Full git history is needed to get a proper list of changed files within `super-linter` + fetch-depth: 0 + + ################################ + # Run Linter against code base # + ################################ + - name: Lint Code Base + uses: github/super-linter@45fc0d88288beee4701c62761281edfee85655d7 # v5.0.0 + env: + VALIDATE_ALL_CODEBASE: false + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # Markdown lint complains about the issue templates + FILTER_REGEX_EXCLUDE: .github/ISSUE_TEMPLATE/* + # Lots of shellcheck errors - need fixing + VALIDATE_BASH: false diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..e1e69ca --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,26 @@ +name: Test + +on: + pull_request: + branches: [ master ] + +permissions: + contents: read + +jobs: + test_python: + name: Python + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + + - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 + with: + 
python-version: '3'
+
+      - name: Install Python Dependencies
+        run: pip install -r requirements.txt
+
+      - name: Run Python Tests
+        run: python -m unittest test
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index cce7839..8744b6a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,4 @@ workspace
 pipelines/.gradle
 pipelines/gradle-cache
 pipelines/target
+__pycache__
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..082a11b
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,2 @@
+GitPython==3.1.43
+tqdm==4.66.2
diff --git a/skaraMirror.md b/skaraMirror.md
new file mode 100644
index 0000000..28b8493
--- /dev/null
+++ b/skaraMirror.md
@@ -0,0 +1,48 @@
+# OpenJDK Mirroring Script
+
+This script automates mirroring OpenJDK repositories from the upstream openjdk (Skara) organization on GitHub to Adoptium. It clones specific JDK versions, adds the upstream Skara repository as a remote, and performs the merges needed to keep the Adoptium mirrors up to date with OpenJDK development.
+
+## Features
+
+- **Clone Repositories:** Clone OpenJDK repositories for specific JDK versions.
+- **Add Skara Upstream:** Configure the upstream Skara repository as a remote.
+- **Merge Changes:** Merge changes from Skara into the GitHub repository and manage branch merges for release and development purposes.
+
+## Prerequisites
+
+- Python 3.6 or higher
+- Git installed and configured on your system
+
+## Installation
+
+Install the required Python dependencies:
+
+```bash
+  pip install -r requirements.txt
+```
+
+## Usage
+
+The script supports various operations based on command-line arguments:
+
+```bash
+./skaraMirror.py <jdk_version> [repo_url] [branch]
+```
+
+- `<jdk_version>`: The JDK version to mirror (e.g., jdk8u, jdk17u).
+- `[repo_url]`: (Optional) URL of the repository to mirror. Defaults to git@github.com:adoptium.
+- `[branch]`: (Optional) Branch to mirror. Defaults to master.
+
+## Examples
+
+Mirror the JDK 17 repository:
+
+```bash
+./skaraMirror.py jdk17u
+```
+
+Mirror the JDK 8 repository from a specific repository and branch:
+
+```bash
+./skaraMirror.py jdk8u git@github.com:custom_org custom_branch
+```
diff --git a/skaraMirror.py b/skaraMirror.py
new file mode 100755
index 0000000..5c38ee9
--- /dev/null
+++ b/skaraMirror.py
@@ -0,0 +1,536 @@
+#!/usr/bin/env python3
+
+import argparse
+import logging
+import os
+import re
+import subprocess
+
+from git import GitCommandError, RemoteProgress, Repo
+from tqdm import tqdm
+
+
+class CloneProgress(RemoteProgress):
+    def __init__(self):
+        super().__init__()
+        self.pbar = tqdm()
+
+    def update(self, op_code, cur_count, max_count=None, message=""):
+        self.pbar.total = max_count
+        self.pbar.n = cur_count
+        self.pbar.refresh()
+
+
+def check_args():
+    parser = argparse.ArgumentParser(
+        description="Mirror OpenJDK GitHub repos to Adoptium."
+    )
+    parser.add_argument(
+        "jdk_version", help="JDK version to mirror (e.g. jdk8u, jdk17u)", type=str
+    )
+    parser.add_argument(
+        "repo_url",
+        nargs="?",
+        default="git@github.com:adoptium",
+        help="URL of the repository to mirror (optional)",
+        type=str,
+    )
+    parser.add_argument(
+        "branch",
+        nargs="?",
+        default="master",
+        help="Branch to mirror (optional)",
+        type=str,
+    )
+
+    args = parser.parse_args()
+    return args
+
+
+def clone_github_repo(jdk_version, repo_url, workspace):
+    """
+    Clone the specified GitHub repository into the workspace.
+ """ + repo_name = jdk_version + local_repo_path = os.path.join(workspace, repo_name) + + # If we don't have a clone locally then clone it from adoptium/$repo_url.git + if not os.path.isdir(local_repo_path): + print(f"Cloning {repo_name} into {local_repo_path}...") + try: + Repo.clone_from(repo_url, local_repo_path, progress=CloneProgress()) + print(f"Repository {repo_name} cloned successfully.") + except GitCommandError as error: + print(f"Failed to clone repository: {error}") + exit(1) + else: + print(f"Repository {repo_name} already exists at {local_repo_path}.") + + +def add_skara_upstream(workspace, jdk_version, skara_repo, branch): + """ + Add the Skara repository as a remote and check out the specified branch. + """ + local_repo_path = os.path.join(workspace, jdk_version) + try: + # Open the existing repository + repo = Repo(local_repo_path) + + # Fetch origin + repo.remotes.origin.fetch() + + # Check out the specified branch + if branch in repo.heads: + # Branch exists locally, just check it out + repo.heads[branch].checkout() + # Reset the branch to match the upstream branch + print(f"Resetting {branch} to match upstream...") + try: + repo.git.reset("--hard", f"origin/{branch}") + except: + print("Not resetting as no upstream exists") + elif f"origin/{branch}" in repo.refs: + # Branch exists on remote, create it locally + repo.create_head(branch).checkout() + else: + # Branch does not exist in the remote repository + print( + f"Branch '{branch}' does not exist in the remote repository yet. Using Skara's default branch." + ) + repo.remotes.skara.fetch() + # Create master branch + repo.create_head(branch, f"skara/{branch}").checkout() + + # Check if the remote named 'skara' exists, add if not + if "skara" not in [remote.name for remote in repo.remotes]: + print(f"Initial setup of: {skara_repo}") + repo.create_remote("skara", skara_repo) + + except GitCommandError as error: + print(f"Git command failed: {error}") + exit(1) + except Exception as e: + print(f"An error occurred: {e}") + exit(1) + + +def perform_merge_from_skara_into_git(workspace, github_repo, branch): + """ + Merge the changes from the Skara repository into the GitHub repository. + """ + local_repo_path = os.path.join(workspace, github_repo) + + try: + # Open the existing repository + repo = Repo(local_repo_path) + + # Fetch from Skara remote + print("Fetching updates from Skara remote...") + repo.remotes.skara.fetch(**{"tags": True}) + + # Rebase the current branch with Skara's branch + print(f"Rebasing {branch} with Skara/{branch}...") + repo.git.rebase(f"skara/{branch}") + + # Push the changes to origin + print(f"Pushing {branch} to origin...") + repo.remotes.origin.push(branch, follow_tags=True, progress=CloneProgress()) + + except GitCommandError as error: + print(f"Git command failed: {error}") + exit(1) + except Exception as e: + print(f"An error occurred: {e}") + exit(1) + +def fetch_and_reset_repo(repo): + """ + Abort any ongoing merge and reset the repository, then fetch all tags. 
+ """ + # Abort any ongoing merge and reset + repo.git.merge("--abort", with_exceptions=False) + repo.git.reset("--hard", with_exceptions=False) + + # Fetch all tags + for remote in repo.remotes: + print(f"Fetching latest from {remote}") + remote.fetch(**{"tags": True}) + + +def perform_merge_into_release_from_master(workspace, github_repo, branch): + """ + Merge master(New tagged builds only) into release branch as we build + off release branch at the Adoptium JDK Build farm for release builds + release branch contains patches that Adoptium JDK has beyond upstream OpenJDK tagged builds. + """ + local_repo_path = os.path.join(workspace, github_repo) + + try: + # Open the existing repository + repo = Repo(local_repo_path) + + fetch_and_reset_repo(repo) + + sorted_build_tags = fetch_and_sort_tags( + local_repo_path, github_repo, f"origin/{branch}" + ) + + # Check if release branch exits + if "release" not in repo.heads: + if repo.git.rev_parse("--verify", "origin/release", with_exceptions=False): + repo.git.checkout("-b", "release", "origin/release") + else: + # Get the currentBuildTag from sorted_tags + currentBuildTag = sorted_build_tags[-1] + print(f"Creating release branch from {currentBuildTag}") + repo.git.checkout("-b", "release", currentBuildTag) + else: + print("Release branch already exists. Resetting to origin/release...") + repo.heads["release"].checkout() + try: + repo.git.reset("--hard", "origin/release") + except: + print("Not resetting as no upstream exists") + + # Apply patches if required + apply_patches_if_needed(workspace, github_repo) + + # Find the latest and previous release tags that is not in releaseTagExcludeList + sortedReleaseTags = fetch_and_sort_tags(local_repo_path, github_repo, "release") + currentReleaseTag = "" + + for tag in sortedReleaseTags: + # Check if tag is in the env var releaseTagExcludeList, if so it can't be the current tag + if tag in os.getenv("releaseTagExcludeList", "").split(): + print(f"Skipping excluded tag {tag} from current list") + continue + + if tag.endswith("-b00") or tag.endswith("+0"): + print(f"Skipping fork point tag {tag} from current list") + continue + + currentReleaseTag = tag + + print(f"Current release build tag: {currentReleaseTag}") + + # Merge any new builds since current release build tag + foundCurrentReleaseTag = False + newAdoptTags = [] + + for tag in sorted_build_tags: + if not foundCurrentReleaseTag: + if tag == currentReleaseTag: + foundCurrentReleaseTag = True + + else: + # Check if tag is in the releaseTagExcludeList, if so do not bring it into the release branch + # and do not create an _adopt tag + if tag in os.getenv("releaseTagExcludeList", "").split(): + print(f"Skipping excluded tag {tag} from merge") + continue + if tag.endswith("-b00") or tag.endswith("+0"): + print(f"Skipping fork point tag {tag} from merge") + continue + + print(f"Merging build tag {tag} into release branch") + repo.git.merge( + "-m", f"Merging build tag {tag} into release branch", tag + ) + print(f"Tagging {tag} as {tag}_adopt") + adoptTag = f"{tag}_adopt" + repo.create_tag(adoptTag, ref=tag, message=f"Merged {tag} into release") + newAdoptTags.append(adoptTag) + + if repo.git.rev_parse( + "-q", "--verify", "origin/release", with_exceptions=False + ): + print(repo.git.log("--oneline", "origin/release..release")) + + # Find the latest and previous release tags that is not in releaseTagExcludeList + sortedReleaseTags = fetch_and_sort_tags(local_repo_path, github_repo, "release") + for tag in sortedReleaseTags: + # Check if tag is in the 
releaseTagExcludeList, if so it can't be the current tag + if tag in os.getenv("releaseTagExcludeList", "").split(): + print(f"Skipping excluded tag {tag} from current list") + continue + + if tag.endswith("-b00") or tag.endswith("+0"): + print(f"Skipping fork point tag {tag} from current list") + continue + + prevReleaseTag = currentReleaseTag + currentReleaseTag = tag + + print(f"New release build tag: {currentReleaseTag}") + repo.remotes.origin.push("release", follow_tags=True, progress=CloneProgress()) + + # Check if the last two build tags are the same commit, and ensure we have tagged both _adopt tags + if prevReleaseTag: + prevCommit = repo.git.rev_list("-n", "1", prevReleaseTag) + currentCommit = repo.git.rev_list("-n", "1", currentReleaseTag) + + if prevCommit == currentCommit: + print( + f"Current build tag commit is same as previous build tag commit: {prevReleaseTag} == {currentReleaseTag}" + ) + prevReleaseAdoptTag = f"{prevReleaseTag}_adopt" + currentReleaseAdoptTag = f"{currentReleaseTag}_adopt" + + if repo.git.tag("-l", prevReleaseAdoptTag): + if not repo.git.tag("-l", currentReleaseAdoptTag): + print( + f"Tagging new current release tag {currentReleaseAdoptTag} which is same commit as the previous {prevReleaseAdoptTag}" + ) + repo.create_tag( + currentReleaseAdoptTag, + ref=currentReleaseTag, + message=f"Merged {currentReleaseTag} into release", + ) + newAdoptTags.append(currentReleaseAdoptTag) + + # Ensure all new _adopt tags are pushed in case no new commits were pushed, eg.multiple tags on same commit + for tag in newAdoptTags: + print(f"Pushing new tag: {tag}") + repo.remotes.origin.push(tag) + + print("Merging complete.") + + except GitCommandError as error: + print(f"Git command failed: {error}") + exit(1) + except Exception as e: + print(f"An error occurred: {e}") + exit(1) + + +def perform_merge_into_dev_from_master(workspace, github_repo, branch): + """ + Merge master(HEAD) into dev as we build off dev at the Adoptium JDK Build farm for Nightlies + dev contains patches that Adoptium JDK has beyond upstream OpenJDK + """ + local_repo_path = os.path.join(workspace, github_repo) + + try: + # Open the existing repository + repo = Repo(local_repo_path) + + fetch_and_reset_repo(repo) + + # Check if dev branch exits + if "dev" not in repo.heads: + if repo.git.rev_parse("--verify", "origin/dev", with_exceptions=False): + repo.create_head("dev", "origin/dev") + else: + repo.create_head("dev", f"origin/{branch}") + else: + print("dev branch already exists. 
Resetting to origin/dev...") + # Checkout the dev branch + release_branch = repo.heads["dev"].checkout() + repo.git.reset("--hard", "origin/dev") + + # Checkout the dev branch + release_branch = repo.heads["dev"] + release_branch.checkout() + + sorted_tags = fetch_and_sort_tags(local_repo_path, github_repo, "dev") + currentDevTag = sorted_tags[-1] + print(f"Current dev tag: {currentDevTag}") + + # Merge master "HEAD" + print(f"Merging origin/{branch} HEAD into dev branch") + repo.git.merge( + "-m", f"Merging origin/{branch} HEAD into dev", f"origin/{branch}" + ) + + # Merge latest patches from "release" branch + print("Merging latest patches from release branch") + repo.git.merge( + "-m", "Merging latest patches from release branch", "origin/release" + ) + + if repo.git.rev_parse("-q", "--verify", "origin/dev", with_exceptions=False): + print(repo.git.log("--oneline", "origin/dev..dev")) + + sorted_tags = fetch_and_sort_tags(local_repo_path, github_repo, "dev") + currentDevTag = sorted_tags[-1] + print(f"New dev tag: {currentDevTag}") + + # Push the changes to origin/dev + repo.remotes.origin.push("dev", follow_tags=True, progress=CloneProgress()) + + except GitCommandError as error: + print(f"Git command failed: {error}") + exit(1) + except Exception as e: + print(f"An error occurred: {e}") + exit(1) + + +def apply_patches_if_needed(workspace, github_repo): + """ + Apply patches if the repository meets certain conditions. + """ + print(f"Checking if patches need to be applied for {github_repo}") + + # actions ignore branch patch is for > jdk11u + if github_repo not in ["jdk8u", "aarch32-port-jdk8u", "jdk11u"]: + main_workflow_file = os.path.join( + workspace, github_repo, ".github", "workflows", "main.yml" + ) + + # Check if the file exists and the patch hasn't been applied yet + if os.path.exists(main_workflow_file) and not is_patch_applied( + main_workflow_file, "- dev" + ): + patch_file = os.path.join( + os.getcwd(), "patches", "actions-ignore-branches.patch" + ) + apply_patch(patch_file, os.path.join(workspace, github_repo)) + + # README.JAVASE patch needed for all repos + readme_file = os.path.join(workspace, github_repo, "README.JAVASE") + if not os.path.exists(readme_file): + patch_file = os.path.join(os.getcwd(), "patches", "readme-javase.patch") + apply_patch(patch_file, os.path.join(workspace, github_repo)) + + +def is_patch_applied(file_path, search_string): + """ + Checks if a search string is present in a file. + """ + with open(file_path, "r") as file: + if search_string in file.read(): + return True + return False + + +def apply_patch(patch_file, repo_path): + """ + Apply a specified patch file to the repository. + """ + if os.path.exists(patch_file): + try: + print(f"Applying patch: {patch_file}") + subprocess.run(["git", "am", patch_file], check=True, cwd=repo_path) + print("Patch applied successfully.") + except subprocess.CalledProcessError as e: + print(f"Failed to apply patch {patch_file}: {e}") + subprocess.run(["git", "am", "--abort"], cwd=repo_path) + exit(1) + else: + print(f"Patch file not found: {patch_file}") + + +def sort_jdk11plus_tags(tags): + """ + JDK11+ tag sorting: + We use sort and tail to choose the latest tag in case more than one refers the same commit. + Versions tags are formatted: jdk-V[.W[.X[.P]]]+B; with V, W, X, P, B being numeric. + Transform "-" to "." in tag so we can sort as: "jdk.V[.W[.X[.P]]]+B" + Transform "+" to ".0.+" during the sort so that .P (patch) is defaulted to "0" for those + that don't have one, and the trailing "." 
to terminate the 5th field from the +.
+    """
+    # Filter out tags with '_adopt' in their name
+    tags = [tag for tag in tags if "_adopt" not in tag]
+
+    # Preprocess tags for sorting
+    # Replace 'jdk-' with 'jdk.' for consistency in splitting
+    # Add a pseudo patch number '.0.0' before '+' to ensure proper sorting of tags without a patch number
+    tags = ["jdk." + tag[4:].replace("+", ".0.0+") for tag in tags]
+
+    def tag_sort_key(tag):
+        # Split the tag into components for sorting
+        # First, handle the version part, splitting by '.' and converting numbers to integers
+        parts = tag.split(".")
+        version_parts = [
+            int(part) if part.isdigit() else part for part in parts[:-1]
+        ]  # Exclude the build part for now
+
+        # Extract and process the build number, which follows the last '+' symbol
+        # Default the build number to 0 when the component after '+' is missing or non-numeric
+        build_part = parts[-1].split("+")
+        build_number = (
+            int(build_part[1]) if len(build_part) > 1 and build_part[1].isdigit() else 0
+        )
+
+        # Combine version parts and build number into one tuple for sorting
+        return (*version_parts, build_number)
+
+    # Sort the tags using the defined key
+    tags.sort(key=tag_sort_key)
+
+    # Post-process sorted tags to revert changes made for sorting
+    # This involves replacing '.0.0+' back to '+', and 'jdk.' to 'jdk-'
+    sorted_tags = [tag.replace(".0.0+", "+").replace("jdk.", "jdk-") for tag in tags]
+
+    return sorted_tags
+
+def sort_jdk8_tags(tags):
+    """
+    JDK8 tag sorting:
+    We sort the tags so the latest one comes last, in case more than one refers to the same commit.
+    Version tags are formatted: jdkVu[.W]-bB; with V, W, B being numeric.
+    """
+    # Filter out tags with '_adopt' in their name
+    tags = [tag for tag in tags if "_adopt" not in tag]
+
+    # First, sort on build number (B):
+    tags = sorted(tags, key=lambda x: int(x.split("-b")[1]))
+
+    # Add a positional index to the beginning of each tag
+    tags = [f"{i:02d} {tag}" for i, tag in enumerate(tags, 1)]
+
+    # Second, (stable) sort on (V), (W)
+    tags = sorted(tags, key=lambda x: int(x.split("jdk8u")[1].split("-b")[0]))
+
+    # Remove the number from the beginning of each tag
+    tags = [tag.split(" ", 1)[1] for tag in tags]
+
+    return tags
+
+def fetch_and_sort_tags(repo_path, version, branch):
+    """
+    Fetch tags from the repository and sort them according to the JDK version's tag scheme.
+ """ + # convert version such as jdk22u to 22 + version = re.search(r"\d+", version).group() + repo = Repo(repo_path) + if version == "8": + tag_search_cmd = f"jdk{version}*-*" + else: + tag_search_cmd = f"jdk-{version}*+*" + + tags = repo.git.tag("--merged", branch, tag_search_cmd).split("\n") + + if version == "8": + sorted_tags = sort_jdk8_tags(tags) + else: + sorted_tags = sort_jdk11plus_tags(tags) + + return sorted_tags + + +def main(): + # Parse command line arguments + args = check_args() + + # Set up logging + logging.basicConfig(level=logging.INFO) + + # If WORKSPACE env var is set, use it, otherwise use the current directory/workspace + workspace = os.getenv("WORKSPACE", os.path.join(os.getcwd(), "workspace")) + + skara_repo = f"https://github.com/openjdk/{args.jdk_version}" + + git_repo = f"{args.repo_url}/{args.jdk_version}" + + # Perform operations + clone_github_repo(args.jdk_version, git_repo, workspace) + add_skara_upstream(workspace, args.jdk_version, skara_repo, args.branch) + perform_merge_from_skara_into_git(workspace, args.jdk_version, args.branch) + perform_merge_into_release_from_master(workspace, args.jdk_version, args.branch) + perform_merge_into_dev_from_master(workspace, args.jdk_version, args.branch) + + +if __name__ == "__main__": + main() diff --git a/test.py b/test.py new file mode 100644 index 0000000..a13d82c --- /dev/null +++ b/test.py @@ -0,0 +1,122 @@ +import unittest +from unittest.mock import patch + +from skaraMirror import ( + check_args, + fetch_and_sort_tags, + sort_jdk8_tags, + sort_jdk11plus_tags, +) + + +class TestCheckArgs(unittest.TestCase): + @patch("sys.argv", ["script_name", "jdk8u", "https://example.com/repo", "dev"]) + def test_with_full_arguments(self): + args = check_args() + self.assertEqual(args.jdk_version, "jdk8u") + self.assertEqual(args.repo_url, "https://example.com/repo") + self.assertEqual(args.branch, "dev") + + @patch("sys.argv", ["script_name", "jdk11u"]) + def test_with_minimum_arguments(self): + args = check_args() + self.assertEqual(args.jdk_version, "jdk11u") + self.assertEqual(args.repo_url, "git@github.com:adoptium") + self.assertEqual(args.branch, "master") + + +class TestFetchAndSortTags(unittest.TestCase): + @patch( + "skaraMirror.Repo" + ) # Adjust the patch path according to your script's structure + def test_fetch_and_sort_tags(self, mock_repo): + # Mock git.tag() to return a list of tags + mock_repo.return_value.git.tag.return_value = ( + "jdk-11.0.1+10\njdk-11.0.2+9\njdk-11.0.2+10" + ) + + # Assuming your sorting logic is correct and tested separately, + # the expected result should reflect the sorted tags + expected_sorted_tags = ["jdk-11.0.1+10", "jdk-11.0.2+9", "jdk-11.0.2+10"] + + # Adjust arguments as necessary for your function's parameters + sorted_tags = fetch_and_sort_tags("/fake/repo/path", "jdk11", "master") + + self.assertEqual(sorted_tags, expected_sorted_tags) + + @patch("skaraMirror.Repo") + def test_adopt_tags_excluded(self, mock_repo): + # Mocking response with Adopt tags + mock_repo.return_value.git.tag.return_value = "jdk-11.0.1+10\njdk-11.0.2_adopt" + + # Expected result with Adopt tags excluded + expected_result = ["jdk-11.0.1+10"] + result = fetch_and_sort_tags("/fake/repo/path", "jdk11", "master") + self.assertEqual(result, expected_result) + + @patch("skaraMirror.Repo") + def test_different_jdk_versions(self, mock_repo): + # Mocking different responses based on JDK version + mock_repo.return_value.git.tag.side_effect = [ + "jdk8u412-b07\njdk8u412-b06\njdk8u122-b11\njdk8u122-b11_adopt", 
# JDK 8 tags + "jdk-11.0.1+10\njdk-11.0.2+9", # JDK 11 tags + ] + + # JDK 8 expected result + expected_jdk8 = ["jdk8u122-b11", "jdk8u412-b06", "jdk8u412-b07"] + result_jdk8 = fetch_and_sort_tags("/fake/repo/path", "jdk8", "master") + self.assertEqual(result_jdk8, expected_jdk8) + + # JDK 11 expected result + expected_jdk11 = ["jdk-11.0.1+10", "jdk-11.0.2+9"] + result_jdk11 = fetch_and_sort_tags("/fake/repo/path", "jdk11", "master") + self.assertEqual(result_jdk11, expected_jdk11) + + +class TestSortJDK11PlusTags(unittest.TestCase): + def test_sorting_basic(self): + # Test basic sorting with mixed versions and build numbers + tags = ["jdk-11.0.2+9", "jdk-11.0.10+3", "jdk-11.0.1+13"] + expected_sorted_tags = ["jdk-11.0.1+13", "jdk-11.0.2+9", "jdk-11.0.10+3"] + self.assertEqual(sort_jdk11plus_tags(tags), expected_sorted_tags) + + def test_sorting_with_patch_numbers(self): + # Test sorting with patch numbers + tags = ["jdk-11.0.2+9", "jdk-11.0.2.1+9", "jdk-11.0.2+10"] + expected_sorted_tags = ["jdk-11.0.2+9", "jdk-11.0.2+10", "jdk-11.0.2.1+9"] + self.assertEqual(sort_jdk11plus_tags(tags), expected_sorted_tags) + + def test_sorting_with_mixed_versions(self): + # Test sorting with completely mixed versions and builds + tags = ["jdk-12.0.1+12", "jdk-11.0.2+9", "jdk-13+33", "jdk-11.0.1+13"] + expected_sorted_tags = [ + "jdk-11.0.1+13", + "jdk-11.0.2+9", + "jdk-12.0.1+12", + "jdk-13+33", + ] + self.assertEqual(sort_jdk11plus_tags(tags), expected_sorted_tags) + + def test_sorting_with_adopt_versions(self): + # Test sorting with GA versions + tags = ["jdk-11.0.2", "jdk-11.0.2+9", "jdk-11.0.2_adopt"] + expected_sorted_tags = ["jdk-11.0.2", "jdk-11.0.2+9"] + self.assertEqual(sort_jdk11plus_tags(tags), expected_sorted_tags) + + +class TestSortJDK8Tags(unittest.TestCase): + def test_sorting_basic(self): + # Test basic sorting with mixed versions and build numbers + tags = ["jdk8u122-b11", "jdk8u412-b06", "jdk8u412-b07"] + expected_sorted_tags = ["jdk8u122-b11", "jdk8u412-b06", "jdk8u412-b07"] + self.assertEqual(sort_jdk8_tags(tags), expected_sorted_tags) + + def test_sorting_with_adopt_versions(self): + # Test sorting with Adopt versions + tags = ["jdk8u122-b11", "jdk8u412-b06", "jdk8u412-b07_adopt"] + expected_sorted_tags = ["jdk8u122-b11", "jdk8u412-b06"] + self.assertEqual(sort_jdk8_tags(tags), expected_sorted_tags) + + +if __name__ == "__main__": + unittest.main()
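
For reference, a minimal sketch of how the two tag-sorting helpers added in skaraMirror.py order their input; the sample tags and expected orderings below are the same ones exercised by test.py, and the snippet assumes skaraMirror.py is importable from the current directory:

```python
# Illustrative sketch only (not part of the patch above): shows the ordering
# produced by the tag-sorting helpers, using the same sample tags as test.py.
from skaraMirror import sort_jdk8_tags, sort_jdk11plus_tags

# JDK 11+ tags sort numerically on the version fields, then on the build number
# after '+'; any '_adopt' tags are filtered out first.
print(sort_jdk11plus_tags(["jdk-11.0.2+9", "jdk-11.0.10+3", "jdk-11.0.1+13"]))
# ['jdk-11.0.1+13', 'jdk-11.0.2+9', 'jdk-11.0.10+3']

# JDK 8 tags sort on the build number (-bNN) first, then stably on the update
# version (jdk8uNNN); '_adopt' tags are likewise filtered out.
print(sort_jdk8_tags(["jdk8u412-b07", "jdk8u122-b11", "jdk8u412-b06"]))
# ['jdk8u122-b11', 'jdk8u412-b06', 'jdk8u412-b07']
```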