Compare commits
1 Commits
t-staging-
...
obs-groups
| Author | SHA256 | Date | |
|---|---|---|---|
|
2ff8784e40
|
10
Makefile
10
Makefile
@@ -1,8 +1,4 @@
|
||||
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
|
||||
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-groups-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
|
||||
|
||||
.PHONY: build $(MODULES)
|
||||
|
||||
build: $(MODULES)
|
||||
|
||||
$(MODULES):
|
||||
go build -C $@ -buildmode=pie
|
||||
build:
|
||||
for m in $(MODULES); do go build -C $$m -buildmode=pie || exit 1 ; done
|
||||
|
||||
@@ -65,12 +65,6 @@ class GiteaAPIClient:
|
||||
return None
|
||||
raise
|
||||
|
||||
def get_submodule_sha(self, owner: str, repo: str, submodule_path: str, ref: str = "main"):
|
||||
info = self.get_file_info(owner, repo, submodule_path, branch=ref)
|
||||
if info and info.get("type") == "submodule":
|
||||
return info.get("sha")
|
||||
return None
|
||||
|
||||
def create_user(self, username, password, email):
|
||||
vprint(f"--- Creating user: {username} ---")
|
||||
data = {
|
||||
@@ -531,14 +525,6 @@ index 00000000..{pkg_b_sha}
|
||||
|
||||
return review
|
||||
|
||||
def request_reviewers(self, repo_full_name: str, pr_number: int, reviewers: list):
|
||||
owner, repo = repo_full_name.split("/")
|
||||
url = f"repos/{owner}/{repo}/pulls/{pr_number}/requested_reviewers"
|
||||
data = {"reviewers": reviewers}
|
||||
vprint(f"--- Requesting reviewers for {repo_full_name} PR #{pr_number}: {reviewers} ---")
|
||||
response, duration = self._request("POST", url, json=data)
|
||||
return response.json()
|
||||
|
||||
def list_reviews(self, repo_full_name: str, pr_number: int):
|
||||
owner, repo = repo_full_name.split("/")
|
||||
url = f"repos/{owner}/{repo}/pulls/{pr_number}/reviews"
|
||||
|
||||
@@ -1,521 +0,0 @@
|
||||
import pytest
|
||||
import time
|
||||
import re
|
||||
from tests.lib.common_test_utils import GiteaAPIClient, vprint
|
||||
|
||||
# Shared methods
|
||||
|
||||
def wait_for_staging_bot_reviewer(gitea_env, prj_pr_number, timeout=30):
|
||||
"""Wait for staging-bot to be added as a reviewer."""
|
||||
for _ in range(timeout):
|
||||
time.sleep(1)
|
||||
pr_details = gitea_env.get_pr_details("myproducts/mySLFO", prj_pr_number)
|
||||
if any(
|
||||
r.get("login") == "autogits_obs_staging_bot"
|
||||
for r in pr_details.get("requested_reviewers", [])
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
def wait_for_comment(gitea_env, repo_full_name, pr_number, text_to_find, subtext=None, timeout=60):
|
||||
"""Wait for a specific comment in the PR timeline."""
|
||||
vprint(f"Waiting for comment '{text_to_find}'" + (f" with subtext '{subtext}'" if subtext else "") + f" in {repo_full_name} PR #{pr_number}...")
|
||||
for _ in range(timeout):
|
||||
time.sleep(1)
|
||||
try:
|
||||
events = gitea_env.get_timeline_events(repo_full_name, pr_number)
|
||||
except Exception:
|
||||
continue
|
||||
for event in events:
|
||||
body = event.get("body", "")
|
||||
if not body:
|
||||
continue
|
||||
if subtext:
|
||||
if text_to_find in body and subtext in body:
|
||||
return True
|
||||
else:
|
||||
if text_to_find == body.strip():
|
||||
return True
|
||||
return False
|
||||
|
||||
def setup_obs_mock(httpserver, project_name, build_result_handler):
|
||||
"""Setup OBS mock handlers."""
|
||||
def general_project_meta_handler(request):
|
||||
project = request.path.split("/")[2]
|
||||
return f'<project name="{project}"><scmsync>http://gitea-test:3000/myproducts/mySLFO.git</scmsync></project>'
|
||||
|
||||
httpserver.clear()
|
||||
httpserver.expect_request(re.compile(r"/source/[^/]+/_meta$"), method="GET").respond_with_handler(general_project_meta_handler)
|
||||
httpserver.expect_request(re.compile(f"/build/{project_name}/_result"), method="GET").respond_with_handler(build_result_handler)
|
||||
httpserver.expect_request(re.compile(r"/source/[^/]+/_meta$"), method="PUT").respond_with_data("OK")
|
||||
httpserver.expect_request(re.compile(r"/source/[^/]+$"), method="DELETE").respond_with_data("OK")
|
||||
|
||||
def create_build_result_xml(project_name, repo_arch_package_status):
|
||||
"""Create the XML response for OBS build results."""
|
||||
results_xml = ""
|
||||
# Group statuses by (repo, arch)
|
||||
grouped = {}
|
||||
for (repo, arch, package), (repo_code, pkg_code) in repo_arch_package_status.items():
|
||||
if (repo, arch) not in grouped:
|
||||
grouped[(repo, arch)] = {"repo_code": repo_code, "packages": []}
|
||||
grouped[(repo, arch)]["packages"].append((package, pkg_code))
|
||||
|
||||
for (repo, arch), data in grouped.items():
|
||||
repo_code = data["repo_code"]
|
||||
pkg_statuses = "".join([f'\n <status package="{p}" code="{c}"/>' for p, c in data["packages"]])
|
||||
results_xml += f"""
|
||||
<result project="{project_name}" repository="{repo}" arch="{arch}" code="{repo_code}" state="{repo_code}">{pkg_statuses}
|
||||
</result>"""
|
||||
return f'<resultlist state="mock">{results_xml}\n</resultlist>'
|
||||
|
||||
def create_submodule_diff(submodule_name, old_sha, new_sha):
|
||||
"""Create a git diff string for updating a submodule commit ID."""
|
||||
return f"""diff --git a/{submodule_name} b/{submodule_name}
|
||||
index {old_sha[:7]}..{new_sha[:7]} 160000
|
||||
--- a/{submodule_name}
|
||||
+++ b/{submodule_name}
|
||||
@@ -1 +1 @@
|
||||
-Subproject commit {old_sha}
|
||||
+Subproject commit {new_sha}
|
||||
"""
|
||||
|
||||
@pytest.mark.t001
|
||||
def test_001_build_on_all_archs_success(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create a package PR.
|
||||
diff = "diff --git a/test_br.txt b/test_br.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR PR", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number = pr["number"]
|
||||
|
||||
# 2. Wait for the workflow-pr bot to create the related project PR.
|
||||
prj_pr_number = gitea_env.wait_for_project_pr("mypool/pkgA", pkg_pr_number)
|
||||
assert prj_pr_number is not None, "Workflow bot did not create a project PR."
|
||||
|
||||
# 3. Wait for staging-bot to be added as a reviewer.
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
repo_arch_package_status = {}
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition all to success.
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# Expected Result 2: "Build successful" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build successful"), "Staging bot did not post 'Build successful' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build successful..." on package PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number, "Build successful, for more information go in", project_name), "Staging bot did not post 'Build successful' comment on package PR."
|
||||
|
||||
@pytest.mark.t002
|
||||
def test_002_build_on_all_archs_mix(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create a package PR.
|
||||
diff = "diff --git a/test_br_mix.txt b/test_br_mix.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR Mix PR", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number = pr["number"]
|
||||
|
||||
# 2. Wait for the workflow-pr bot to create the related project PR.
|
||||
prj_pr_number = gitea_env.wait_for_project_pr("mypool/pkgA", pkg_pr_number)
|
||||
assert prj_pr_number is not None, "Workflow bot did not create a project PR."
|
||||
|
||||
# 3. Wait for staging-bot to be added as a reviewer.
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list with 2 repositories and 2 architectures.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
repo_arch_package_status = {}
|
||||
|
||||
# 5. Set all repository, architecture, and package statuses to "in progress" (e.g., code="building").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition the first repository (all architectures) to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for a in archs:
|
||||
repo_arch_package_status[("repo1", a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# 7. Transition the second repository (all architectures) to "finished" mode and set its package statuses to "failed" (e.g., code="failed").
|
||||
for a in archs:
|
||||
repo_arch_package_status[("repo2", a, "pkgA")] = ("finished", "failed")
|
||||
|
||||
# Expected Result 2: "Build failed" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build failed"), "Staging bot did not post 'Build failed' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build failed, for more information go in..." on package PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number, "Build failed, for more information go in", project_name), "Staging bot did not post 'Build failed' comment on package PR."
|
||||
|
||||
@pytest.mark.t003
|
||||
def test_003_build_on_some_archs_failed(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create a package PR.
|
||||
diff = "diff --git a/test_br_some_fail.txt b/test_br_some_fail.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR Some Fail PR", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number = pr["number"]
|
||||
|
||||
# 2. Wait for the workflow-pr bot to create the related project PR.
|
||||
prj_pr_number = gitea_env.wait_for_project_pr("mypool/pkgA", pkg_pr_number)
|
||||
assert prj_pr_number is not None, "Workflow bot did not create a project PR."
|
||||
|
||||
# 3. Wait for staging-bot to be added as a reviewer.
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list with 2 repositories and 2 architectures.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
repo_arch_package_status = {}
|
||||
|
||||
# 5. Set all repository, architecture, and package statuses to "in progress" (e.g., code="building").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition the first repository (all architectures) to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for a in archs:
|
||||
repo_arch_package_status[("repo1", a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# 7. Transition the second repository to "finished" mode, setting the first architecture to "failed" (code="failed") and the second architecture to "success" (code="succeeded").
|
||||
repo_arch_package_status[("repo2", archs[0], "pkgA")] = ("finished", "failed")
|
||||
repo_arch_package_status[("repo2", archs[1], "pkgA")] = ("finished", "succeeded")
|
||||
|
||||
# Expected Result 2: "Build failed" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build failed"), "Staging bot did not post 'Build failed' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build failed, for more information go in..." on package PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number, "Build failed, for more information go in", project_name), "Staging bot did not post 'Build failed' comment on package PR."
|
||||
|
||||
@pytest.mark.t004
|
||||
def test_004_build_multiple_packages_success(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create package PRs.
|
||||
diff = "diff --git a/test_br_multi_pkgA.txt b/test_br_multi_pkgA.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr1 = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR Multi PR A", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number1 = pr1["number"]
|
||||
pkg_head_sha1 = pr1["head"]["sha"]
|
||||
|
||||
diff2 = "diff --git a/test_br_multi_pkgB.txt b/test_br_multi_pkgB.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr2 = gitea_env.create_gitea_pr("mypool/pkgB", diff2, "Test BR Multi PR B", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number2 = pr2["number"]
|
||||
pkg_head_sha2 = pr2["head"]["sha"]
|
||||
|
||||
# 2. Create a project PR mentioning both packages in description and UPDATING SUBMODULES.
|
||||
old_sha1 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgA", ref=merge_branch_name)
|
||||
old_sha2 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgB", ref=merge_branch_name)
|
||||
|
||||
prj_diff = create_submodule_diff("pkgA", old_sha1, pkg_head_sha1)
|
||||
prj_diff += create_submodule_diff("pkgB", old_sha2, pkg_head_sha2)
|
||||
|
||||
body = f"PR: mypool/pkgA!{pkg_pr_number1}\nPR: mypool/pkgB!{pkg_pr_number2}"
|
||||
prj_pr = gitea_env.create_gitea_pr("myproducts/mySLFO", prj_diff, "Test Project PR Multi", False, base_branch=merge_branch_name, body=body)
|
||||
prj_pr_number = prj_pr["number"]
|
||||
|
||||
# 3. Add staging_bot as a reviewer.
|
||||
gitea_env.request_reviewers("myproducts/mySLFO", prj_pr_number, ["autogits_obs_staging_bot"])
|
||||
|
||||
# Wait for staging-bot to be added (verification)
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list with 2 repositories and 2 architectures for each package.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
packages = ["pkgA", "pkgB"]
|
||||
repo_arch_package_status = {}
|
||||
|
||||
# 5. Set all repository, architecture, and package statuses to "in progress" (e.g., code="building").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
for p in packages:
|
||||
repo_arch_package_status[(r, a, p)] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition all repositories and architectures for the first package to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# Verify "Build successful" is NOT yet posted
|
||||
time.sleep(5)
|
||||
events = gitea_env.get_timeline_events("myproducts/mySLFO", prj_pr_number)
|
||||
assert not any(e.get("body") == "Build successful" for e in events), "Build successful posted prematurely."
|
||||
|
||||
# 7. Transition all repositories and architectures for the second package to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgB")] = ("published", "succeeded")
|
||||
|
||||
# Expected Result 2: "Build successful" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build successful"), "Staging bot did not post 'Build successful' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build successful..." on each package PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number1, "Build successful, for more information go in", project_name), "Staging bot did not post 'Build successful' comment on package PR A."
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgB", pkg_pr_number2, "Build successful, for more information go in", project_name), "Staging bot did not post 'Build successful' comment on package PR B."
|
||||
|
||||
@pytest.mark.t005
|
||||
def test_005_build_multiple_packages_mix(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create package PRs.
|
||||
diff = "diff --git a/test_br_mix_multi_pkgA.txt b/test_br_mix_multi_pkgA.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr1 = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR Mix Multi PR A", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number1 = pr1["number"]
|
||||
pkg_head_sha1 = pr1["head"]["sha"]
|
||||
|
||||
diff2 = "diff --git a/test_br_mix_multi_pkgB.txt b/test_br_mix_multi_pkgB.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr2 = gitea_env.create_gitea_pr("mypool/pkgB", diff2, "Test BR Mix Multi PR B", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number2 = pr2["number"]
|
||||
pkg_head_sha2 = pr2["head"]["sha"]
|
||||
|
||||
# 2. Create a project PR mentioning both packages in description and UPDATING SUBMODULES.
|
||||
old_sha1 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgA", ref=merge_branch_name)
|
||||
old_sha2 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgB", ref=merge_branch_name)
|
||||
|
||||
prj_diff = create_submodule_diff("pkgA", old_sha1, pkg_head_sha1)
|
||||
prj_diff += create_submodule_diff("pkgB", old_sha2, pkg_head_sha2)
|
||||
|
||||
body = f"PR: mypool/pkgA!{pkg_pr_number1}\nPR: mypool/pkgB!{pkg_pr_number2}"
|
||||
prj_pr = gitea_env.create_gitea_pr("myproducts/mySLFO", prj_diff, "Test Project PR Multi Mix", False, base_branch=merge_branch_name, body=body)
|
||||
prj_pr_number = prj_pr["number"]
|
||||
|
||||
# 3. Add staging_bot as a reviewer.
|
||||
gitea_env.request_reviewers("myproducts/mySLFO", prj_pr_number, ["autogits_obs_staging_bot"])
|
||||
|
||||
# Wait for staging-bot to be added (verification)
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list with 2 repositories and 2 architectures for each package.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
packages = ["pkgA", "pkgB"]
|
||||
repo_arch_package_status = {}
|
||||
|
||||
# 5. Set all repository, architecture, and package statuses to "in progress" (e.g., code="building").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
for p in packages:
|
||||
repo_arch_package_status[(r, a, p)] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition the first package (all repositories and architectures) to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# 7. Transition the second package (all repositories and architectures) to "finished" mode and set its package statuses to "failed" (e.g., code="failed").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgB")] = ("finished", "failed")
|
||||
|
||||
# Expected Result 2: "Build failed" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build failed"), "Staging bot did not post 'Build failed' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build successful..." on the successful package's PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number1, "Build successful, for more information go in", project_name), "Staging bot did not post 'Build successful' comment on package PR A."
|
||||
|
||||
# Expected Result 4: "Build failed..." on the failed package's PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgB", pkg_pr_number2, "Build failed, for more information go in", project_name), "Staging bot did not post 'Build failed' comment on package PR B."
|
||||
|
||||
@pytest.mark.t006
|
||||
def test_006_build_multiple_packages_partial_fail(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create package PRs.
|
||||
diff = "diff --git a/test_br_partial_multi_pkgA.txt b/test_br_partial_multi_pkgA.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr1 = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR Partial Multi PR A", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number1 = pr1["number"]
|
||||
pkg_head_sha1 = pr1["head"]["sha"]
|
||||
|
||||
diff2 = "diff --git a/test_br_partial_multi_pkgB.txt b/test_br_partial_multi_pkgB.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr2 = gitea_env.create_gitea_pr("mypool/pkgB", diff2, "Test BR Partial Multi PR B", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number2 = pr2["number"]
|
||||
pkg_head_sha2 = pr2["head"]["sha"]
|
||||
|
||||
# 2. Create a project PR mentioning both packages in description and UPDATING SUBMODULES.
|
||||
old_sha1 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgA", ref=merge_branch_name)
|
||||
old_sha2 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgB", ref=merge_branch_name)
|
||||
|
||||
prj_diff = create_submodule_diff("pkgA", old_sha1, pkg_head_sha1)
|
||||
prj_diff += create_submodule_diff("pkgB", old_sha2, pkg_head_sha2)
|
||||
|
||||
body = f"PR: mypool/pkgA!{pkg_pr_number1}\nPR: mypool/pkgB!{pkg_pr_number2}"
|
||||
prj_pr = gitea_env.create_gitea_pr("myproducts/mySLFO", prj_diff, "Test Project PR Multi Partial", False, base_branch=merge_branch_name, body=body)
|
||||
prj_pr_number = prj_pr["number"]
|
||||
|
||||
# 3. Add staging_bot as a reviewer.
|
||||
gitea_env.request_reviewers("myproducts/mySLFO", prj_pr_number, ["autogits_obs_staging_bot"])
|
||||
|
||||
# Wait for staging-bot to be added (verification)
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list with 2 repositories and 2 architectures for each package.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
packages = ["pkgA", "pkgB"]
|
||||
repo_arch_package_status = {}
|
||||
|
||||
# 5. Set all repository, architecture, and package statuses to "in progress" (e.g., code="building").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
for p in packages:
|
||||
repo_arch_package_status[(r, a, p)] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition the first package (all repositories and architectures) to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# 7. Transition the second package:
|
||||
# Repository 1 (all architectures) to "finished" mode with "success" (e.g., code="succeeded").
|
||||
for a in archs:
|
||||
repo_arch_package_status[("repo1", a, "pkgB")] = ("published", "succeeded")
|
||||
|
||||
# Repository 2 (all architectures) to "finished" mode with "failed" (e.g., code="failed").
|
||||
for a in archs:
|
||||
repo_arch_package_status[("repo2", a, "pkgB")] = ("finished", "failed")
|
||||
|
||||
# Expected Result 2: "Build failed" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build failed"), "Staging bot did not post 'Build failed' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build successful..." on the first (successful) package's PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number1, "Build successful, for more information go in", project_name), "Staging bot did not post 'Build successful' comment on package PR A."
|
||||
|
||||
# Expected Result 4: "Build failed..." on the second (failed) package's PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgB", pkg_pr_number2, "Build failed, for more information go in", project_name), "Staging bot did not post 'Build failed' comment on package PR B."
|
||||
|
||||
@pytest.mark.t007
|
||||
def test_007_build_multiple_packages_arch_fail(staging_main_env, httpserver):
|
||||
gitea_env, test_full_repo_name, merge_branch_name = staging_main_env
|
||||
|
||||
# 1. Create package PRs.
|
||||
diff = "diff --git a/test_br_arch_multi_pkgA.txt b/test_br_arch_multi_pkgA.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr1 = gitea_env.create_gitea_pr("mypool/pkgA", diff, "Test BR Arch Multi PR A", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number1 = pr1["number"]
|
||||
pkg_head_sha1 = pr1["head"]["sha"]
|
||||
|
||||
diff2 = "diff --git a/test_br_arch_multi_pkgB.txt b/test_br_arch_multi_pkgB.txt\nnew file mode 100644\nindex 00000000..473a0f4c\n"
|
||||
pr2 = gitea_env.create_gitea_pr("mypool/pkgB", diff2, "Test BR Arch Multi PR B", False, base_branch=merge_branch_name)
|
||||
pkg_pr_number2 = pr2["number"]
|
||||
pkg_head_sha2 = pr2["head"]["sha"]
|
||||
|
||||
# 2. Create a project PR mentioning both packages in description and UPDATING SUBMODULES.
|
||||
old_sha1 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgA", ref=merge_branch_name)
|
||||
old_sha2 = gitea_env.get_submodule_sha("myproducts", "mySLFO", "pkgB", ref=merge_branch_name)
|
||||
|
||||
prj_diff = create_submodule_diff("pkgA", old_sha1, pkg_head_sha1)
|
||||
prj_diff += create_submodule_diff("pkgB", old_sha2, pkg_head_sha2)
|
||||
|
||||
body = f"PR: mypool/pkgA!{pkg_pr_number1}\nPR: mypool/pkgB!{pkg_pr_number2}"
|
||||
prj_pr = gitea_env.create_gitea_pr("myproducts/mySLFO", prj_diff, "Test Project PR Multi Arch Fail", False, base_branch=merge_branch_name, body=body)
|
||||
prj_pr_number = prj_pr["number"]
|
||||
|
||||
# 3. Add staging_bot as a reviewer.
|
||||
gitea_env.request_reviewers("myproducts/mySLFO", prj_pr_number, ["autogits_obs_staging_bot"])
|
||||
|
||||
# Wait for staging-bot to be added (verification)
|
||||
assert wait_for_staging_bot_reviewer(gitea_env, prj_pr_number), "Staging bot was not added as a reviewer."
|
||||
|
||||
# 4. Mock the OBS result list with 2 repositories and 2 architectures for each package.
|
||||
project_name = f"openSUSE:Leap:16.0:PullRequest:{prj_pr_number}"
|
||||
repos = ["repo1", "repo2"]
|
||||
archs = ["x86_64", "aarch64"]
|
||||
packages = ["pkgA", "pkgB"]
|
||||
repo_arch_package_status = {}
|
||||
|
||||
# 5. Set all repository, architecture, and package statuses to "in progress" (e.g., code="building").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
for p in packages:
|
||||
repo_arch_package_status[(r, a, p)] = ("building", "building")
|
||||
|
||||
def build_result_handler(request):
|
||||
return create_build_result_xml(project_name, repo_arch_package_status)
|
||||
|
||||
setup_obs_mock(httpserver, project_name, build_result_handler)
|
||||
|
||||
# Expected Result 1: "Build is started in..."
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build is started in", project_name), "Staging bot did not post 'Build is started' comment."
|
||||
|
||||
# 6. Transition the first package (all repositories and architectures) to "finished" mode and set its package statuses to "success" (e.g., code="succeeded").
|
||||
for r in repos:
|
||||
for a in archs:
|
||||
repo_arch_package_status[(r, a, "pkgA")] = ("published", "succeeded")
|
||||
|
||||
# 7. Transition the second package:
|
||||
# Architecture 1 (all repositories) to "finished" mode with "success" (e.g., code="succeeded").
|
||||
for r in repos:
|
||||
repo_arch_package_status[(r, archs[0], "pkgB")] = ("published", "succeeded")
|
||||
|
||||
# Architecture 2 (all repositories) to "finished" mode with "failed" (e.g., code="failed").
|
||||
for r in repos:
|
||||
repo_arch_package_status[(r, archs[1], "pkgB")] = ("finished", "failed")
|
||||
|
||||
# Expected Result 2: "Build failed" on project PR.
|
||||
assert wait_for_comment(gitea_env, "myproducts/mySLFO", prj_pr_number, "Build failed"), "Staging bot did not post 'Build failed' comment on project PR."
|
||||
|
||||
# Expected Result 3: "Build successful..." on the first (successful) package's PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgA", pkg_pr_number1, "Build successful, for more information go in", project_name), "Staging bot did not post 'Build successful' comment on package PR A."
|
||||
|
||||
# Expected Result 4: "Build failed..." on the second (failed) package's PR.
|
||||
assert wait_for_comment(gitea_env, "mypool/pkgB", pkg_pr_number2, "Build failed, for more information go in", project_name), "Staging bot did not post 'Build failed' comment on package PR B."
|
||||
1
obs-groups-bot/.gitignore
vendored
Normal file
1
obs-groups-bot/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
obs-groups-bot
|
||||
242
obs-groups-bot/main.go
Normal file
242
obs-groups-bot/main.go
Normal file
@@ -0,0 +1,242 @@
|
||||
// Connect to the Open Build Service (OBS) API, retrieves a list of all groups,
|
||||
// and exports their metadata (specifically member lists) into individual JSON files.
|
||||
//
|
||||
// The tool supports both command-line flags and environment variables for configuration
|
||||
// (not for authentication, which is only via env vars), and includes a debug mode for verbose output.
|
||||
// It handles different XML response formats from the OBS API and ensures that
|
||||
// the output JSON files are properly sanitized and formatted.
|
||||
//
|
||||
// The accepted command-line flags are:
|
||||
//
|
||||
// -debug: Enable debug output showing API URLs and responses.
|
||||
// -instance: Name of the OBS instance (used in metadata, default "openSUSE").
|
||||
// -host: Base URL of the OBS API (default "http://localhost:3000").
|
||||
// -output: Directory to save the JSON files (default "groups").
|
||||
//
|
||||
// Usage:
|
||||
//
|
||||
// # Using environment variables (OBS_USER, OBS_PASSWORD)
|
||||
// go run main.go
|
||||
//
|
||||
// # Targeting a specific OBS instance and output directory
|
||||
// go run main.go -host "https://api.opensuse.org" -output "./obs_groups"
|
||||
//
|
||||
// # Full command with debug mode
|
||||
// go run main.go -host http://localhost:8000 -output "./obs_groups" -instance "OBS" -debug
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"encoding/xml"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
// groupsList models the <groups><group groupid="..."/></groups> XML
// shape that the OBS /group endpoint can return.
type groupsList struct {
	XMLName xml.Name    `xml:"groups"`
	Groups  []groupItem `xml:"group"`
}

// groupsListAlt models the alternative <directory><entry/></directory>
// shape returned by other OBS API versions for the same endpoint.
type groupsListAlt struct {
	XMLName xml.Name     `xml:"directory"`
	Entries []groupEntry `xml:"entry"`
}

// groupEntry is one <entry> of a <directory> listing. The group name is
// carried either in the "name" attribute or as the element's inner XML,
// depending on the server; both are captured so getName can pick one.
type groupEntry struct {
	Name  string `xml:"name,attr,omitempty"`
	Inner string `xml:",innerxml"`
}

// getName returns the entry's group name, preferring the "name"
// attribute and falling back to the raw inner XML text.
func (e *groupEntry) getName() string {
	if e.Name != "" {
		return e.Name
	}
	return e.Inner
}

// groupItem is one <group> element of a <groups> listing; only the
// groupid attribute is needed.
type groupItem struct {
	GroupID string `xml:"groupid,attr"`
}
|
||||
|
||||
func getAllGroups(client *common.ObsClient) ([]string, error) {
|
||||
res, err := client.ObsRequest("GET", []string{"group"}, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
data, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Printf("Response status: %d, body length: %d", res.StatusCode, len(data))
|
||||
if res.StatusCode != 200 {
|
||||
bodyStr := string(data)
|
||||
if len(bodyStr) > 500 {
|
||||
bodyStr = bodyStr[:500]
|
||||
}
|
||||
return nil, fmt.Errorf("Unexpected return code: %d, body: %s", res.StatusCode, bodyStr)
|
||||
}
|
||||
|
||||
// Try parsing as <groups> format
|
||||
var groupsList groupsList
|
||||
err = xml.Unmarshal(data, &groupsList)
|
||||
if err == nil && len(groupsList.Groups) > 0 {
|
||||
groupIDs := make([]string, len(groupsList.Groups))
|
||||
for i, g := range groupsList.Groups {
|
||||
groupIDs[i] = g.GroupID
|
||||
}
|
||||
return groupIDs, nil
|
||||
}
|
||||
|
||||
// Try parsing as <directory> format
|
||||
var groupsAlt groupsListAlt
|
||||
err = xml.Unmarshal(data, &groupsAlt)
|
||||
if err == nil && len(groupsAlt.Entries) > 0 {
|
||||
groupIDs := make([]string, len(groupsAlt.Entries))
|
||||
for i, e := range groupsAlt.Entries {
|
||||
groupIDs[i] = e.getName()
|
||||
}
|
||||
return groupIDs, nil
|
||||
}
|
||||
|
||||
// Log what we got
|
||||
bodyStr := string(data)
|
||||
if len(bodyStr) > 1000 {
|
||||
bodyStr = bodyStr[:1000]
|
||||
}
|
||||
log.Printf("Failed to parse XML, got: %s", bodyStr)
|
||||
return nil, fmt.Errorf("Could not parse groups response")
|
||||
}
|
||||
|
||||
// GroupOutput is the JSON document written for each exported group
// (presumably consumed by the group-review tooling — TODO confirm the
// expected schema against that consumer).
type GroupOutput struct {
	// NOTE(review): `omitempty` has no effect on struct-typed fields in
	// encoding/json, so _meta is always emitted.
	Meta      ImportMeta `json:"_meta,omitempty"`
	Name      string     `json:"Name"`      // the OBS group ID
	Reviewers []string   `json:"Reviewers"` // member user IDs of the group
	Silent    bool       `json:"Silent,omitempty"`
}

// ImportMeta records the provenance of an exported group file.
type ImportMeta struct {
	ImportedFrom string    `json:"imported_from"` // OBS instance name the data came from
	ReadOnly     bool      `json:"read_only"`     // set to true by this exporter: files are generated, not hand-edited
	ImportTime   time.Time `json:"import_time"`   // timestamp of the export run
}
|
||||
|
||||
// sanitizeFilename maps the characters that are unsafe or awkward in
// file names ('/', ':' and ' ') to underscores so a group ID can be
// used directly as a file name.
func sanitizeFilename(name string) string {
	return strings.NewReplacer(
		"/", "_",
		":", "_",
		" ", "_",
	).Replace(name)
}
|
||||
|
||||
func processGroup(client *common.ObsClient, groupID, outputDir, instanceName string, importTime time.Time) error {
|
||||
meta, err := client.GetGroupMeta(groupID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("fetching group meta: %w", err)
|
||||
}
|
||||
|
||||
if meta == nil {
|
||||
return fmt.Errorf("group not found")
|
||||
}
|
||||
|
||||
common.LogDebug(fmt.Sprintf("Group meta for %s: Title: %s, Persons: %d", groupID, meta.Title, len(meta.Persons.Persons)))
|
||||
|
||||
reviewers := make([]string, 0, len(meta.Persons.Persons))
|
||||
for _, p := range meta.Persons.Persons {
|
||||
reviewers = append(reviewers, p.UserID)
|
||||
}
|
||||
|
||||
output := GroupOutput{
|
||||
Meta: ImportMeta{
|
||||
ImportedFrom: instanceName,
|
||||
ReadOnly: true,
|
||||
ImportTime: importTime,
|
||||
},
|
||||
Name: groupID,
|
||||
Reviewers: reviewers,
|
||||
}
|
||||
|
||||
filename := sanitizeFilename(groupID) + ".json"
|
||||
filePath := filepath.Join(outputDir, filename)
|
||||
|
||||
data, err := json.MarshalIndent(output, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshaling json: %w", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(filePath, data, 0644); err != nil {
|
||||
return fmt.Errorf("writing file: %w", err)
|
||||
}
|
||||
|
||||
common.LogDebug(fmt.Sprintf("Saved group %s to %s", groupID, filePath))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
debugModePtr := flag.Bool("debug", false, "Enable debug output showing API URLs")
|
||||
obsInstance := flag.String("instance", "openSUSE", "OBS instance name (used in metadata)")
|
||||
obsHost := flag.String("host", "http://localhost:3000", "OBS API host URL")
|
||||
outputDir := flag.String("output", "groups", "Output directory for JSON files")
|
||||
flag.Parse()
|
||||
|
||||
if *debugModePtr {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
}
|
||||
|
||||
if err := common.RequireObsSecretToken(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
log.Printf("Connecting to OBS at %s (instance: %s)", *obsHost, *obsInstance)
|
||||
|
||||
client, err := common.NewObsClient(*obsHost)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create OBS client: %v", err)
|
||||
}
|
||||
|
||||
log.Println("Fetching list of all groups...")
|
||||
groupIDs, err := getAllGroups(client)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to get groups list: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Found %d groups: %v", len(groupIDs), groupIDs)
|
||||
log.Printf("Found %s ", groupIDs)
|
||||
|
||||
err = os.MkdirAll(*outputDir, 0755)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create output directory: %v", err)
|
||||
}
|
||||
|
||||
importTime := time.Now()
|
||||
|
||||
successCount := 0
|
||||
errorCount := 0
|
||||
|
||||
for i, groupID := range groupIDs {
|
||||
log.Printf("[%d/%d] Fetching group: %s", i+1, len(groupIDs), groupID)
|
||||
|
||||
if err := processGroup(client, groupID, *outputDir, *obsInstance, importTime); err != nil {
|
||||
log.Printf("Error processing group %s: %v", groupID, err)
|
||||
errorCount++
|
||||
continue
|
||||
}
|
||||
|
||||
successCount++
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
}
|
||||
|
||||
log.Printf("Done! Success: %d, Errors: %d", successCount, errorCount)
|
||||
log.Printf("JSON files saved to: %s", *outputDir)
|
||||
}
|
||||
211
obs-groups-bot/main_test.go
Normal file
211
obs-groups-bot/main_test.go
Normal file
@@ -0,0 +1,211 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"encoding/xml"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func TestGroupsListParsing(t *testing.T) {
|
||||
// Test <groups> format
|
||||
groupsXML := `<?xml version="1.0" encoding="utf-8"?>
|
||||
<groups>
|
||||
<group groupid="group1"/>
|
||||
<group groupid="group2"/>
|
||||
<group groupid="group3"/>
|
||||
</groups>`
|
||||
|
||||
var groupsList groupsList
|
||||
err := xml.Unmarshal([]byte(groupsXML), &groupsList)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to unmarshal groups XML: %v", err)
|
||||
}
|
||||
|
||||
if len(groupsList.Groups) != 3 {
|
||||
t.Errorf("Expected 3 groups, got %d", len(groupsList.Groups))
|
||||
}
|
||||
|
||||
expected := []string{"group1", "group2", "group3"}
|
||||
for i, g := range groupsList.Groups {
|
||||
if g.GroupID != expected[i] {
|
||||
t.Errorf("Expected group %s, got %s", expected[i], g.GroupID)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestProcessGroup runs processGroup end-to-end against an httptest
// server that mocks GET /group/<id>, then reads the JSON file written
// into a temp directory and compares it structurally against the
// expected GroupOutput.
func TestProcessGroup(t *testing.T) {
	// 1. Mock the OBS API server for GetGroupMeta
	groupID := "test:group"
	mockGroupMetaResponse := `<?xml version="1.0" encoding="utf-8"?>
<group>
<title>Test Group Title</title>
<person>
<person userid="user1" role="maintainer"/>
<person userid="user2" role="reviewer"/>
</person>
</group>`

	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// The group ID contains a ':' which is legal in a URL path, so
		// the mock can match the raw path directly.
		expectedPath := "/group/" + groupID
		if r.URL.Path != expectedPath {
			t.Errorf("Expected path %s, got %s", expectedPath, r.URL.Path)
			http.NotFound(w, r)
			return
		}
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(mockGroupMetaResponse))
	}))
	defer server.Close()

	// 2. Create a temporary directory for output (cleaned up by testing)
	outputDir := t.TempDir()

	// 3. Initialize client pointing to mock server
	client, err := common.NewObsClient(server.URL)
	if err != nil {
		t.Fatalf("Failed to create client: %v", err)
	}

	// 4. Call processGroup
	instanceName := "test-instance"
	importTime := time.Now().UTC().Truncate(time.Second) // Truncate for stable comparison
	err = processGroup(client, groupID, outputDir, instanceName, importTime)
	if err != nil {
		t.Fatalf("processGroup failed: %v", err)
	}

	// 5. Verify the output file; the ':' in the group ID must have been
	// replaced by sanitizeFilename.
	expectedFilename := sanitizeFilename(groupID) + ".json"
	filePath := filepath.Join(outputDir, expectedFilename)

	// Check if file exists
	if _, err := os.Stat(filePath); os.IsNotExist(err) {
		t.Fatalf("Expected output file was not created: %s", filePath)
	}

	// Read and verify file content
	data, err := os.ReadFile(filePath)
	if err != nil {
		t.Fatalf("Failed to read output file: %v", err)
	}

	var result GroupOutput
	if err := json.Unmarshal(data, &result); err != nil {
		t.Fatalf("Failed to unmarshal output JSON: %v", err)
	}

	// Assertions
	expectedReviewers := []string{"user1", "user2"}
	expectedOutput := GroupOutput{
		Meta: ImportMeta{
			ImportedFrom: instanceName,
			ReadOnly:     true,
			ImportTime:   importTime,
		},
		Name:      groupID,
		Reviewers: expectedReviewers,
	}

	// Use reflect.DeepEqual for a robust comparison of the structs
	if !reflect.DeepEqual(result, expectedOutput) {
		t.Errorf("Output JSON does not match expected.\nGot: %+v\nWant: %+v", result, expectedOutput)
	}
}
|
||||
|
||||
// TestGetAllGroups exercises getAllGroups against an httptest server
// serving the <groups> XML format, checking both the outgoing request
// (GET /group) and the parsed group IDs.
func TestGetAllGroups(t *testing.T) {
	// Mock the OBS API server
	mockResponse := `<?xml version="1.0" encoding="utf-8"?>
<groups>
<group groupid="mock-group-1"/>
<group groupid="mock-group-2"/>
</groups>`

	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Verify the request path
		if r.URL.Path != "/group" {
			t.Errorf("Expected path /group, got %s", r.URL.Path)
		}
		// Verify method
		if r.Method != "GET" {
			t.Errorf("Expected method GET, got %s", r.Method)
		}
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(mockResponse))
	}))
	defer server.Close()

	// Initialize client pointing to mock server
	client, err := common.NewObsClient(server.URL)
	if err != nil {
		t.Fatalf("Failed to create client: %v", err)
	}

	groups, err := getAllGroups(client)
	if err != nil {
		t.Fatalf("GetAllGroups failed: %v", err)
	}

	if len(groups) != 2 {
		t.Errorf("Expected 2 groups, got %d", len(groups))
	}
	if groups[0] != "mock-group-1" {
		t.Errorf("Expected first group to be mock-group-1, got %s", groups[0])
	}
}
|
||||
|
||||
func TestGroupsListDirectoryFormat(t *testing.T) {
|
||||
// Test <directory> format with name attribute
|
||||
dirXML := `<?xml version="1.0" encoding="utf-8"?>
|
||||
<directory>
|
||||
<entry name="group-a"/>
|
||||
<entry name="group-b"/>
|
||||
<entry name="group-c"/>
|
||||
</directory>`
|
||||
|
||||
var groupsAlt groupsListAlt
|
||||
err := xml.Unmarshal([]byte(dirXML), &groupsAlt)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to unmarshal directory XML: %v", err)
|
||||
}
|
||||
|
||||
if len(groupsAlt.Entries) != 3 {
|
||||
t.Errorf("Expected 3 entries, got %d", len(groupsAlt.Entries))
|
||||
}
|
||||
|
||||
expected := []string{"group-a", "group-b", "group-c"}
|
||||
for i, e := range groupsAlt.Entries {
|
||||
if e.getName() != expected[i] {
|
||||
t.Errorf("Expected entry %s, got %s", expected[i], e.getName())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSanitizeFilename(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{"simple", "simple"},
|
||||
{"group/name", "group_name"},
|
||||
{"project:group", "project_group"},
|
||||
{"group with spaces", "group_with_spaces"},
|
||||
{"group/name:space", "group_name_space"},
|
||||
{"", ""},
|
||||
{"multiple///slashes", "multiple___slashes"},
|
||||
}
|
||||
|
||||
for _, tc := range tests {
|
||||
result := sanitizeFilename(tc.input)
|
||||
if result != tc.expected {
|
||||
t.Errorf("sanitizeFilename(%q) = %q, expected %q", tc.input, result, tc.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user