2 Commits

07a5fbe4c3  direct: use relative paths when adding repositories
            All checks were successful: go-generate-check / go-generate-check (pull_request) Successful in 8s
            2026-02-09 16:39:01 +01:00

56c0832f04  common: Add relative path repository resolution helper
            All checks were successful: go-generate-check / go-generate-check (pull_request) Successful in 21s
            2026-02-09 16:09:00 +01:00
56 changed files with 205 additions and 2586 deletions

.gitignore vendored (5 changes)
View File

@@ -1,7 +1,2 @@
*.osc
*.conf
/integration/gitea-data
/integration/gitea-logs
/integration/rabbitmq-data
/integration/workflow-pr-repos
__pycache__/

View File

@@ -1,4 +0,0 @@
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
build:
for m in $(MODULES); do go build -C $$m -buildmode=pie || exit 1 ; done

View File

@@ -182,7 +182,6 @@ install -D -m0644 systemd/obs-status-service.service
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
install -D -m0644 systemd/workflow-direct@.service %{buildroot}%{_unitdir}/workflow-direct@.service
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
install -D -m0644 systemd/workflow-pr@.service %{buildroot}%{_unitdir}/workflow-pr@.service
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson
install -D -m0755 utils/maintainer-update/maintainer-update %{buildroot}%{_bindir}/maintainer-update
@@ -234,29 +233,17 @@ install -D -m0755 utils/maintainer-update/maintainer-update
%postun obs-status-service
%service_del_postun obs-status-service.service
%pre workflow-direct
%service_add_pre workflow-direct.service
%post workflow-direct
%service_add_post workflow-direct.service
%preun workflow-direct
%service_del_preun workflow-direct.service
%postun workflow-direct
%service_del_postun workflow-direct.service
%pre workflow-pr
%service_add_pre workflow-pr.service
%service_add_pre workflow-direct@.service
%post workflow-pr
%service_add_post workflow-pr.service
%service_add_post workflow-direct@.service
%preun workflow-pr
%service_del_preun workflow-pr.service
%service_del_preun workflow-direct@.service
%postun workflow-pr
%service_del_postun workflow-pr.service
%service_del_postun workflow-direct@.service
%files devel-importer
%license COPYING
@@ -315,5 +302,4 @@ install -D -m0755 utils/maintainer-update/maintainer-update
%license COPYING
%doc workflow-pr/README.md
%{_bindir}/workflow-pr
%{_unitdir}/workflow-pr@.service

View File

@@ -39,10 +39,6 @@ const (
Permission_ForceMerge = "force-merge"
Permission_Group = "release-engineering"
MergeModeFF = "ff-only"
MergeModeReplace = "replace"
MergeModeDevel = "devel"
)
type ConfigFile struct {
@@ -56,9 +52,9 @@ type ReviewGroup struct {
}
type QAConfig struct {
Name string
Origin string
Label string // requires this gitea label to be set or skipped
Name string
Origin string
Label string // requires this gitea label to be set or skipped
BuildDisableRepos []string // which repos to build disable in the new project
}
@@ -92,8 +88,7 @@ type AutogitConfig struct {
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
Labels map[string]string // list of tags, if not default, to apply
MergeMode string // project merge mode
Labels map[string]string // list of tags, if not default, to apply
NoProjectGitPR bool // do not automatically create project git PRs, just assign reviewers and assume something else creates the ProjectGit PR
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
@@ -188,17 +183,6 @@ func ReadWorkflowConfig(gitea GiteaFileContentAndRepoFetcher, git_project string
}
}
config.GitProjectName = config.GitProjectName + "#" + branch
// verify merge modes
switch config.MergeMode {
case MergeModeFF, MergeModeDevel, MergeModeReplace:
break // good results
case "":
config.MergeMode = MergeModeFF
default:
return nil, fmt.Errorf("Unsupported merge mode in %s: %s", git_project, config.MergeMode)
}
return config, nil
}

View File

@@ -342,67 +342,3 @@ func TestConfigPermissions(t *testing.T) {
})
}
}
func TestConfigMergeModeParser(t *testing.T) {
tests := []struct {
name string
json string
mergeMode string
wantErr bool
}{
{
name: "empty",
json: "{}",
mergeMode: common.MergeModeFF,
},
{
name: "ff-only",
json: `{"MergeMode": "ff-only"}`,
mergeMode: common.MergeModeFF,
},
{
name: "replace",
json: `{"MergeMode": "replace"}`,
mergeMode: common.MergeModeReplace,
},
{
name: "devel",
json: `{"MergeMode": "devel"}`,
mergeMode: common.MergeModeDevel,
},
{
name: "unsupported",
json: `{"MergeMode": "invalid"}`,
wantErr: true,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
repo := models.Repository{
DefaultBranch: "master",
}
ctl := gomock.NewController(t)
gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.json), "abc", nil)
gitea.EXPECT().GetRepository("foo", "bar").Return(&repo, nil)
config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
if test.wantErr {
if err == nil {
t.Fatal("Expected error, got nil")
}
return
}
if err != nil {
t.Fatal(err)
}
if config.MergeMode != test.mergeMode {
t.Errorf("Expected MergeMode %s, got %s", test.mergeMode, config.MergeMode)
}
})
}
}

View File

@@ -552,145 +552,6 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
return is_manually_reviewed_ok
}
func (rs *PRSet) AddMergeCommit(git Git, remote string, pr int) bool {
prinfo := rs.PRs[pr]
LogDebug("Adding merge commit for %s", PRtoString(prinfo.PR))
if !prinfo.PR.AllowMaintainerEdit {
LogError(" PR is not editable by maintainer")
return false
}
repo := prinfo.PR.Base.Repo
head := prinfo.PR.Head
br := rs.Config.Branch
if len(br) == 0 {
br = prinfo.PR.Base.Name
}
msg := fmt.Sprintf("Merge branch '%s' into %s", br, head.Name)
if err := git.GitExec(repo.Name, "merge", "--no-ff", "--no-commit", "-X", "theirs", head.Sha); err != nil {
if err := git.GitExec(repo.Name, "merge", "--no-ff", "--no-commit", "--allow-unrelated-histories", "-X", "theirs", head.Sha); err != nil {
return false
}
LogError("WARNING: Merging unrelated histories")
}
// ensure only files that are in head.Sha are kept
git.GitExecOrPanic(repo.Name, "read-tree", "-m", head.Sha)
git.GitExecOrPanic(repo.Name, "commit", "-m", msg)
git.GitExecOrPanic(repo.Name, "clean", "-fxd")
if !IsDryRun {
git.GitExecOrPanic(repo.Name, "push", remote, "HEAD:"+head.Name)
prinfo.PR.Head.Sha = strings.TrimSpace(git.GitExecWithOutputOrPanic(repo.Name, "rev-list", "-1", "HEAD")) // need to update as it's pushed but pr not refetched
}
return true
}
func (rs *PRSet) HasMerge(git Git, pr int) bool {
prinfo := rs.PRs[pr]
repo := prinfo.PR.Base.Repo
head := prinfo.PR.Head
br := rs.Config.Branch
if len(br) == 0 {
br = prinfo.PR.Base.Name
}
parents, err := git.GitExecWithOutput(repo.Name, "show", "-s", "--format=%P", head.Sha)
if err == nil {
p := strings.Fields(strings.TrimSpace(parents))
if len(p) == 2 {
targetHead, _ := git.GitExecWithOutput(repo.Name, "rev-parse", "HEAD")
targetHead = strings.TrimSpace(targetHead)
if p[0] == targetHead || p[1] == targetHead {
return true
}
}
}
return false
}
func (rs *PRSet) PrepareForMerge(git Git) bool {
// verify that package can merge here. Checkout current target branch of each PRSet, make a temporary branch
// PR_#_mergetest and perform the merge based on the configured merge mode
if rs.Config.MergeMode == MergeModeDevel {
return true // always can merge as we set branch here, not merge anything
} else {
// make sure that all the package PRs are in mergeable state
for idx, prinfo := range rs.PRs {
if rs.IsPrjGitPR(prinfo.PR) {
continue
}
repo := prinfo.PR.Base.Repo
head := prinfo.PR.Head
br := rs.Config.Branch
if len(br) == 0 {
br = prinfo.PR.Base.Name
}
remote, err := git.GitClone(repo.Name, br, repo.SSHURL)
if err != nil {
return false
}
git.GitExecOrPanic(repo.Name, "fetch", remote, head.Sha)
switch rs.Config.MergeMode {
case MergeModeFF:
if err := git.GitExec(repo.Name, "merge-base", "--is-ancestor", "HEAD", head.Sha); err != nil {
return false
}
case MergeModeReplace:
Verify:
if err := git.GitExec(repo.Name, "merge-base", "--is-ancestor", "HEAD", head.Sha); err != nil {
if !rs.HasMerge(git, idx) {
forkRemote, err := git.GitClone(repo.Name, head.Name, head.Repo.SSHURL)
if err != nil {
LogError("Failed to clone head repo:", head.Name, head.Repo.SSHURL)
return false
}
LogDebug("Merge commit is missing and this is not FF merge possibility")
git.GitExecOrPanic(repo.Name, "checkout", remote+"/"+br)
if !rs.AddMergeCommit(git, forkRemote, idx) {
return false
}
if !IsDryRun {
goto Verify
}
}
}
}
}
}
// now we check project git if mergeable
prjgit_info, err := rs.GetPrjGitPR()
if err != nil {
return false
}
prjgit := prjgit_info.PR
_, _, prjgitBranch := rs.Config.GetPrjGit()
remote, err := git.GitClone(DefaultGitPrj, prjgitBranch, prjgit.Base.Repo.SSHURL)
if err != nil {
return false
}
testBranch := fmt.Sprintf("PR_%d_mergetest", prjgit.Index)
git.GitExecOrPanic(DefaultGitPrj, "fetch", remote, prjgit.Head.Sha)
if err := git.GitExec(DefaultGitPrj, "checkout", "-B", testBranch, prjgit.Base.Sha); err != nil {
return false
}
if err := git.GitExec(DefaultGitPrj, "merge", "--no-ff", "--no-commit", prjgit.Head.Sha); err != nil {
return false
}
return true
}
func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
prjgit_info, err := rs.GetPrjGitPR()
if err != nil {
@@ -829,12 +690,8 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
}
prinfo.RemoteName, err = git.GitClone(repo.Name, br, repo.SSHURL)
PanicOnError(err)
if rs.Config.MergeMode == MergeModeDevel {
git.GitExecOrPanic(repo.Name, "checkout", "-B", br, head.Sha)
} else {
git.GitExecOrPanic(repo.Name, "fetch", prinfo.RemoteName, head.Sha)
git.GitExecOrPanic(repo.Name, "merge", "--ff", head.Sha)
}
git.GitExecOrPanic(repo.Name, "fetch", prinfo.RemoteName, head.Sha)
git.GitExecOrPanic(repo.Name, "merge", "--ff", head.Sha)
}
@@ -851,12 +708,7 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
repo := prinfo.PR.Base.Repo
if !IsDryRun {
params := []string{"push"}
if rs.Config.MergeMode == MergeModeDevel {
params = append(params, "-f")
}
params = append(params, prinfo.RemoteName)
git.GitExecOrPanic(repo.Name, params...)
git.GitExecOrPanic(repo.Name, "push", prinfo.RemoteName)
} else {
LogInfo("*** WOULD push", repo.Name, "to", prinfo.RemoteName)
}

View File

@@ -2,7 +2,6 @@ package common_test
import (
"errors"
"fmt"
"os"
"os/exec"
"path"
@@ -1229,7 +1228,7 @@ func TestPRMerge(t *testing.T) {
Owner: &models.User{
UserName: "org",
},
SSHURL: "ssh://git@src.opensuse.org/org/prj.git",
SSHURL: "file://" + path.Join(repoDir, "prjgit"),
},
},
Head: &models.PRBranchInfo{
@@ -1249,7 +1248,7 @@ func TestPRMerge(t *testing.T) {
Owner: &models.User{
UserName: "org",
},
SSHURL: "ssh://git@src.opensuse.org/org/prj.git",
SSHURL: "file://" + path.Join(cmd.Dir, "prjgit"),
},
},
Head: &models.PRBranchInfo{
@@ -1339,346 +1338,3 @@ func TestPRChanges(t *testing.T) {
})
}
}
func TestPRPrepareForMerge(t *testing.T) {
tests := []struct {
name string
setup func(*mock_common.MockGit, *models.PullRequest, *models.PullRequest)
config *common.AutogitConfig
expected bool
editable bool
}{
{
name: "Success Devel",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeDevel,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {},
expected: true,
},
{
name: "Success FF",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeFF,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("pkg", "fetch", "origin", pkgPR.Head.Sha)
m.EXPECT().GitExec("pkg", "merge-base", "--is-ancestor", "HEAD", pkgPR.Head.Sha).Return(nil)
m.EXPECT().GitClone("_ObsPrj", "master", prjPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("_ObsPrj", "fetch", "origin", prjPR.Head.Sha)
m.EXPECT().GitExec("_ObsPrj", "checkout", "-B", "PR_1_mergetest", prjPR.Base.Sha).Return(nil)
m.EXPECT().GitExec("_ObsPrj", "merge", "--no-ff", "--no-commit", prjPR.Head.Sha).Return(nil)
},
expected: true,
},
{
name: "Success Replace MergeCommit",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeReplace,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("pkg", "fetch", "origin", pkgPR.Head.Sha)
// merge-base fails initially
m.EXPECT().GitExec("pkg", "merge-base", "--is-ancestor", "HEAD", pkgPR.Head.Sha).Return(fmt.Errorf("not ancestor"))
// HasMerge returns true
m.EXPECT().GitExecWithOutput("pkg", "show", "-s", "--format=%P", pkgPR.Head.Sha).Return("parent1 target_head", nil)
m.EXPECT().GitExecWithOutput("pkg", "rev-parse", "HEAD").Return("target_head", nil)
m.EXPECT().GitClone("_ObsPrj", "master", prjPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("_ObsPrj", "fetch", "origin", prjPR.Head.Sha)
m.EXPECT().GitExec("_ObsPrj", "checkout", "-B", "PR_1_mergetest", prjPR.Base.Sha).Return(nil)
m.EXPECT().GitExec("_ObsPrj", "merge", "--no-ff", "--no-commit", prjPR.Head.Sha).Return(nil)
},
expected: true,
},
{
name: "Merge Conflict in PrjGit",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeFF,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("pkg", "fetch", "origin", pkgPR.Head.Sha)
m.EXPECT().GitExec("pkg", "merge-base", "--is-ancestor", "HEAD", pkgPR.Head.Sha).Return(nil)
m.EXPECT().GitClone("_ObsPrj", "master", prjPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("_ObsPrj", "fetch", "origin", prjPR.Head.Sha)
m.EXPECT().GitExec("_ObsPrj", "checkout", "-B", "PR_1_mergetest", prjPR.Base.Sha).Return(nil)
m.EXPECT().GitExec("_ObsPrj", "merge", "--no-ff", "--no-commit", prjPR.Head.Sha).Return(fmt.Errorf("conflict"))
},
expected: false,
},
{
name: "Not FF in PkgGit",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeFF,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("pkg", "fetch", "origin", pkgPR.Head.Sha)
m.EXPECT().GitExec("pkg", "merge-base", "--is-ancestor", "HEAD", pkgPR.Head.Sha).Return(fmt.Errorf("not ancestor"))
},
expected: false,
},
{
name: "Success Replace with AddMergeCommit",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeReplace,
},
editable: true,
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("pkg", "fetch", "origin", pkgPR.Head.Sha)
// First merge-base fails
m.EXPECT().GitExec("pkg", "merge-base", "--is-ancestor", "HEAD", pkgPR.Head.Sha).Return(fmt.Errorf("not ancestor"))
// HasMerge returns false
m.EXPECT().GitExecWithOutput("pkg", "show", "-s", "--format=%P", pkgPR.Head.Sha).Return("parent1", nil)
m.EXPECT().GitClone("pkg", pkgPR.Head.Name, pkgPR.Base.Repo.SSHURL).Return("origin_fork", nil)
// AddMergeCommit is called
m.EXPECT().GitExecOrPanic("pkg", "checkout", "origin/master")
m.EXPECT().GitExec("pkg", "merge", "--no-ff", "--no-commit", "-X", "theirs", pkgPR.Head.Sha).Return(nil)
m.EXPECT().GitExecOrPanic("pkg", "read-tree", "-m", pkgPR.Head.Sha)
m.EXPECT().GitExecOrPanic("pkg", "commit", "-m", gomock.Any())
m.EXPECT().GitExecOrPanic("pkg", "clean", "-fxd")
m.EXPECT().GitExecOrPanic("pkg", "push", "origin_fork", "HEAD:"+pkgPR.Head.Name)
m.EXPECT().GitExecWithOutputOrPanic("pkg", "rev-list", "-1", "HEAD").Return("new_pkg_head_sha")
// Second merge-base succeeds (after goto Verify)
m.EXPECT().GitExec("pkg", "merge-base", "--is-ancestor", "HEAD", "new_pkg_head_sha").Return(nil)
m.EXPECT().GitClone("_ObsPrj", "master", prjPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("_ObsPrj", "fetch", "origin", prjPR.Head.Sha)
m.EXPECT().GitExec("_ObsPrj", "checkout", "-B", "PR_1_mergetest", prjPR.Base.Sha).Return(nil)
m.EXPECT().GitExec("_ObsPrj", "merge", "--no-ff", "--no-commit", prjPR.Head.Sha).Return(nil)
},
expected: true,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
prjPR := &models.PullRequest{
Index: 1,
Base: &models.PRBranchInfo{
Name: "master",
Sha: "base_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "_ObsPrj",
SSHURL: "ssh://git@src.opensuse.org/org/_ObsPrj.git",
},
},
Head: &models.PRBranchInfo{
Sha: "head_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "_ObsPrj",
SSHURL: "ssh://git@src.opensuse.org/org/_ObsPrj.git",
},
},
}
pkgPR := &models.PullRequest{
Index: 2,
Base: &models.PRBranchInfo{
Name: "master",
Sha: "pkg_base_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "pkg",
SSHURL: "ssh://git@src.opensuse.org/org/pkg.git",
},
},
Head: &models.PRBranchInfo{
Name: "branch_name",
Sha: "pkg_head_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "pkg",
SSHURL: "ssh://git@src.opensuse.org/org/pkg.git",
},
},
AllowMaintainerEdit: test.editable,
}
ctl := gomock.NewController(t)
git := mock_common.NewMockGit(ctl)
test.setup(git, prjPR, pkgPR)
prset := &common.PRSet{
Config: test.config,
PRs: []*common.PRInfo{
{PR: prjPR},
{PR: pkgPR},
},
}
if res := prset.PrepareForMerge(git); res != test.expected {
t.Errorf("Expected %v, got %v", test.expected, res)
}
})
}
}
func TestPRMergeMock(t *testing.T) {
tests := []struct {
name string
setup func(*mock_common.MockGit, *models.PullRequest, *models.PullRequest)
config *common.AutogitConfig
}{
{
name: "Success FF",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeFF,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("_ObsPrj", "master", prjPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("_ObsPrj", "fetch", "origin", prjPR.Head.Sha)
m.EXPECT().GitExec("_ObsPrj", "merge", "--no-ff", "-m", gomock.Any(), prjPR.Head.Sha).Return(nil)
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin_pkg", nil)
m.EXPECT().GitExecOrPanic("pkg", "fetch", "origin_pkg", pkgPR.Head.Sha)
m.EXPECT().GitExecOrPanic("pkg", "merge", "--ff", pkgPR.Head.Sha)
m.EXPECT().GitExecOrPanic("pkg", "push", "origin_pkg")
m.EXPECT().GitExecOrPanic("_ObsPrj", "push", "origin")
},
},
{
name: "Success Devel",
config: &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeDevel,
},
setup: func(m *mock_common.MockGit, prjPR, pkgPR *models.PullRequest) {
m.EXPECT().GitClone("_ObsPrj", "master", prjPR.Base.Repo.SSHURL).Return("origin", nil)
m.EXPECT().GitExecOrPanic("_ObsPrj", "fetch", "origin", prjPR.Head.Sha)
m.EXPECT().GitExec("_ObsPrj", "merge", "--no-ff", "-m", gomock.Any(), prjPR.Head.Sha).Return(nil)
m.EXPECT().GitClone("pkg", "master", pkgPR.Base.Repo.SSHURL).Return("origin_pkg", nil)
m.EXPECT().GitExecOrPanic("pkg", "checkout", "-B", "master", pkgPR.Head.Sha)
m.EXPECT().GitExecOrPanic("pkg", "push", "-f", "origin_pkg")
m.EXPECT().GitExecOrPanic("_ObsPrj", "push", "origin")
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
prjPR := &models.PullRequest{
Index: 1,
Base: &models.PRBranchInfo{
Name: "master",
Sha: "prj_base_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "_ObsPrj",
SSHURL: "ssh://git@src.opensuse.org/org/_ObsPrj.git",
},
},
Head: &models.PRBranchInfo{
Sha: "prj_head_sha",
},
}
pkgPR := &models.PullRequest{
Index: 2,
Base: &models.PRBranchInfo{
Name: "master",
Sha: "pkg_base_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "pkg",
SSHURL: "ssh://git@src.opensuse.org/org/pkg.git",
},
},
Head: &models.PRBranchInfo{
Sha: "pkg_head_sha",
},
}
ctl := gomock.NewController(t)
git := mock_common.NewMockGit(ctl)
reviewUnrequestMock := mock_common.NewMockGiteaReviewUnrequester(ctl)
reviewUnrequestMock.EXPECT().UnrequestReview(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
test.setup(git, prjPR, pkgPR)
prset := &common.PRSet{
Config: test.config,
PRs: []*common.PRInfo{
{PR: prjPR},
{PR: pkgPR},
},
}
if err := prset.Merge(reviewUnrequestMock, git); err != nil {
t.Errorf("Unexpected error: %v", err)
}
})
}
}
func TestPRAddMergeCommit(t *testing.T) {
pkgPR := &models.PullRequest{
Index: 2,
Base: &models.PRBranchInfo{
Name: "master",
Sha: "pkg_base_sha",
Repo: &models.Repository{
Owner: &models.User{UserName: "org"},
Name: "pkg",
SSHURL: "ssh://git@src.opensuse.org/org/pkg.git",
},
},
Head: &models.PRBranchInfo{
Name: "branch_name",
Sha: "pkg_head_sha",
},
AllowMaintainerEdit: true,
}
config := &common.AutogitConfig{
Organization: "org",
GitProjectName: "org/_ObsPrj#master",
MergeMode: common.MergeModeReplace,
}
ctl := gomock.NewController(t)
git := mock_common.NewMockGit(ctl)
git.EXPECT().GitExec("pkg", "merge", "--no-ff", "--no-commit", "-X", "theirs", pkgPR.Head.Sha).Return(nil)
git.EXPECT().GitExecOrPanic("pkg", "read-tree", "-m", pkgPR.Head.Sha)
git.EXPECT().GitExecOrPanic("pkg", "commit", "-m", gomock.Any())
git.EXPECT().GitExecOrPanic("pkg", "clean", "-fxd")
git.EXPECT().GitExecOrPanic("pkg", "push", "origin", "HEAD:branch_name")
git.EXPECT().GitExecWithOutputOrPanic("pkg", "rev-list", "-1", "HEAD").Return("new_head_sha")
prset := &common.PRSet{
Config: config,
PRs: []*common.PRInfo{
{PR: &models.PullRequest{}}, // prjgit at index 0
{PR: pkgPR}, // pkg at index 1
},
}
if res := prset.AddMergeCommit(git, "origin", 1); !res {
t.Errorf("Expected true, got %v", res)
}
}

View File

@@ -43,6 +43,63 @@ type NewRepos struct {
const maintainership_line = "MAINTAINER"
var true_lines []string = []string{"1", "TRUE", "YES", "OK", "T"}
var InvalidUrlError error = errors.New("PrjGit or PackageGit URLs cannot be empty.")
var AbsoluteUrlError error = errors.New("PrjGit or PackageGit URLs cannot be relative.")
var HostsNotEqualError error = errors.New("PrjGit and PackageGit are not on the same host.")
var AbsoluteUrlWithQuery error = errors.New("PrjGit or PackageGit URLs with query parameters are unsupported.")
var InvalidPath error = errors.New("PrjGit or PackageGit path has unsupported format.")
func RelativeRepositoryPath(prjgit, packagegit string) (string, error) {
if len(prjgit) == 0 || len(packagegit) == 0 {
return "", InvalidUrlError
}
prjgiturl, err := url.Parse(prjgit)
if err != nil {
return "", err
}
if !prjgiturl.IsAbs() {
return "", AbsoluteUrlError
}
if len(prjgiturl.RawQuery) != 0 {
return "", AbsoluteUrlWithQuery
}
pkggiturl, err := url.Parse(packagegit)
if err != nil {
return "", err
}
if !pkggiturl.IsAbs() {
return "", AbsoluteUrlError
}
if len(pkggiturl.RawQuery) != 0 {
return "", AbsoluteUrlWithQuery
}
if pkggiturl.Hostname() != prjgiturl.Hostname() {
return "", HostsNotEqualError
}
prjgitpath := SplitStringNoEmpty(prjgiturl.Path, "/")
pkggitpath := SplitStringNoEmpty(pkggiturl.Path, "/")
if len(prjgitpath) != 2 || len(pkggitpath) != 2 {
return "", InvalidPath
}
// TrimSuffix, not TrimRight: TrimRight treats ".git" as a character set and would mangle repo names ending in '.', 'g', 'i', or 't'
prjgitpath[1] = strings.TrimSuffix(prjgitpath[1], ".git")
pkggitpath[1] = strings.TrimSuffix(pkggitpath[1], ".git")
if prjgitpath[0] == pkggitpath[0] {
if prjgitpath[1] == pkggitpath[1] {
return "", InvalidPath
}
return "../" + pkggitpath[1], nil
}
return "../../" + pkggitpath[0] + "/" + pkggitpath[1], nil
}
func HasSpace(s string) bool {
return strings.IndexFunc(s, unicode.IsSpace) >= 0

View File

@@ -7,6 +7,121 @@ import (
"src.opensuse.org/autogits/common"
)
func TestRelativeRepositoryPath(t *testing.T) {
tests := []struct {
name string
gitprj, repo string
hasError bool
relative string
}{
{
name: "Invalid repository URLs",
hasError: true,
},
{
name: "Invalid repository URLs",
gitprj: "http://test",
hasError: true,
},
{
name: "Not absolute urls",
gitprj: "foo httptest",
repo: "http://test",
hasError: true,
},
{
name: "Not absolute urls",
gitprj: "http://test",
repo: "/test",
hasError: true,
},
{
name: "Repos not on the same server",
gitprj: "https://host1/path1/path2",
repo: "https://host2/path1/path2",
hasError: true,
},
{
name: "Repos with query parameters",
gitprj: "https://host1/path1/path2?query=foo",
repo: "https://host1/path1/path3",
hasError: true,
},
{
name: "Repos with query parameters",
gitprj: "https://host1/path1/path2",
repo: "https://host1/path1/path3?query=foo",
hasError: true,
},
{
name: "Repos are the same repo",
gitprj: "https://host1/path1/path2.git",
repo: "https://host1/path1/path2",
hasError: true,
},
{
name: "Repos in same org",
gitprj: "https://host1/path1/path2.git",
repo: "https://host1/path1/path3",
relative: "../path3",
},
{
name: "Repos in same org",
gitprj: "https://host1/path1/path2.git",
repo: "https://host1/path1/path3.git",
relative: "../path3",
},
{
name: "Repos in different org",
gitprj: "https://host1/path1/path2.git",
repo: "https://host1/path2/path3.git",
relative: "../../path2/path3",
},
{
name: "Too long paths",
gitprj: "https://host1/path1/path2.git",
repo: "https://host1/path2/path3/path3.git",
hasError: true,
},
{
name: "Too long paths",
gitprj: "https://host1/path1/path2/path2.git",
repo: "https://host1/path2/path3.git",
hasError: true,
},
{
name: "SSH repos not supported",
gitprj: "https://host1/path1/path2.git",
repo: "gitea@src.opensuse.org:path1/path3.git",
hasError: true,
},
{
name: "SSH repos not supported",
gitprj: "gitea@src.opensuse.org:path1/path3.git",
repo: "https://host1/path1/path2.git",
hasError: true,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
r, err := common.RelativeRepositoryPath(test.gitprj, test.repo)
if err != nil && !test.hasError {
t.Error("Expected no error but have one", err)
}
if err == nil && test.hasError {
t.Error("Expected an error but had none. Returned:", r)
}
if err == nil && test.relative != r {
t.Error("Expected", test.relative, "but have", r)
}
})
}
}
func TestGitUrlParse(t *testing.T) {
tests := []struct {
name string
@@ -241,7 +356,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Default branch and junk lines and approval for maintainership",
name: "Default branch and junk lines and approval for maintainership",
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: yes",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -251,7 +366,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Default branch and junk lines and no maintainership",
name: "Default branch and junk lines and no maintainership",
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: NEVER",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -260,7 +375,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "3 repos with comments and maintainership",
name: "3 repos with comments and maintainership",
input: "\n\nsome comments for org1/repo2 are here and more\n\norg1/repo2#master\n org2/repo3#master\n some/repo3#m\nMaintainer ok",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -272,11 +387,11 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Invalid repos with spaces",
name: "Invalid repos with spaces",
input: "or g/repo#branch\norg/r epo#branch\norg/repo#br anch\norg/repo#branch As foo ++",
},
{
name: "Valid repos with spaces",
name: "Valid repos with spaces",
input: " org / repo # branch",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -285,7 +400,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Package name is not repo name",
name: "Package name is not repo name",
input: " org / repo # branch as repo++ \nmaintainer true",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{

View File

@@ -1,11 +0,0 @@
FROM opensuse/tumbleweed
ENV container=podman
ENV LANG=en_US.UTF-8
RUN zypper -vvvn install podman podman-compose vim make python3-pytest python3-requests python3-pytest-dependency
COPY . /opt/project/
WORKDIR /opt/project/integration

View File

@@ -1,76 +0,0 @@
# We want to be able to test in two **modes**:
# A. bots are used from official packages as defined in */Dockerfile.package
# B. bots are just picked up from binaries that are placed in corresponding parent directory.
# The topology is defined in podman-compose file and can be spawned in two ways:
# 1. Privileged container (needs no additional dependencies)
# 2. podman-compose on a local machine (needs dependencies as defined in the Dockerfile)
# Typical workflow:
# A1: - run 'make test_package'
# B1: - run 'make test_local' (make sure that the go binaries in parent folder are built)
# A2:
# 1. 'make build_package' - prepares images (recommended, otherwise there might be surprises if an image fails to build during `make up`)
# 2. 'make up' - spawns podman-compose
# 3. 'pytest -v tests/*' - run tests
# 4. 'make down' - once the containers are not needed
# B2: (make sure the go binaries in the parent folder are built)
# 1. 'make build_local' - prepares images (recommended, otherwise there might be surprises if an image fails to build during `make up`)
# 2. 'make up' - spawns podman-compose
# 3. 'pytest -v tests/*' - run tests
# 4. 'make down' - once the containers are not needed
AUTO_DETECT_MODE := $(shell if test -e ../workflow-pr/workflow-pr; then echo .local; else echo .package; fi)
# try to detect mode B1, otherwise mode A1
test: GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE)
test: build_container test_container
# mode A1
test_package: GIWTF_IMAGE_SUFFIX=.package
test_package: build_container test_container
# mode B1
test_local: GIWTF_IMAGE_SUFFIX=.local
test_local: build_container test_container
MODULES := gitea-events-rabbitmq-publisher obs-staging-bot workflow-pr
# Prepare topology 1
build_container:
podman build ../ -f integration/Dockerfile -t autogits_integration
# Run tests in topology 1
test_container:
podman run --rm --privileged -t --network integration_gitea-network -e GIWTF_IMAGE_SUFFIX=$(GIWTF_IMAGE_SUFFIX) autogits_integration /usr/bin/bash -c "make build && make up && sleep 25 && pytest -v tests/*"
build_local: AUTO_DETECT_MODE=.local
build_local: build
build_package: AUTO_DETECT_MODE=.package
build_package: build
# parse all service images from podman-compose and build them (topology 2)
build:
podman pull docker.io/library/rabbitmq:3.13.7-management
for i in $$(grep -A 1000 services: podman-compose.yml | grep -oE '^ [^: ]+'); do GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE) podman-compose build $$i || exit 1; done
# this will spawn prebuilt containers (topology 2)
up:
podman-compose up -d
# tear down (topology 2)
down:
podman-compose down
# mode A
up-bots-package:
GIWTF_IMAGE_SUFFIX=.package podman-compose up -d
# mode B
up-bots-local:
GIWTF_IMAGE_SUFFIX=.local podman-compose up -d

View File

@@ -1 +0,0 @@
sudo rm -rf gitea-data/ gitea-logs/ rabbitmq-data/ workflow-pr-repos/

View File

@@ -1 +0,0 @@
Dockerfile.package

View File

@@ -1,15 +0,0 @@
FROM registry.suse.com/bci/bci-base:15.7
# Add the custom CA to the trust store
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates
RUN zypper -n in which binutils
# Copy the pre-built binary into the container
# The user will build this and place it in the same directory as this Dockerfile
COPY gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher /usr/local/bin/
COPY integration/gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,15 +0,0 @@
FROM registry.suse.com/bci/bci-base:15.7
# Add the custom CA to the trust store
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref
RUN zypper -n in git-core curl autogits-gitea-events-rabbitmq-publisher binutils
COPY integration/gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,13 +0,0 @@
#!/bin/sh
set -e
exe=$(which gitea-events-rabbitmq-publisher 2>/dev/null) || :
exe=${exe:-/usr/local/bin/gitea-events-rabbitmq-publisher}
package=$(rpm -qa | grep autogits-gitea-events-rabbitmq-publisher) || :
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
echo "RABBITMQ_HOST: $RABBITMQ_HOST"
exec $exe "$@"

View File

@@ -1,25 +0,0 @@
FROM registry.suse.com/bci/bci-base:15.7
RUN zypper ar --repo https://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo \
&& zypper -n --gpg-auto-import-keys refresh
RUN zypper -n install \
git \
sqlite3 \
curl \
gawk \
openssh \
jq \
devel_Factory_git-workflow:gitea \
&& rm -rf /var/cache/zypp/*
# Copy the minimal set of required files from the local 'container-files' directory
COPY container-files/ /
RUN chmod -R 777 /etc/gitea/conf
# Make the setup and entrypoint scripts executable
RUN chmod +x /opt/setup/setup-gitea.sh && chmod +x /opt/setup/entrypoint.sh && chmod +x /opt/setup/setup-webhook.sh && chmod +x /opt/setup/setup-dummy-data.sh
# Use the new entrypoint script to start the container
ENTRYPOINT ["/opt/setup/entrypoint.sh"]

View File

@@ -1,42 +0,0 @@
WORK_PATH = /var/lib/gitea
[server]
CERT_FILE = /etc/gitea/https/cert.pem
KEY_FILE = /etc/gitea/https/key.pem
STATIC_ROOT_PATH = /usr/share/gitea
APP_DATA_PATH = /var/lib/gitea/data
PPROF_DATA_PATH = /var/lib/gitea/data/tmp/pprof
PROTOCOL = http
DOMAIN = gitea-test
SSH_DOMAIN = gitea-test
ROOT_URL = http://gitea-test:3000/
HTTP_PORT = 3000
DISABLE_SSH = false
START_SSH_SERVER = true
SSH_PORT = 3022
LFS_START_SERVER = true
[lfs]
PATH = /var/lib/gitea/data/lfs
[database]
DB_TYPE = sqlite3
PATH = /var/lib/gitea/data/gitea.db
[security]
INSTALL_LOCK = true
[oauth2]
ENABLED = false
[log]
ROOT_PATH = /var/log/gitea
MODE = console, file
; Either "Trace", "Debug", "Info", "Warn", "Error" or "None", default is "Info"
LEVEL = Debug
[service]
ENABLE_BASIC_AUTHENTICATION = true
[webhook]
ALLOWED_HOST_LIST = gitea-publisher

View File

@@ -1,19 +0,0 @@
#!/bin/bash
set -e
# Run setup to ensure permissions, migrations, and the admin user are ready.
# The setup script is now idempotent.
/opt/setup/setup-gitea.sh
# Start the webhook setup script in the background.
# It will wait for the main Gitea process to be ready before creating the webhook.
/opt/setup/setup-webhook.sh &
echo "Starting Gitea..."
# The original systemd service ran as user 'gitea' and group 'gitea'
# with a working directory of '/var/lib/gitea'.
# We will switch to that user and run the web command.
# Using exec means Gitea will become PID 1, allowing it to receive signals correctly.
cd /var/lib/gitea
exec su -s /bin/bash gitea -c "/usr/bin/gitea web --config /etc/gitea/conf/app.ini"

View File

@@ -1,2 +0,0 @@
#!/bin/bash
# This script is now empty as dummy data setup is handled by pytest fixtures.

View File

@@ -1,100 +0,0 @@
#!/bin/bash
set -x
set -e
# Set ownership on the volume mounts. This allows the 'gitea' user to write to them.
# We use -R to ensure all subdirectories (like /var/lib/gitea/data) are covered.
chown -R gitea:gitea /var/lib/gitea /var/log/gitea
# Set ownership on the config directory.
chown -R gitea:gitea /etc/gitea
# Run database migrations to initialize the sqlite3 db based on app.ini.
su -s /bin/bash gitea -c 'gitea migrate'
# Create a default admin user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "admin" {found=1} END {exit !found}'; then
echo "Creating admin user..."
su -s /bin/bash gitea -c 'gitea admin user create --username admin --password opensuse --email admin@example.com --must-change-password=false --admin'
else
echo "Admin user already exists."
fi
# Generate an access token for the admin user
ADMIN_TOKEN_FILE="/var/lib/gitea/admin.token"
if [ -f "$ADMIN_TOKEN_FILE" ]; then
echo "Admin token already exists at $ADMIN_TOKEN_FILE."
else
echo "Generating admin token..."
ADMIN_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t admin-token")
if [ -n "$ADMIN_TOKEN" ]; then
printf "%s" "$ADMIN_TOKEN" > "$ADMIN_TOKEN_FILE"
chmod 777 "$ADMIN_TOKEN_FILE"
chown gitea:gitea "$ADMIN_TOKEN_FILE"
echo "Admin token generated and saved to $ADMIN_TOKEN_FILE."
else
echo "Failed to generate admin token."
fi
fi
# Generate SSH key for the admin user if it doesn't exist
SSH_KEY_DIR="/var/lib/gitea/ssh-keys"
mkdir -p "$SSH_KEY_DIR"
if [ ! -f "$SSH_KEY_DIR/id_ed25519" ]; then
echo "Generating SSH key for admin user..."
ssh-keygen -t ed25519 -N "" -f "$SSH_KEY_DIR/id_ed25519"
chown -R gitea:gitea "$SSH_KEY_DIR"
chmod 700 "$SSH_KEY_DIR"
chmod 600 "$SSH_KEY_DIR/id_ed25519"
chmod 644 "$SSH_KEY_DIR/id_ed25519.pub"
fi
# Create a autogits_obs_staging_bot user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "autogits_obs_staging_bot" {found=1} END {exit !found}'; then
echo "Creating autogits_obs_staging_bot user..."
su -s /bin/bash gitea -c 'gitea admin user create --username autogits_obs_staging_bot --password opensuse --email autogits_obs_staging_bot@example.com --must-change-password=false'
else
echo "autogits_obs_staging_bot user already exists."
fi
# Generate an access token for the autogits_obs_staging_bot user
BOT_TOKEN_FILE="/var/lib/gitea/autogits_obs_staging_bot.token"
if [ -f "$BOT_TOKEN_FILE" ]; then
echo "autogits_obs_staging_bot token already exists at $BOT_TOKEN_FILE."
else
echo "Generating autogits_obs_staging_bot token..."
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u autogits_obs_staging_bot -t autogits_obs_staging_bot-token")
if [ -n "$BOT_TOKEN" ]; then
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
chmod 666 "$BOT_TOKEN_FILE"
chown gitea:gitea "$BOT_TOKEN_FILE"
echo "autogits_obs_staging_bot token generated and saved to $BOT_TOKEN_FILE."
else
echo "Failed to generate autogits_obs_staging_bot token."
fi
fi
# Create a workflow-pr user if it doesn't exist
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "workflow-pr" {found=1} END {exit !found}'; then
echo "Creating workflow-pr user..."
su -s /bin/bash gitea -c 'gitea admin user create --username workflow-pr --password opensuse --email workflow-pr@example.com --must-change-password=false'
else
echo "workflow-pr user already exists."
fi
# Generate an access token for the workflow-pr user
BOT_TOKEN_FILE="/var/lib/gitea/workflow-pr.token"
if [ -f "$BOT_TOKEN_FILE" ]; then
echo "workflow-pr token already exists at $BOT_TOKEN_FILE."
else
echo "Generating workflow-pr token..."
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u workflow-pr -t workflow-pr-token")
if [ -n "$BOT_TOKEN" ]; then
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
chmod 666 "$BOT_TOKEN_FILE"
chown gitea:gitea "$BOT_TOKEN_FILE"
echo "workflow-pr token generated and saved to $BOT_TOKEN_FILE."
else
echo "Failed to generate workflow-pr token."
fi
fi

View File

@@ -1,92 +0,0 @@
#!/bin/bash
set -e
GITEA_URL="http://localhost:3000"
WEBHOOK_URL="http://gitea-publisher:8002/rabbitmq-forwarder"
TOKEN_NAME="webhook-creator"
echo "Webhook setup script started in background."
# Wait 10s for the main Gitea process to start
sleep 10
# Wait for Gitea API to be ready
echo "Waiting for Gitea API at $GITEA_URL..."
while ! curl -s -f "$GITEA_URL/api/v1/version" > /dev/null; do
echo "Gitea API not up yet, waiting 5s..."
sleep 5
done
echo "Gitea API is up."
# The `gitea admin` command needs to be run as the gitea user.
# The -raw flag gives us the token directly.
echo "Generating or retrieving admin token..."
TOKEN_FILE="/var/lib/gitea/admin.token"
if [ -f "$TOKEN_FILE" ]; then
TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
echo "Admin token loaded from $TOKEN_FILE."
else
TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t $TOKEN_NAME")
if [ -n "$TOKEN" ]; then
printf "%s" "$TOKEN" > "$TOKEN_FILE"
chmod 666 "$TOKEN_FILE"
chown gitea:gitea "$TOKEN_FILE"
echo "Admin token generated and saved to $TOKEN_FILE."
fi
fi
if [ -z "$TOKEN" ]; then
echo "Failed to generate or retrieve admin token. This might be because the token already exists in Gitea but not in $TOKEN_FILE. Exiting."
exit 1
fi
# Run the dummy data setup script
/opt/setup/setup-dummy-data.sh "$GITEA_URL" "$TOKEN"
# Add SSH key via API
PUB_KEY_FILE="/var/lib/gitea/ssh-keys/id_ed25519.pub"
if [ -f "$PUB_KEY_FILE" ]; then
echo "Checking for existing SSH key 'bot-key'..."
KEYS_URL="$GITEA_URL/api/v1/admin/users/workflow-pr/keys"
EXISTING_KEYS=$(curl -s -X GET -H "Authorization: token $TOKEN" "$KEYS_URL")
if ! echo "$EXISTING_KEYS" | grep -q "\"title\":\"bot-key\""; then
echo "Registering SSH key 'bot-key' via API..."
KEY_CONTENT=$(cat "$PUB_KEY_FILE")
curl -s -X POST "$KEYS_URL" \
-H "Authorization: token $TOKEN" \
-H "Content-Type: application/json" \
-d "{
\"key\": \"$KEY_CONTENT\",
\"read_only\": false,
\"title\": \"bot-key\"
}"
echo -e "\nSSH key registered."
else
echo "SSH key 'bot-key' already registered."
fi
fi
# Check if the webhook already exists
echo "Checking for existing system webhook..."
DB_PATH="/var/lib/gitea/data/gitea.db"
EXISTS=$(su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"SELECT 1 FROM webhook WHERE url = '$WEBHOOK_URL' AND is_system_webhook = 1 LIMIT 1;\"")
if [ "$EXISTS" = "1" ]; then
echo "System webhook for $WEBHOOK_URL already exists. Exiting."
exit 0
fi
echo "Creating Gitea system webhook for $WEBHOOK_URL via direct database INSERT..."
# The events JSON requires escaped double quotes for the sqlite3 command.
EVENTS_JSON='{\"push_only\":false,\"send_everything\":true,\"choose_events\":false,\"branch_filter\":\"*\",\"events\":{\"create\":false,\"delete\":false,\"fork\":false,\"issue_assign\":false,\"issue_comment\":false,\"issue_label\":false,\"issue_milestone\":false,\"issues\":false,\"package\":false,\"pull_request\":false,\"pull_request_assign\":false,\"pull_request_comment\":false,\"pull_request_label\":false,\"pull_request_milestone\":false,\"pull_request_review\":false,\"pull_request_review_request\":false,\"pull_request_sync\":false,\"push\":false,\"release\":false,\"repository\":false,\"status\":false,\"wiki\":false,\"workflow_job\":false,\"workflow_run\":false}}'
NOW_UNIX=$(date +%s)
INSERT_CMD="INSERT INTO webhook (repo_id, owner_id, is_system_webhook, url, http_method, content_type, events, is_active, type, meta, created_unix, updated_unix) VALUES (0, 0, 1, '$WEBHOOK_URL', 'POST', 1, '$EVENTS_JSON', 1, 'gitea', '', $NOW_UNIX, $NOW_UNIX);"
su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"$INSERT_CMD\""
echo "System webhook created successfully."
exit 0

View File

@@ -1,14 +0,0 @@
# Use a base Python image
FROM registry.suse.com/bci/python:3.11
# Set the working directory
WORKDIR /app
# Copy the server script
COPY server.py .
# Expose the port the server will run on
EXPOSE 8080
# Command to run the server
CMD ["python3", "-u", "server.py"]

View File

@@ -1,18 +0,0 @@
<project name="openSUSE:Leap:16.0:PullRequest">
<title>Leap 16.0 PullRequest area</title>
<description>Base project to define the pull request builds</description>
<person userid="autogits_obs_staging_bot" role="maintainer"/>
<person userid="maxlin_factory" role="maintainer"/>
<group groupid="maintenance-opensuse.org" role="maintainer"/>
<debuginfo>
<enable/>
</debuginfo>
<repository name="standard">
<path project="openSUSE:Leap:16.0" repository="standard"/>
<arch>x86_64</arch>
<arch>i586</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
</project>

View File

@@ -1,59 +0,0 @@
<project name="openSUSE:Leap:16.0">
<title>openSUSE Leap 16.0 based on SLFO</title>
<description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
<link project="openSUSE:Backports:SLE-16.0"/>
<scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
<person userid="dimstar_suse" role="maintainer"/>
<person userid="lkocman-factory" role="maintainer"/>
<person userid="maxlin_factory" role="maintainer"/>
<person userid="factory-auto" role="reviewer"/>
<person userid="licensedigger" role="reviewer"/>
<group groupid="autobuild-team" role="maintainer"/>
<group groupid="factory-maintainers" role="maintainer"/>
<group groupid="maintenance-opensuse.org" role="maintainer"/>
<group groupid="factory-staging" role="reviewer"/>
<build>
<disable repository="ports"/>
</build>
<debuginfo>
<enable/>
</debuginfo>
<repository name="standard" rebuild="local">
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
<path project="SUSE:SLFO:1.2" repository="standard"/>
<arch>local</arch>
<arch>i586</arch>
<arch>x86_64</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
<repository name="product">
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
<path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
<path project="openSUSE:Leap:16.0" repository="images"/>
<path project="openSUSE:Leap:16.0" repository="standard"/>
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
<path project="SUSE:SLFO:1.2" repository="standard"/>
<arch>local</arch>
<arch>i586</arch>
<arch>x86_64</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
<repository name="ports">
<arch>armv7l</arch>
</repository>
<repository name="images">
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
<path project="openSUSE:Leap:16.0" repository="standard"/>
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
<path project="SUSE:SLFO:1.2" repository="standard"/>
<arch>i586</arch>
<arch>x86_64</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
</project>

View File

@@ -1,140 +0,0 @@
import http.server
import socketserver
import os
import logging
import signal
import sys
import threading
import fnmatch
PORT = 8080
RESPONSE_DIR = "/app/responses"
STATE_DIR = "/tmp/mock_obs_state"
class MockOBSHandler(http.server.SimpleHTTPRequestHandler):
def do_GET(self):
logging.info(f"GET request for: {self.path}")
path_without_query = self.path.split('?')[0]
# Check for state stored by a PUT request first
sanitized_put_path = 'PUT' + path_without_query.replace('/', '_')
state_file_path = os.path.join(STATE_DIR, sanitized_put_path)
if os.path.exists(state_file_path):
logging.info(f"Found stored PUT state for {self.path} at {state_file_path}")
self.send_response(200)
self.send_header("Content-type", "application/xml")
file_size = os.path.getsize(state_file_path)
self.send_header("Content-Length", str(file_size))
self.end_headers()
with open(state_file_path, 'rb') as f:
self.wfile.write(f.read())
return
# If no PUT state file, fall back to the glob/exact match logic
self.handle_request('GET')
def do_PUT(self):
logging.info(f"PUT request for: {self.path}")
logging.info(f"Headers: {self.headers}")
path_without_query = self.path.split('?')[0]
body = b''
if self.headers.get('Transfer-Encoding', '').lower() == 'chunked':
logging.info("Chunked transfer encoding detected")
while True:
line = self.rfile.readline().strip()
if not line:
break
chunk_length = int(line, 16)
if chunk_length == 0:
self.rfile.readline()
break
body += self.rfile.read(chunk_length)
self.rfile.read(2) # Read the trailing CRLF
else:
content_length = int(self.headers.get('Content-Length', 0))
body = self.rfile.read(content_length)
logging.info(f"Body: {body.decode('utf-8')}")
sanitized_path = 'PUT' + path_without_query.replace('/', '_')
state_file_path = os.path.join(STATE_DIR, sanitized_path)
logging.info(f"Saving state for {self.path} to {state_file_path}")
os.makedirs(os.path.dirname(state_file_path), exist_ok=True)
with open(state_file_path, 'wb') as f:
f.write(body)
self.send_response(200)
self.send_header("Content-type", "text/plain")
response_body = b"OK"
self.send_header("Content-Length", str(len(response_body)))
self.end_headers()
self.wfile.write(response_body)
def do_POST(self):
logging.info(f"POST request for: {self.path}")
self.handle_request('POST')
def do_DELETE(self):
logging.info(f"DELETE request for: {self.path}")
self.handle_request('DELETE')
def handle_request(self, method):
path_without_query = self.path.split('?')[0]
sanitized_request_path = method + path_without_query.replace('/', '_')
logging.info(f"Handling request, looking for match for: {sanitized_request_path}")
response_file = None
# Check for glob match first
if os.path.exists(RESPONSE_DIR):
for filename in os.listdir(RESPONSE_DIR):
if fnmatch.fnmatch(sanitized_request_path, filename):
response_file = os.path.join(RESPONSE_DIR, filename)
logging.info(f"Found matching response file (glob): {response_file}")
break
# Fallback to exact match if no glob match
if response_file is None:
exact_file = os.path.join(RESPONSE_DIR, sanitized_request_path)
if os.path.exists(exact_file):
response_file = exact_file
logging.info(f"Found matching response file (exact): {response_file}")
if response_file:
logging.info(f"Serving content from {response_file}")
self.send_response(200)
self.send_header("Content-type", "application/xml")
file_size = os.path.getsize(response_file)
self.send_header("Content-Length", str(file_size))
self.end_headers()
with open(response_file, 'rb') as f:
self.wfile.write(f.read())
else:
logging.info(f"Response file not found for {sanitized_request_path}. Sending 404.")
self.send_response(404)
self.send_header("Content-type", "text/plain")
body = f"Mock response not found for {sanitized_request_path}".encode('utf-8')
self.send_header("Content-Length", str(len(body)))
self.end_headers()
self.wfile.write(body)
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')
if not os.path.exists(STATE_DIR):
logging.info(f"Creating state directory: {STATE_DIR}")
os.makedirs(STATE_DIR)
if not os.path.exists(RESPONSE_DIR):
os.makedirs(RESPONSE_DIR)
with socketserver.TCPServer(("", PORT), MockOBSHandler) as httpd:
logging.info(f"Serving mock OBS API on port {PORT}")
def graceful_shutdown(sig, frame):
logging.info("Received SIGTERM, shutting down gracefully...")
threading.Thread(target=httpd.shutdown).start()
signal.signal(signal.SIGTERM, graceful_shutdown)
httpd.serve_forever()
logging.info("Server has shut down.")

View File

@@ -1 +0,0 @@
./Dockerfile.package

View File

@@ -1,18 +0,0 @@
# Use a base Python image
FROM registry.suse.com/bci/bci-base:15.7
# Install any necessary dependencies for the bot
# e.g., git, curl, etc.
RUN zypper -n in git-core curl binutils
# Copy the bot binary and its entrypoint script
COPY obs-staging-bot/obs-staging-bot /usr/local/bin/obs-staging-bot
COPY integration/obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
# Create a non-root user to run the bot
RUN useradd -m -u 1001 bot
USER 1001
# Set the entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,19 +0,0 @@
# Use a base Python image
FROM registry.suse.com/bci/bci-base:15.7
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref
# Install any necessary dependencies for the bot
# e.g., git, curl, etc.
RUN zypper -n in git-core curl autogits-obs-staging-bot binutils
COPY integration/obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
# Create a non-root user to run the bot
RUN useradd -m -u 1001 bot
USER 1001
# Set the entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,28 +0,0 @@
#!/bin/sh
set -e
# This script waits for the Gitea admin token to be created,
# exports it as an environment variable, and then executes the main container command.
TOKEN_FILE="/gitea-data/autogits_obs_staging_bot.token"
echo "OBS Staging Bot: Waiting for Gitea autogits_obs_staging_bot token at $TOKEN_FILE..."
while [ ! -s "$TOKEN_FILE" ]; do
sleep 2
done
export GITEA_TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
echo "OBS Staging Bot: GITEA_TOKEN exported."
# Execute the bot as the current user (root), using 'env' to pass required variables.
echo "OBS Staging Bot: Executing bot..."
exe=$(which obs-staging-bot)
exe=${exe:-/usr/local/bin/obs-staging-bot}
package=$(rpm -qa | grep autogits-obs-staging-bot) || :
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
exec $exe "$@"

View File

@@ -1,136 +0,0 @@
version: "3.8"
networks:
gitea-network:
driver: bridge
services:
gitea:
build: ./gitea
container_name: gitea-test
environment:
- GITEA_WORK_DIR=/var/lib/gitea
networks:
- gitea-network
ports:
# Map the HTTP and SSH ports defined in your app.ini
- "3000:3000"
- "3022:3022"
volumes:
# Persist Gitea's data (repositories, sqlite db, etc.) to a local directory
# The :z flag allows sharing between containers
- ./gitea-data:/var/lib/gitea:z
# Persist Gitea's logs to a local directory
- ./gitea-logs:/var/log/gitea:Z
restart: unless-stopped
rabbitmq:
image: rabbitmq:3.13.7-management
container_name: rabbitmq-test
healthcheck:
test: ["CMD", "rabbitmq-diagnostics", "check_running", "-q"]
interval: 30s
timeout: 30s
retries: 3
networks:
- gitea-network
ports:
# AMQP protocol port with TLS
- "5671:5671"
# HTTP management UI
- "15672:15672"
volumes:
# Persist RabbitMQ data
- ./rabbitmq-data:/var/lib/rabbitmq:Z
# Mount TLS certs
- ./rabbitmq-config/certs:/etc/rabbitmq/certs:Z
# Mount rabbitmq config
- ./rabbitmq-config/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:Z
# Mount exchange definitions
- ./rabbitmq-config/definitions.json:/etc/rabbitmq/definitions.json:Z
restart: unless-stopped
gitea-publisher:
build:
context: ..
dockerfile: integration/gitea-events-rabbitmq-publisher/Dockerfile${GIWTF_IMAGE_SUFFIX}
container_name: gitea-publisher
networks:
- gitea-network
depends_on:
gitea:
condition: service_started
rabbitmq:
condition: service_healthy
environment:
- RABBITMQ_HOST=rabbitmq-test
- RABBITMQ_USERNAME=gitea
- RABBITMQ_PASSWORD=gitea
- SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
command: [ "-listen", "0.0.0.0:8002", "-topic-domain", "suse", "-debug" ]
restart: unless-stopped
workflow-pr:
build:
context: ..
dockerfile: integration/workflow-pr/Dockerfile${GIWTF_IMAGE_SUFFIX}
container_name: workflow-pr
networks:
- gitea-network
depends_on:
gitea:
condition: service_started
rabbitmq:
condition: service_healthy
environment:
- AMQP_USERNAME=gitea
- AMQP_PASSWORD=gitea
- SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
volumes:
- ./gitea-data:/var/lib/gitea:ro,z
- ./workflow-pr/workflow-pr.json:/etc/workflow-pr.json:ro,z
- ./workflow-pr-repos:/var/lib/workflow-pr/repos:Z
command: [
"-check-on-start",
"-debug",
"-gitea-url", "http://gitea-test:3000",
"-url", "amqps://rabbitmq-test:5671",
"-config", "/etc/workflow-pr.json",
"-repo-path", "/var/lib/workflow-pr/repos"
]
restart: unless-stopped
mock-obs:
build: ./mock-obs
container_name: mock-obs
networks:
- gitea-network
ports:
- "8080:8080"
volumes:
- ./mock-obs/responses:/app/responses:z # Use :z for shared SELinux label
restart: unless-stopped
obs-staging-bot:
build:
context: ..
dockerfile: integration/obs-staging-bot/Dockerfile${GIWTF_IMAGE_SUFFIX}
container_name: obs-staging-bot
networks:
- gitea-network
depends_on:
gitea:
condition: service_started
mock-obs:
condition: service_started
environment:
- OBS_USER=mock
- OBS_PASSWORD=mock-long-password
volumes:
- ./gitea-data:/gitea-data:ro,z
command:
- "-debug"
- "-gitea-url=http://gitea-test:3000"
- "-obs=http://mock-obs:8080"
- "-obs-web=http://mock-obs:8080"
restart: unless-stopped

View File

@@ -1,30 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFKzCCAxOgAwIBAgIUJsg/r0ZyIVxtAkrlZKOr4LvYEvMwDQYJKoZIhvcNAQEL
BQAwGDEWMBQGA1UEAwwNcmFiYml0bXEtdGVzdDAeFw0yNjAxMjQxMjQyMjNaFw0z
NjAxMjIxMjQyMjNaMBgxFjAUBgNVBAMMDXJhYmJpdG1xLXRlc3QwggIiMA0GCSqG
SIb3DQEBAQUAA4ICDwAwggIKAoICAQC9OjTq4DgqVo0mRpS8DGRR6SFrSpb2bqnl
YI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4WyRQH5PwnXOA1Lxz51qp1
t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sbKY1+/NVJpMs6dL6NzJXG
N6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYUmm43tjjxPZ81rhCXFx/F
v1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKXOPDxrM1erBN5eCwTpJMS
4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVAsqhiUa2eeAVkFwZl9wRw
lddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hAY/V+VbF1iHGAVIz4XlUj
gC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP+1hZJbm7ePsS5Na+qYG1
LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDPXCxFXDrIcFvldmJYTyeK
7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9bBjNnWaY2z17hijnzVVz
jOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrkNt7DOCUqwBnuBqYiRZFt
A1QicHxaEQIDAQABo20wazAdBgNVHQ4EFgQU3l25Ghab2k7UhwxftZ2vZ1HO9Sow
HwYDVR0jBBgwFoAU3l25Ghab2k7UhwxftZ2vZ1HO9SowDwYDVR0TAQH/BAUwAwEB
/zAYBgNVHREEETAPgg1yYWJiaXRtcS10ZXN0MA0GCSqGSIb3DQEBCwUAA4ICAQB9
ilcsRqIvnyN25Oh668YC/xxyeNTIaIxjMLyJaMylBRjNwo1WfbdpXToaEXgot5gK
5HGlu3OIBBwBryNAlBtf/usxzLzmkEsm1Dsn9sJNY1ZTkD8MO9yyOtLqBlqAsIse
oPVjzSdjk1fP3uyoG/ZUVAFZHZD3/9BEsftfS13oUVxo7vYz1DSyUATT/4QTYMQB
PytL6EKJ0dLyuy7rIkZVkaUi+P7GuDXj25Mi6Zkxaw2QnssSuoqy1bAMkzEyNFK5
0wlNWEY8H3jRZuAz1T4AXb9sjeCgBKZoWXgmGbzleOophdzvlq66UGAWPWYFGp8Q
4GJognovhKzSY9+3n+rMPLAXSao48SYDlyTOZeBo1DTluR5QjVd+NWbEdIsA6buQ
a6uPTSVKsulm7hyUlEZp+SsYAtVoZx3jzKKjZXjnaxOfUFWx6pTxNXvxR7pQ/8Ls
IfduGy4VjKVQdyuwCE7eVEPDK6d53WWs6itziuj7gfq8mHvZivIA65z05lTwqkvb
1WS2aht+zacqVSYyNrK+/kJA2CST3ggc1EO73lRvbfO9LJZWMdO+f/tkXH4zkfmL
A3JtJcLOWuv+ZrZvHMpKlBFNMySxE3IeGX+Ad9bGyhZvZULut95/QD7Xy4cPRZHF
R3SRn0rn/BeTly+5fkEoFk+ttah8IbwzhduPyPIxng==
-----END CERTIFICATE-----

View File

@@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC9OjTq4DgqVo0m
RpS8DGRR6SFrSpb2bqnlYI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4W
yRQH5PwnXOA1Lxz51qp1t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sb
KY1+/NVJpMs6dL6NzJXGN6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYU
mm43tjjxPZ81rhCXFx/Fv1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKX
OPDxrM1erBN5eCwTpJMS4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVA
sqhiUa2eeAVkFwZl9wRwlddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hA
Y/V+VbF1iHGAVIz4XlUjgC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP
+1hZJbm7ePsS5Na+qYG1LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDP
XCxFXDrIcFvldmJYTyeK7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9
bBjNnWaY2z17hijnzVVzjOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrk
Nt7DOCUqwBnuBqYiRZFtA1QicHxaEQIDAQABAoICAA+AWvDpzNgVDouV6R3NkxNN
upXgPqUx9BuNETCtbal6i4AxR1l/zC9gwti82QTKQi2OeM74MHd8zjcqIkiyRsDP
wDNDKIfEAONTT+4LLoWEN5WNDGRZ4Nw1LrLqiVX+ULtNPXvynRJtLQa43PVL74oQ
pLBle23A1n0uNmcJ9w21B6ktysN9q+JVSCZodZpD6Jk1jus8JXgDXy/9Za2NMTV8
A5ShbYz/ETSBJCSnERz7GARW7TN6V0jS6vLTSqMQJyn0KYbHNDr7TPTL7psRuaI5
jP/cqxmx1/WKLo5k3cR3IW/cesDGQXZhMRQvNymXJkxvWMPS36lmfyZtbFNflw4Z
9OD+2RKt5jFDJjG8fYiYoYBdLiTj2Wdvo4mbRPNkTL75o65riDkDCQuZhDXFBm3s
B1aDv5y1AXrzNZ5JSikszKgbLNPYB0rI3unp6i0P1985w6dyel0MGG+ouaeiyrxS
9IgJDnE4BJ79mEzHTXtbZ/+3aGAK/Y6mU8Pz2s6/+6ccT0miievsMS+si1KESF31
WLnsMdcrJcxqcm7Ypo24G0yBJluSDKtD1cqQUGN1MKp+EEv1SCH+4csaa3ooRB0o
YveySjqxtmhVpQuY3egCOaXhPmX7lgYwoe+G4UIkUMwPn20WMg+jFxgPASdh4lqE
mzpePP7STvEZAr+rrLu1AoIBAQDmCEiKOsUTtJlX3awOIRtCkIqBxS1E6rpyjfxK
A6+zpXnE++8MhIJ07+9bPdOshGjS3JbJ+hu+IocbNg++rjRArYQnJh8/qBZ2GB2v
Ryfptsoxtk/xUsmOfchvk4tOjvDHZrJehUtGc+LzX/WUqpgtEk1Gnx7RGRuDNnqS
Q1+yU4NubHwOHPswBBXOnVtopcAHFpKhbKRFOHOwMZN99qcWVIkv4J9c6emcPMLI
I/QPIvwB6WmbLa0o3JNXlD4kPdqCgNW36KEFiW8m+4tgzF3HWYSAyIeBRFG7ouE6
yk5hiptPKhZlTmTAkQSssCXksiTw1rsspFULZSRyaaaPunvVAoIBAQDSlrKu+B2h
AJtxWy5MQDOiroqT3KDneIGXPYgH3/tiDmxy0CIEbSb5SqZ6zAmihs3dWWCmc1JH
YObRrqIxu+qVi4K+Uz8l7WBrS7DkjZjajq+y/mrZYUNRoL2q9mnNqRNan7zxWDJc
U4u2NH9P4LOz6ttE4OG9SC3/gZLoepA+ANZatu93749IT7z8ske0MVPP76jVI1Gl
D7cPIlzcBUdJgNV8UOkxeqU3+S6Jn17Tkx5qMWND/2BCN4voQ4pfGWSkbaHlMLh1
2SbVuR+HYPY3aPJeSY7MEPoc7d2SSVOcVDr2AQwSDSCCgIFZOZlawehUz9R51hK8
LlaccFWXhS9NAoIBAEFZNRJf48DXW4DErq5M5WuhmFeJZnTfohwNDhEQvwdwCQnW
8HBD7LO/veXTyKCH9SeCFyxF6z+2m181mn93Cc0d/h8JC3OQEuF1tGko88PHc+Vv
f4J1HGFohlp8NeUZYnmjSSTlBR98qIqvRhr348daHa3kYmLQmSpLfcKzdSo542qp
UwzHWuynHHLX7THrdIQO+5T0Qi6P/P2e9+GfApSra1W4oE1K/lyuPj+RRzJNo/3/
C0tUTI8BKrKEoKq3D65nX0+hvKzQAE24xD25kSKi4aucTDKC8B04BngnJOE8+SYi
NL6O6Lxz9joAyKMRoMDyn7Xs8WQNVa9TKEhImAkCggEBAMljmIm/egZIoF7thf8h
vr+rD5eL/Myf776E95wgVTVW+dtqs71r7UOmYkM48VXeeO1f1hAYZO0h/Fs2GKJb
RWGyQ1xkHBXXRsgVYJuR1kXdAqW4rNIqM8jSYdAnStOFB5849+YOJEsrEocy+TWY
fAJpbTwXm4n6hxK8BZQR8fN5tYSXQbd+/5V1vBQlInFuYuqOFPWPizrBJp1wjUFU
QvJGJON4NSo+UdaPlDPEl1jabtG7XWTfylxI5qE+RgvgKuEcfyDBUQZSntLw8Pf0
gEJJOM92pPr+mVIlICoPucfcvW4ZXkO9DgP/hLOhY8jpe5fwERBa6xvPbMC6pP/8
PFkCggEBAOLtvboBThe57QRphsKHmCtRJHmT4oZzhMYsE+5GMGYzPNWod1hSyfXn
EB8iTmAFP5r7FdC10B8mMpACXuDdi2jbmlYOTU6xNTprSKtv8r8CvorWJdsQwRsy
pZ7diSCeyi0z/sIx//ov0b3WD0E8BG/HWsFbX0p5xXpaljYEv5dK7xUiWgBW+15a
N1AeVcPiXRDwhQMVcvVOvzgwKsw+Rpls/9W4hihcBHaiMcBUDFWxJtnf4ZAGAZS3
/694MOYlmfgT/cDqF9oOsCdxM0w24kL0dcUM7zPk314ixAAfUwXaxisBhS2roJ88
HsuK9JPSK/AS0IqUtKiq4LZ9ErixYF0=
-----END PRIVATE KEY-----

View File

@@ -1,35 +0,0 @@
{
"users": [
{
"name": "gitea",
"password_hash": "5IdZmMJhNb4otX/nz9Xtmkpj9khl6+5eAmXNs/oHYwQNO3jg",
"hashing_algorithm": "rabbit_password_hashing_sha256",
"tags": "administrator"
}
],
"vhosts": [
{
"name": "/"
}
],
"permissions": [
{
"user": "gitea",
"vhost": "/",
"configure": ".*",
"write": ".*",
"read": ".*"
}
],
"exchanges": [
{
"name": "pubsub",
"vhost": "/",
"type": "topic",
"durable": true,
"auto_delete": false,
"internal": false,
"arguments": {}
}
]
}

View File

@@ -1,83 +0,0 @@
# Test Plan: workflow-pr Bot
## 1. Introduction
This document outlines the test plan for the `workflow-pr` bot. The bot is responsible for synchronizing pull requests between ProjectGit and PackageGit repositories, managing reviews, and handling merges. This test plan aims to ensure the bot's functionality and reliability.
## 2. Scope
### In Scope
* Pull Request synchronization (creation, update, closing).
* Reviewer management (adding, re-adding, mandatory vs. advisory).
* Merge management, including `ManualMergeOnly` and `ManualMergeProject` flags.
* Configuration parsing (`workflow.config`).
* Label management (`staging/Auto`, `review/Pending`, `review/Done`).
* Maintainership and permissions handling.
### Out of Scope
* Package deletion requests (planned feature).
* Underlying infrastructure (Gitea, RabbitMQ, OBS).
* Performance and load testing.
* Closing a PackageGit PR (currently disabled).
## 3. Test Objectives
* Verify that pull requests are correctly synchronized between ProjectGit and PackageGit.
* Ensure that reviewers are correctly added to pull requests based on the configuration.
* Validate that pull requests are merged only when all conditions are met.
* Confirm that the bot correctly handles various configurations in `workflow.config`.
* Verify that labels are correctly applied to pull requests.
* Ensure that maintainership and permissions are correctly enforced.
## 4. Test Strategy
The testing will be conducted in a dedicated test environment that mimics the production environment. The strategy will involve a combination of:
* **Component Testing:** Testing individual components of the bot in isolation using unit tests written in Go.
* **Integration Testing:** Testing the bot's interaction with Gitea, RabbitMQ, and a mock OBS server using `pytest`.
* **End-to-End Testing:** Testing the complete workflow from creating a pull request to merging it using `pytest`.
### Test Automation
* **Unit Tests:** Go's built-in testing framework will be used to write unit tests for individual functions and methods.
* **Integration and End-to-End Tests:** `pytest` will be used to write integration and end-to-end tests that use the Gitea API to create pull requests and verify the bot's behavior; a minimal sketch follows.
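As an illustration, an integration test in this style can reuse the `gitea_env` fixture and the `GiteaAPIClient` helpers shown later in this document; the diff content and repository names below are placeholders from the test environment:

```python
def test_forwarded_pr_is_created(gitea_env):
    """Sketch: a PackageGit PR should be forwarded to a ProjectGit PR."""
    diff = ("diff --git a/f.txt b/f.txt\n"
            "new file mode 100644\nindex 0000000..e69de29\n")
    pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Integration sketch")
    # get_timeline_events retries internally until events are available
    events = gitea_env.get_timeline_events("pool/pkgA", pr["number"])
    assert any(e.get("type") == "pull_ref" for e in events), \
        "workflow-pr did not forward the package PR"
```

For the Go unit tests, coverage against the goal below can be measured with `go test -cover ./...`.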
### Success Metrics
* **Test Coverage:** The goal is to achieve at least 80% test coverage for the bot's codebase.
* **Bug Detection Rate:** The number of bugs found during the testing phase.
* **Test Pass Rate:** The percentage of test cases that pass without any issues.
## 5. Test Cases
| Test Case ID | Description | Steps to Reproduce | Expected Results | Priority |
| :--- | :--- | :--- | :--- | :--- |
| **TC-SYNC-001** | **Create ProjectGit PR from PackageGit PR** | 1. Create a new PR in a PackageGit repository. | 1. A new PR is created in the corresponding ProjectGit repository with the title "Forwarded PRs: <package_name>".<br>2. The ProjectGit PR description contains a link to the PackageGit PR (e.g., `PR: org/package_repo!pr_number`).<br>3. The package submodule in the ProjectGit PR points to the PackageGit PR's commit. | High |
| **TC-SYNC-002** | **Update ProjectGit PR from PackageGit PR** | 1. Push a new commit to an existing PackageGit PR. | 1. The corresponding ProjectGit PR's head branch is updated with the new commit. | High |
| **TC-SYNC-003** | **WIP Flag Synchronization** | 1. Mark a PackageGit PR as "Work In Progress".<br>2. Remove the WIP flag from the PackageGit PR. | 1. The corresponding ProjectGit PR is also marked as "Work In Progress".<br>2. The WIP flag on the ProjectGit PR is removed. | Medium |
| **TC-SYNC-004** | **WIP Flag (multiple referenced package PRs)** | 1. Create a ProjectGit PR that references multiple PackageGit PRs.<br>2. Mark one of the PackageGit PRs as "Work In Progress".<br>3. Remove the "Work In Progress" flag from all PackageGit PRs. | 1. The ProjectGit PR is marked as "Work In Progress".<br>2. The "Work In Progress" flag is removed from the ProjectGit PR only after it has been removed from all associated PackageGit PRs. | Medium |
| **TC-SYNC-005** | **NoProjectGitPR = true, edits disabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR without "Allow edits from maintainers" enabled. <br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The bot adds a warning comment to the PackageGit PR explaining that it cannot update the PR. | High |
| **TC-SYNC-006** | **NoProjectGitPR = true, edits enabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR with "Allow edits from maintainers" enabled.<br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The submodule commit on the project PR is updated with the new commit from the PackageGit PR. | High |
| **TC-COMMENT-001** | **Detect duplicate comments** | 1. Create a PackageGit PR.<br>2. Wait for the `workflow-pr` bot to act on the PR.<br>3. Edit the body of the PR to trigger the bot a second time. | 1. The bot should not post a duplicate comment. | High |
| **TC-REVIEW-001** | **Add mandatory reviewers** | 1. Create a new PackageGit PR. | 1. All mandatory reviewers are added to both the PackageGit and ProjectGit PRs. | High |
| **TC-REVIEW-002** | **Add advisory reviewers** | 1. Create a new PackageGit PR with advisory reviewers defined in the configuration. | 1. Advisory reviewers are added to the PR, but their approval is not required for merging. | Medium |
| **TC-REVIEW-003** | **Re-add reviewers** | 1. Push a new commit to a PackageGit PR after it has been approved. | 1. The original reviewers are re-added to the PR. | Medium |
| **TC-REVIEW-004** | **Package PR created by a maintainer** | 1. Create a PackageGit PR from the account of a package maintainer. | 1. No review is requested from other package maintainers. | High |
| **TC-REVIEW-005** | **Package PR created by an external user (approve)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers approves the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer approves the PR, the other maintainers are removed as reviewers. | High |
| **TC-REVIEW-006** | **Package PR created by an external user (reject)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers rejects the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer rejects the PR, the other maintainers are removed as reviewers. | High |
| **TC-REVIEW-007** | **Package PR created by a maintainer with ReviewRequired=true** | 1. Set `ReviewRequired = true` in `workflow.config`.<br>2. Create a PackageGit PR from the account of a package maintainer. | 1. A review is requested from other package maintainers if available. | High |
| **TC-MERGE-001** | **Automatic Merge** | 1. Create a PackageGit PR.<br>2. Ensure all mandatory reviews are completed on both project and package PRs. | 1. The PR is automatically merged. | High |
| **TC-MERGE-002** | **ManualMergeOnly with Package Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a package maintainer for that package. | 1. The PR is merged. | High |
| **TC-MERGE-003** | **ManualMergeOnly with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a user who is not a maintainer for that package. | 1. The PR is not merged. | High |
| **TC-MERGE-004** | **ManualMergeOnly with multiple packages** | 1. Create a ProjectGit PR that references multiple PackageGit PRs with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on each package PR from the account of a package maintainer. | 1. The PR is merged only after "merge ok" is commented on all associated PackageGit PRs. | High |
| **TC-MERGE-005** | **ManualMergeOnly with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a project maintainer. | 1. The PR is merged. | High |
| **TC-MERGE-006** | **ManualMergeProject with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a project maintainer. | 1. The PR is merged. | High |
| **TC-MERGE-007** | **ManualMergeProject with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a package maintainer. | 1. The PR is not merged. | High |
| **TC-CONFIG-001** | **Invalid Configuration** | 1. Provide an invalid `workflow.config` file. | 1. The bot reports an error and does not process any PRs. | High |
| **TC-LABEL-001** | **Apply `staging/Auto` label** | 1. Create a new PackageGit PR. | 1. The `staging/Auto` label is applied to the ProjectGit PR. | High |
| **TC-LABEL-002** | **Apply `review/Pending` label** | 1. Create a new PackageGit PR. | 1. The `review/Pending` label is applied to the ProjectGit PR when there are pending reviews. | Medium |
| **TC-LABEL-003** | **Apply `review/Done` label** | 1. Ensure all mandatory reviews for a PR are completed. | 1. The `review/Done` label is applied to the ProjectGit PR when all mandatory reviews are completed. | Medium |
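For the TC-MERGE cases above, the "merge ok" command is an ordinary issue comment, so tests can drive it through the standard Gitea comment endpoint. A minimal sketch reusing the `GiteaAPIClient` from the integration suite; whose token the client carries determines whether the bot treats the comment as authorized:

```python
def comment_merge_ok(client, repo_full_name: str, pr_number: int):
    """Post the 'merge ok' command on a package PR."""
    owner, repo = repo_full_name.split("/")
    # POST /repos/{owner}/{repo}/issues/{index}/comments is the regular
    # Gitea API endpoint for issue and PR comments
    client._request("POST", f"repos/{owner}/{repo}/issues/{pr_number}/comments",
                    json={"body": "merge ok"})

# e.g. comment_merge_ok(maintainer_client, "pool/pkgA", 1)
```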

View File

@@ -1,78 +0,0 @@
"""
This module contains pytest fixtures for setting up the test environment.
"""
import pytest
import requests
import time
import os
# Assuming GiteaAPIClient is in tests/lib/common_test_utils.py
from tests.lib.common_test_utils import GiteaAPIClient
@pytest.fixture(scope="session")
def gitea_env():
"""
Sets up the Gitea environment with dummy data and provides a GiteaAPIClient instance.
"""
gitea_url = "http://127.0.0.1:3000"
# Read admin token
    admin_token_path = "./gitea-data/admin.token"  # generated by the gitea setup script
admin_token = None
try:
with open(admin_token_path, "r") as f:
admin_token = f.read().strip()
except FileNotFoundError:
raise Exception(f"Admin token file not found at {admin_token_path}. Ensure it's generated and accessible.")
# Headers for authenticated requests
auth_headers = {"Authorization": f"token {admin_token}", "Content-Type": "application/json"}
# Wait for Gitea to be available
print(f"Waiting for Gitea at {gitea_url}...")
max_retries = 30
for i in range(max_retries):
try:
# Check a specific API endpoint that indicates readiness
response = requests.get(f"{gitea_url}/api/v1/version", headers=auth_headers, timeout=5)
if response.status_code == 200:
print("Gitea API is available.")
break
except requests.exceptions.ConnectionError:
pass
print(f"Gitea not ready ({response.status_code if 'response' in locals() else 'ConnectionError'}), retrying in 5 seconds... ({i+1}/{max_retries})")
time.sleep(5)
else:
raise Exception("Gitea did not become available within the expected time.")
client = GiteaAPIClient(base_url=gitea_url, token=admin_token)
# Setup dummy data
print("--- Starting Gitea Dummy Data Setup from Pytest Fixture ---")
client.create_org("products")
client.create_org("pool")
client.create_repo("products", "SLFO")
client.create_repo("pool", "pkgA")
client.create_repo("pool", "pkgB")
# The add_submodules method also creates workflow.config and staging.config
client.add_submodules("products", "SLFO")
client.add_collaborator("products", "SLFO", "autogits_obs_staging_bot", "write")
client.add_collaborator("products", "SLFO", "workflow-pr", "write")
client.add_collaborator("pool", "pkgA", "workflow-pr", "write")
client.add_collaborator("pool", "pkgB", "workflow-pr", "write")
client.update_repo_settings("products", "SLFO")
client.update_repo_settings("pool", "pkgA")
client.update_repo_settings("pool", "pkgB")
print("--- Gitea Dummy Data Setup Complete ---")
time.sleep(5) # Add a small delay for Gitea to fully process changes
yield client
# Teardown (optional, depending on test strategy)
# For now, we'll leave resources for inspection. If a clean slate is needed for each test,
# this fixture's scope would be 'function' and teardown logic would be added here.

View File

@@ -1,23 +0,0 @@
<resultlist state="0fef640bfb56c3e76fcfb698b19b59c0">
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="aarch64" code="unpublished" state="unpublished">
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
<status package="openjpeg2" code="succeeded"/>
</result>
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="ppc64le" code="unpublished" state="unpublished">
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
<status package="openjpeg2" code="succeeded"/>
</result>
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="x86_64" code="unpublished" state="unpublished">
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
<status package="openjpeg2" code="succeeded"/>
</result>
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="s390x" code="unpublished" state="unpublished">
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
<status package="openjpeg2" code="succeeded"/>
</result>
</resultlist>

View File

@@ -1,18 +0,0 @@
<project name="openSUSE:Leap:16.0:PullRequest">
<title>Leap 16.0 PullRequest area</title>
<description>Base project to define the pull request builds</description>
<person userid="autogits_obs_staging_bot" role="maintainer"/>
<person userid="maxlin_factory" role="maintainer"/>
<group groupid="maintenance-opensuse.org" role="maintainer"/>
<debuginfo>
<enable/>
</debuginfo>
<repository name="standard">
<path project="openSUSE:Leap:16.0" repository="standard"/>
<arch>x86_64</arch>
<arch>i586</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
</project>

View File

@@ -1,59 +0,0 @@
<project name="openSUSE:Leap:16.0">
<title>openSUSE Leap 16.0 based on SLFO</title>
<description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
<link project="openSUSE:Backports:SLE-16.0"/>
<scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
<person userid="dimstar_suse" role="maintainer"/>
<person userid="lkocman-factory" role="maintainer"/>
<person userid="maxlin_factory" role="maintainer"/>
<person userid="factory-auto" role="reviewer"/>
<person userid="licensedigger" role="reviewer"/>
<group groupid="autobuild-team" role="maintainer"/>
<group groupid="factory-maintainers" role="maintainer"/>
<group groupid="maintenance-opensuse.org" role="maintainer"/>
<group groupid="factory-staging" role="reviewer"/>
<build>
<disable repository="ports"/>
</build>
<debuginfo>
<enable/>
</debuginfo>
<repository name="standard" rebuild="local">
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
<path project="SUSE:SLFO:1.2" repository="standard"/>
<arch>local</arch>
<arch>i586</arch>
<arch>x86_64</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
<repository name="product">
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
<path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
<path project="openSUSE:Leap:16.0" repository="images"/>
<path project="openSUSE:Leap:16.0" repository="standard"/>
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
<path project="SUSE:SLFO:1.2" repository="standard"/>
<arch>local</arch>
<arch>i586</arch>
<arch>x86_64</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
<repository name="ports">
<arch>armv7l</arch>
</repository>
<repository name="images">
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
<path project="openSUSE:Leap:16.0" repository="standard"/>
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
<path project="SUSE:SLFO:1.2" repository="standard"/>
<arch>i586</arch>
<arch>x86_64</arch>
<arch>aarch64</arch>
<arch>ppc64le</arch>
<arch>s390x</arch>
</repository>
</project>

View File

@@ -1,301 +0,0 @@
import os
import time
import pytest
import requests
import json
import xml.etree.ElementTree as ET
from pathlib import Path
import base64
TEST_DATA_DIR = Path(__file__).parent.parent / "data"
BUILD_RESULT_TEMPLATE = TEST_DATA_DIR / "build_result.xml.template"
MOCK_RESPONSES_DIR = Path(__file__).parent.parent.parent / "mock-obs" / "responses"
MOCK_BUILD_RESULT_FILE = (
MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0:PullRequest:*__result"
)
MOCK_BUILD_RESULT_FILE1 = MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0__result"
@pytest.fixture
def mock_build_result():
"""
Fixture to create a mock build result file from the template.
Returns a factory function that the test can call with parameters.
"""
def _create_result_file(package_name: str, code: str):
tree = ET.parse(BUILD_RESULT_TEMPLATE)
root = tree.getroot()
for status_tag in root.findall(".//status"):
status_tag.set("package", package_name)
status_tag.set("code", code)
MOCK_RESPONSES_DIR.mkdir(exist_ok=True)
tree.write(MOCK_BUILD_RESULT_FILE)
tree.write(MOCK_BUILD_RESULT_FILE1)
return str(MOCK_BUILD_RESULT_FILE)
yield _create_result_file
    MOCK_BUILD_RESULT_FILE.unlink(missing_ok=True)
    MOCK_BUILD_RESULT_FILE1.unlink(missing_ok=True)
class GiteaAPIClient:
def __init__(self, base_url, token):
self.base_url = base_url
self.headers = {"Authorization": f"token {token}", "Content-Type": "application/json"}
def _request(self, method, path, **kwargs):
url = f"{self.base_url}/api/v1/{path}"
response = requests.request(method, url, headers=self.headers, **kwargs)
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
print(f"HTTPError in _request: {e}")
print(f"Response Content: {e.response.text}")
raise
return response
def create_org(self, org_name):
print(f"--- Checking organization: {org_name} ---")
try:
self._request("GET", f"orgs/{org_name}")
print(f"Organization '{org_name}' already exists.")
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Creating organization '{org_name}'...")
data = {"username": org_name, "full_name": org_name}
self._request("POST", "orgs", json=data)
print(f"Organization '{org_name}' created.")
else:
raise
def create_repo(self, org_name, repo_name):
print(f"--- Checking repository: {org_name}/{repo_name} ---")
try:
self._request("GET", f"repos/{org_name}/{repo_name}")
print(f"Repository '{org_name}/{repo_name}' already exists.")
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Creating repository '{org_name}/{repo_name}'...")
data = {
"name": repo_name,
"auto_init": True,
"default_branch": "main",
"gitignores": "Go",
"license": "MIT",
"private": False,
"readme": "Default"
}
self._request("POST", f"orgs/{org_name}/repos", json=data)
print(f"Repository '{org_name}/{repo_name}' created with a README.")
time.sleep(1) # Added delay to allow Git operations to become available
else:
raise
def add_collaborator(self, org_name, repo_name, collaborator_name, permission="write"):
print(f"--- Adding {collaborator_name} as a collaborator to {org_name}/{repo_name} with '{permission}' permission ---")
data = {"permission": permission}
# Gitea API returns 204 No Content on success and doesn't fail if already present.
self._request("PUT", f"repos/{org_name}/{repo_name}/collaborators/{collaborator_name}", json=data)
print(f"Attempted to add {collaborator_name} to {org_name}/{repo_name}.")
def add_submodules(self, org_name, repo_name):
print(f"--- Adding submodules to {org_name}/{repo_name} using diffpatch ---")
parent_repo_path = f"repos/{org_name}/{repo_name}"
try:
self._request("GET", f"{parent_repo_path}/contents/.gitmodules")
print("Submodules appear to be already added. Skipping.")
return
except requests.exceptions.HTTPError as e:
if e.response.status_code != 404:
raise
# Get latest commit SHAs for the submodules
pkg_a_sha = self._request("GET", "repos/pool/pkgA/branches/main").json()["commit"]["id"]
pkg_b_sha = self._request("GET", "repos/pool/pkgB/branches/main").json()["commit"]["id"]
if not pkg_a_sha or not pkg_b_sha:
raise Exception("Error: Could not get submodule commit SHAs. Cannot apply patch.")
diff_content = f"""diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..f1838bd
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "pkgA"]
+ path = pkgA
+ url = ../../pool/pkgA.git
+[submodule "pkgB"]
+ path = pkgB
+ url = ../../pool/pkgB.git
diff --git a/pkgA b/pkgA
new file mode 160000
index 0000000..{pkg_a_sha}
--- /dev/null
+++ b/pkgA
@@ -0,0 +1 @@
+Subproject commit {pkg_a_sha}
diff --git a/pkgB b/pkgB
new file mode 160000
index 0000000..{pkg_b_sha}
--- /dev/null
+++ b/pkgB
@@ -0,0 +1 @@
+Subproject commit {pkg_b_sha}
diff --git a/workflow.config b/workflow.config
new file mode 100644
--- /dev/null
+++ b/workflow.config
@@ -0,0 +1,8 @@
+{{
+ "Workflows": ["pr"],
+ "GitProjectName": "products/SLFO#main",
+ "Organization": "pool",
+ "Branch": "main",
+ "ManualMergeProject": true,
+ "Reviewers": [ "-autogits_obs_staging_bot" ]
+}}
diff --git a/staging.config b/staging.config
new file mode 100644
--- /dev/null
+++ b/staging.config
@@ -0,0 +1,4 @@
+{{
+ "ObsProject": "openSUSE:Leap:16.0",
+ "StagingProject": "openSUSE:Leap:16.0:PullRequest"
+}}
"""
message = "Add pkgA and pkgB as submodules and config files"
data = {
"branch": "main",
"content": diff_content,
"message": message
}
print(f"Applying submodule patch to {org_name}/{repo_name}...")
self._request("POST", f"{parent_repo_path}/diffpatch", json=data)
print("Submodule patch applied.")
def update_repo_settings(self, org_name, repo_name):
print(f"--- Updating repository settings for: {org_name}/{repo_name} ---")
repo_data = self._request("GET", f"repos/{org_name}/{repo_name}").json()
        # Ensure these are boolean values, not strings
repo_data["allow_manual_merge"] = True
repo_data["autodetect_manual_merge"] = True
self._request("PATCH", f"repos/{org_name}/{repo_name}", json=repo_data)
print(f"Repository settings for '{org_name}/{repo_name}' updated.")
def create_gitea_pr(self, repo_full_name: str, diff_content: str, title: str):
owner, repo = repo_full_name.split("/")
url = f"repos/{owner}/{repo}/pulls"
base_branch = "main"
# Create a new branch for the PR
new_branch_name = f"pr-branch-{int(time.time())}"
# Get the latest commit SHA of the base branch
base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]
# Create the new branch
self._request("POST", f"repos/{owner}/{repo}/branches", json={
"new_branch_name": new_branch_name,
"old_ref": base_commit_sha # Use the commit SHA directly
})
# Create a new file or modify an existing one in the new branch
file_path = f"test-file-{int(time.time())}.txt"
file_content = "This is a test file for the PR."
self._request("POST", f"repos/{owner}/{repo}/contents/{file_path}", json={
"content": base64.b64encode(file_content.encode('utf-8')).decode('ascii'),
"message": "Add test file",
"branch": new_branch_name
})
# Now create the PR
data = {
"head": new_branch_name, # Use the newly created branch as head
"base": base_branch,
"title": title,
"body": "Test Pull Request"
}
response = self._request("POST", url, json=data)
return response.json()
def modify_gitea_pr(self, repo_full_name: str, pr_number: int, diff_content: str, message: str):
owner, repo = repo_full_name.split("/")
# Get PR details to find the head branch
pr_details = self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}").json()
head_branch = pr_details["head"]["ref"]
file_path = f"modified-file-{int(time.time())}.txt"
file_content = "This is a modified test file for the PR."
self._request("POST", f"repos/{owner}/{repo}/contents/{file_path}", json={
"content": base64.b64encode(file_content.encode('utf-8')).decode('ascii'),
"message": message,
"branch": head_branch
})
def update_gitea_pr_properties(self, repo_full_name: str, pr_number: int, **kwargs):
owner, repo = repo_full_name.split("/")
url = f"repos/{owner}/{repo}/pulls/{pr_number}"
response = self._request("PATCH", url, json=kwargs)
return response.json()
def get_timeline_events(self, repo_full_name: str, pr_number: int):
owner, repo = repo_full_name.split("/")
url = f"repos/{owner}/{repo}/issues/{pr_number}/timeline"
# Retry logic for timeline events
for i in range(10): # Try up to 10 times
try:
response = self._request("GET", url)
timeline_events = response.json()
if timeline_events: # Check if timeline_events list is not empty
return timeline_events
print(f"Attempt {i+1}: Timeline for PR {pr_number} is empty. Retrying in 3 seconds...")
time.sleep(3)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Attempt {i+1}: Timeline for PR {pr_number} not found yet. Retrying in 3 seconds...")
time.sleep(3)
else:
raise # Re-raise other HTTP errors
raise Exception(f"Failed to retrieve timeline for PR {pr_number} after multiple retries.")
def get_comments(self, repo_full_name: str, pr_number: int):
owner, repo = repo_full_name.split("/")
url = f"repos/{owner}/{repo}/issues/{pr_number}/comments"
# Retry logic for comments
for i in range(10): # Try up to 10 times
try:
response = self._request("GET", url)
comments = response.json()
print(f"Attempt {i+1}: Comments for PR {pr_number} received: {comments}") # Added debug print
if comments: # Check if comments list is not empty
return comments
print(f"Attempt {i+1}: Comments for PR {pr_number} are empty. Retrying in 3 seconds...")
time.sleep(3)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Attempt {i+1}: Comments for PR {pr_number} not found yet. Retrying in 3 seconds...")
time.sleep(3)
else:
raise # Re-raise other HTTP errors
raise Exception(f"Failed to retrieve comments for PR {pr_number} after multiple retries.")
def get_pr_details(self, repo_full_name: str, pr_number: int):
owner, repo = repo_full_name.split("/")
url = f"repos/{owner}/{repo}/pulls/{pr_number}"
response = self._request("GET", url)
return response.json()

View File

@@ -1,153 +0,0 @@
import pytest
import re
import time
import subprocess
import requests
from pathlib import Path
from tests.lib.common_test_utils import (
GiteaAPIClient,
mock_build_result,
)
# =============================================================================
# TEST CASES
# =============================================================================
def test_pr_workflow_succeeded(gitea_env, mock_build_result):
"""End-to-end test for a successful PR workflow."""
diff = "diff --git a/test.txt b/test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should succeed")
initial_pr_number = pr["number"]
compose_dir = Path(__file__).parent.parent
forwarded_pr_number = None
print(
f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event..."
)
for _ in range(20):
time.sleep(1)
timeline_events = gitea_env.get_timeline_events("pool/pkgA", initial_pr_number)
for event in timeline_events:
if event.get("type") == "pull_ref":
if not (ref_issue := event.get("ref_issue")):
continue
url_to_check = ref_issue.get("html_url", "")
match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
if match:
forwarded_pr_number = match.group(1)
break
if forwarded_pr_number:
break
assert (
forwarded_pr_number is not None
), "Workflow bot did not create a pull_ref event on the timeline."
print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")
print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
reviewer_added = False
for _ in range(15):
time.sleep(1)
pr_details = gitea_env.get_pr_details("products/SLFO", forwarded_pr_number)
if any(
r.get("login") == "autogits_obs_staging_bot"
for r in pr_details.get("requested_reviewers", [])
):
reviewer_added = True
break
assert reviewer_added, "Staging bot was not added as a reviewer."
print("Staging bot has been added as a reviewer.")
mock_build_result(package_name="pkgA", code="succeeded")
print("Restarting obs-staging-bot...")
subprocess.run(
["podman-compose", "restart", "obs-staging-bot"],
cwd=compose_dir,
check=True,
capture_output=True,
)
print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
status_comment_found = False
for _ in range(20):
time.sleep(1)
timeline_events = gitea_env.get_timeline_events("products/SLFO", forwarded_pr_number)
for event in timeline_events:
print(event.get("body", "not a body"))
if event.get("body") and "successful" in event["body"]:
status_comment_found = True
break
if status_comment_found:
break
assert status_comment_found, "Staging bot did not post a 'successful' comment."
def test_pr_workflow_failed(gitea_env, mock_build_result):
"""End-to-end test for a failed PR workflow."""
diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should fail")
initial_pr_number = pr["number"]
compose_dir = Path(__file__).parent.parent
forwarded_pr_number = None
print(
f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event..."
)
for _ in range(20):
time.sleep(1)
timeline_events = gitea_env.get_timeline_events("pool/pkgA", initial_pr_number)
for event in timeline_events:
if event.get("type") == "pull_ref":
if not (ref_issue := event.get("ref_issue")):
continue
url_to_check = ref_issue.get("html_url", "")
match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
if match:
forwarded_pr_number = match.group(1)
break
if forwarded_pr_number:
break
assert (
forwarded_pr_number is not None
), "Workflow bot did not create a pull_ref event on the timeline."
print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")
print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
reviewer_added = False
for _ in range(15):
time.sleep(1)
pr_details = gitea_env.get_pr_details("products/SLFO", forwarded_pr_number)
if any(
r.get("login") == "autogits_obs_staging_bot"
for r in pr_details.get("requested_reviewers", [])
):
reviewer_added = True
break
assert reviewer_added, "Staging bot was not added as a reviewer."
print("Staging bot has been added as a reviewer.")
mock_build_result(package_name="pkgA", code="failed")
print("Restarting obs-staging-bot...")
subprocess.run(
["podman-compose", "restart", "obs-staging-bot"],
cwd=compose_dir,
check=True,
capture_output=True,
)
print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
status_comment_found = False
for _ in range(20):
time.sleep(1)
timeline_events = gitea_env.get_timeline_events("products/SLFO", forwarded_pr_number)
for event in timeline_events:
if event.get("body") and "failed" in event["body"]:
status_comment_found = True
break
if status_comment_found:
break
assert status_comment_found, "Staging bot did not post a 'failed' comment."

View File

@@ -1,117 +0,0 @@
import pytest
import re
import time
import subprocess
import requests
from pathlib import Path
from tests.lib.common_test_utils import (
GiteaAPIClient,
)
# =============================================================================
# TEST CASES
# =============================================================================
pytest.pr = None
pytest.pr_details = None
pytest.initial_pr_number = None
pytest.forwarded_pr_number = None
@pytest.mark.dependency()
def test_001_project_pr(gitea_env):
"""Forwarded PR correct title"""
diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pytest.pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR")
pytest.initial_pr_number = pytest.pr["number"]
time.sleep(5) # Give Gitea some time to process the PR and make the timeline available
compose_dir = Path(__file__).parent.parent
pytest.forwarded_pr_number = None
print(
f"Polling pool/pkgA PR #{pytest.initial_pr_number} timeline for forwarded PR event..."
)
    # Poll the timeline until the forwarded PR reference appears
for _ in range(20):
time.sleep(1)
timeline_events = gitea_env.get_timeline_events("pool/pkgA", pytest.initial_pr_number)
for event in timeline_events:
if event.get("type") == "pull_ref":
if not (ref_issue := event.get("ref_issue")):
continue
url_to_check = ref_issue.get("html_url", "")
match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
if match:
pytest.forwarded_pr_number = match.group(1)
break
if pytest.forwarded_pr_number:
break
assert (
pytest.forwarded_pr_number is not None
), "Workflow bot did not create a forwarded PR."
pytest.pr_details = gitea_env.get_pr_details("products/SLFO", pytest.forwarded_pr_number)
assert (
pytest.pr_details["title"] == "Forwarded PRs: pkgA"
), "Forwarded PR correct title"
@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_002_updated_project_pr(gitea_env):
"""Forwarded PR head is updated"""
diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100444\nindex 0000000..e69de21\n"
gitea_env.modify_gitea_pr("pool/pkgA", pytest.initial_pr_number, diff, "Tweaks")
sha_old = pytest.pr_details["head"]["sha"]
sha_changed = False
for _ in range(20):
time.sleep(1)
new_pr_details = gitea_env.get_pr_details("products/SLFO", pytest.forwarded_pr_number)
sha_new = new_pr_details["head"]["sha"]
if sha_new != sha_old:
print(f"Sha changed from {sha_old} to {sha_new}")
sha_changed = True
break
assert sha_changed, "Forwarded PR has sha updated"
@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_003_wip(gitea_env):
"""WIP flag set for PR"""
# 1. set WIP flag in PR f"pool/pkgA#{pytest.initial_pr_number}"
initial_pr_details = gitea_env.get_pr_details("pool/pkgA", pytest.initial_pr_number)
wip_title = "WIP: " + initial_pr_details["title"]
gitea_env.update_gitea_pr_properties("pool/pkgA", pytest.initial_pr_number, title=wip_title)
# 2. in loop check whether WIP flag is set for PR f"products/SLFO #{pytest.forwarded_pr_number}"
wip_flag_set = False
for _ in range(20):
time.sleep(1)
forwarded_pr_details = gitea_env.get_pr_details(
"products/SLFO", pytest.forwarded_pr_number
)
if "WIP: " in forwarded_pr_details["title"]:
wip_flag_set = True
break
assert wip_flag_set, "WIP flag was not set in the forwarded PR."
# Remove WIP flag from PR f"pool/pkgA#{pytest.initial_pr_number}"
initial_pr_details = gitea_env.get_pr_details("pool/pkgA", pytest.initial_pr_number)
non_wip_title = initial_pr_details["title"].replace("WIP: ", "")
gitea_env.update_gitea_pr_properties(
"pool/pkgA", pytest.initial_pr_number, title=non_wip_title
)
# In loop check whether WIP flag is removed for PR f"products/SLFO #{pytest.forwarded_pr_number}"
wip_flag_removed = False
for _ in range(20):
time.sleep(1)
forwarded_pr_details = gitea_env.get_pr_details(
"products/SLFO", pytest.forwarded_pr_number
)
if "WIP: " not in forwarded_pr_details["title"]:
wip_flag_removed = True
break
assert wip_flag_removed, "WIP flag was not removed from the forwarded PR."

View File

@@ -1 +0,0 @@
Dockerfile.package

View File

@@ -1,17 +0,0 @@
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7
# Add the custom CA to the trust store
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates
# Install git and ssh
RUN zypper -n in git-core openssh-clients binutils
# Copy the pre-built binary into the container
COPY workflow-pr/workflow-pr /usr/local/bin/workflow-pr
COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +4755 /usr/local/bin/entrypoint.sh
# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,18 +0,0 @@
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7
# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref
# Install git and ssh
RUN zypper -n in git-core openssh-clients autogits-workflow-pr binutils
COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +4755 /usr/local/bin/entrypoint.sh
# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,66 +0,0 @@
#!/bin/bash
TOKEN_FILE="/var/lib/gitea/workflow-pr.token"
# Wait for the token file to be created by the gitea setup script
echo "Waiting for $TOKEN_FILE..."
while [ ! -s "$TOKEN_FILE" ]; do
sleep 2
done
# Read token and trim whitespace/newlines
GITEA_TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ' )
if [ -z "$GITEA_TOKEN" ]; then
echo "Error: Token file $TOKEN_FILE is empty after trimming."
exit 1
fi
export GITEA_TOKEN
echo "GITEA_TOKEN exported (length: ${#GITEA_TOKEN})"
# Wait for the dummy data to be created by the gitea setup script
echo "Waiting for workflow.config in products/SLFO..."
API_URL="http://gitea-test:3000/api/v1/repos/products/SLFO/contents/workflow.config"
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
while [ "$HTTP_STATUS" != "200" ]; do
echo "workflow.config not found yet (HTTP Status: $HTTP_STATUS). Retrying in 5s..."
sleep 5
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
done
# Wait for the shared SSH key to be generated by the gitea setup script
echo "Waiting for /var/lib/gitea/ssh-keys/id_ed25519..."
while [ ! -f /var/lib/gitea/ssh-keys/id_ed25519 ]; do
sleep 2
done
export AUTOGITS_IDENTITY_FILE="/root/.ssh/id_ed25519"
# Pre-populate known_hosts with Gitea's SSH host key
echo "Preparing SSH environment in /root/.ssh..."
mkdir -p /root/.ssh
chmod 700 /root/.ssh
# Copy the private key to the standard location and set permissions
cp /var/lib/gitea/ssh-keys/id_ed25519 /root/.ssh/id_ed25519
chmod 600 /root/.ssh/id_ed25519
echo "Scanning Gitea SSH host key..."
# We try multiple times because Gitea might still be starting its SSH server
for i in {1..10}; do
ssh-keyscan -p 3022 gitea-test >> /root/.ssh/known_hosts 2>/dev/null && break
echo "Retrying ssh-keyscan in 2s..."
sleep 2
done
chmod 644 /root/.ssh/known_hosts
exe=$(which workflow-pr)
exe=${exe:-/usr/local/bin/workflow-pr}
package=$(rpm -qa | grep autogits-workflow-pr) || :
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
exec "$exe" "$@"

View File

@@ -1,3 +0,0 @@
[
"products/SLFO#main"
]

View File

@@ -857,7 +857,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
l := len(stagingConfig.ObsProject)
if l >= len(stagingConfig.StagingProject) || stagingConfig.ObsProject != stagingConfig.StagingProject[0:l] {
common.LogError("StagingProject (", stagingConfig.StagingProject, ") is not child of target project", stagingConfig.ObsProject)
return true, nil
}
}

View File

@@ -6,17 +6,13 @@ After=network-online.target
Type=exec
ExecStart=/usr/bin/workflow-direct
EnvironmentFile=-/etc/default/%i/workflow-direct.env
#DynamicUser=yes
DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict
# DynamicUser does not work as we cannot seem to be able to put SSH keyfiles into the temp home that are readable by SSH
# Also, a systemd override is needed anyway to assign a User to run this; it should be set per instance.
ProtectHome=no
PrivateTmp=yes
# RuntimeDirectory=%i
RuntimeDirectory=%i
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
# BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
# WorkingDirectory=%t/%i
BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
WorkingDirectory=%t/%i
[Install]
WantedBy=multi-user.target

View File

@@ -1,23 +0,0 @@
[Unit]
Description=WorkflowPR git bot for %i
After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/workflow-pr
EnvironmentFile=-/etc/default/%i/workflow-pr.env
#DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict
# DynamicUser does not work as we cannot seem to be able to put SSH keyfiles into the temp home that are readable by SSH
# Also, a systemd override is needed anyway to assign a User to run this; it should be set per instance.
ProtectHome=no
PrivateTmp=yes
# RuntimeDirectory=%i
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
# BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
# WorkingDirectory=%t/%i
[Install]
WantedBy=multi-user.target

View File

@@ -123,7 +123,9 @@ func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, co
common.LogError(" - ", action.Repository.Name, "repo is not sha256. Ignoring.")
return
}
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name))
relpath, err := common.RelativeRepositoryPath(prjGitRepo.CloneURL, action.Repository.Clone_Url)
common.PanicOnError(err)
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", relpath, action.Repository.Name))
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, action.Repository.Name), "branch", "--show-current"))
@@ -420,7 +422,9 @@ next_repo:
}
// add repository to git project
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", r.CloneURL, r.Name))
relpath, err := common.RelativeRepositoryPath(repo.CloneURL, r.CloneURL)
common.PanicOnError(err)
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", relpath, r.Name))
curBranch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, r.Name), "branch", "--show-current"))
if branch != curBranch {
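Both hunks replace the absolute Clone_Url with a path relative to the project repository, so the submodule URLs recorded in `.gitmodules` (e.g. `../../pool/pkgA.git` in the test fixtures above) stay valid regardless of the instance hostname. A minimal Python sketch of such a computation, assuming both repositories live on the same host; the real helper is implemented in Go, and its edge-case handling is not shown in this diff:

```python
from urllib.parse import urlparse
import posixpath

def relative_repository_path(base_clone_url: str, target_clone_url: str) -> str:
    """Sketch: derive a submodule URL for target relative to base."""
    base, target = urlparse(base_clone_url), urlparse(target_clone_url)
    if base.netloc != target.netloc:
        raise ValueError("repositories are not hosted on the same instance")
    # git resolves relative submodule URLs against the superproject's own
    # clone URL, so the walk starts at the repository path itself
    return posixpath.relpath(target.path, base.path)

# relative_repository_path("http://gitea-test:3000/products/SLFO.git",
#                          "http://gitea-test:3000/pool/pkgA.git")
# == "../../pool/pkgA.git"
```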

View File

@@ -94,19 +94,6 @@ Package Deletion Requests
If you wish to re-add a package, create a new PrjGit PR which adds again the submodule on the branch that has the "-removed" suffix. The bot will automatically remove this suffix from the project branch in the pool.
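A minimal sketch of that re-add flow, assuming the pool layout used elsewhere in this document and a previous package branch renamed to something like `main-removed`; branch and organization names here are illustrative:

```python
import subprocess

def readd_package(prjgit_checkout: str, pkg: str, removed_branch: str):
    """Re-add a deleted package by pointing its submodule at the
    '-removed' branch; the bot strips the suffix after the merge."""
    git = lambda *args: subprocess.run(("git",) + args,
                                       cwd=prjgit_checkout, check=True)
    git("checkout", "-b", f"readd-{pkg}")
    # relative URL, matching how submodules are stored in the project git
    git("submodule", "add", "-b", removed_branch,
        f"../../pool/{pkg}.git", pkg)
    git("commit", "-m", f"Re-add {pkg}")
    # push the branch and open a PrjGit pull request as usual

# e.g. readd_package("/tmp/SLFO", "pkgA", "main-removed")
```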
Merge Modes
-----------
| Merge Mode | Description
|------------|--------------------------------------------------------------------------------
| ff-only | Only allow `--ff-only` merges in the package branch. Best suited for
| | devel projects and openSUSE Tumbleweed development, where history should stay linear.
| replace | Merge via the `-X theirs` strategy; files no longer present in the PR are removed in the merge.
| | This works well for downstream codestreams, like Leap, that update their branch
| | to the latest version.
| devel | No merge; the project branch is simply set to the PR HEAD. Suitable for downstream
| | projects like Leap during the development cycle, where keeping maintenance history is not important.
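For reference, these modes map roughly onto the following git operations. A minimal Python sketch of the table above, assuming a checked-out package branch and a fetched PR head ref named `pr_head`; this is an approximation, not the bot's actual Go implementation:

```python
import subprocess

def merge_pr(workdir: str, mode: str, pr_head: str = "pr_head"):
    git = lambda *args: subprocess.run(("git",) + args, cwd=workdir, check=True)
    if mode == "ff-only":
        # linear history: refuse anything that is not a fast-forward
        git("merge", "--ff-only", pr_head)
    elif mode == "replace":
        # recursive merge preferring the incoming side; the real mode
        # additionally removes files that the PR deleted
        git("merge", "-X", "theirs", pr_head)
    elif mode == "devel":
        # no merge commit at all: point the branch at the PR head
        git("reset", "--hard", pr_head)
    else:
        raise ValueError(f"unknown merge mode: {mode}")
```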
Labels
------

View File

@@ -44,35 +44,19 @@ var CurrentUser *models.User
var GitHandler common.GitHandlerGenerator
var Gitea common.Gitea
func getEnvOverrideString(env, def string) string {
if envValue := os.Getenv(env); len(envValue) != 0 {
return envValue
}
return def
}
func getEnvOverrideBool(env string, def bool) bool {
if envValue := os.Getenv(env); len(envValue) != 0 {
if value, err := strconv.Atoi(envValue); err == nil && value > 0 {
return true
}
}
return def
}
func main() {
flag.StringVar(&GitAuthor, "git-author", "AutoGits PR Review Bot", "Git commit author")
flag.StringVar(&GitEmail, "git-email", "amajer+devel-git@suse.de", "Git commit email")
workflowConfig := flag.String("config", getEnvOverrideString("AUTOGITS_CONFIG", ""), "Repository and workflow definition file")
workflowConfig := flag.String("config", "", "Repository and workflow definition file")
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance")
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
debugMode := flag.Bool("debug", getEnvOverrideBool("AUTOGITS_DEBUG", false), "Extra debugging information")
checkOnStart := flag.Bool("check-on-start", getEnvOverrideBool("AUTOGITS_CHECK_ON_START", false), "Check all repositories for consistency on start, without delays")
debugMode := flag.Bool("debug", false, "Extra debugging information")
checkOnStart := flag.Bool("check-on-start", false, "Check all repositories for consistency on start, without delays")
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consistency, in hours")
flag.BoolVar(&ListPROnly, "list-prs-only", false, "Only lists PRs without acting on them")
flag.Int64Var(&PRID, "id", -1, "Process only the specific ID and ignore the rest. Use for debugging")
basePath := flag.String("repo-path", getEnvOverrideString("AUTOGITS_REPO_PATH", ""), "Repository path. Default is temporary directory")
basePath := flag.String("repo-path", "", "Repository path. Default is temporary directory")
pr := flag.String("only-pr", "", "Only specific PR to process. For debugging")
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry mode. Do not push changes to remote repo.")
flag.Parse()

View File

@@ -406,12 +406,6 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
}
common.LogInfo("fetched PRSet of size:", len(prset.PRs))
if !prset.PrepareForMerge(git) {
common.LogError("PRs are NOT mergeable.")
} else {
common.LogInfo("PRs are in mergeable state.")
}
prjGitPRbranch := prGitBranchNameForPR(prRepo, prNo)
prjGitPR, err := prset.GetPrjGitPR()
if err == common.PRSet_PrjGitMissing {