24 Commits

Author SHA256 Message Date
91d22f7eea staging: add tests for idempotency and label changes
All checks were successful
go-generate-check / go-generate-check (pull_request) Successful in 33s
We do not want duplicate comments. And if we do have label changes,
new comments should be added.
2026-02-24 18:22:08 +01:00
913b8c8a4b staging: Match previous message format
All checks were successful
go-generate-check / go-generate-check (pull_request) Successful in 28s
Match changes in older message format. That is,

    Build is started in https://host/project/show/SUSE:SLFO:2.2:PullRequest:2162 .

    Additional QA builds:
    https://host/project/show/SUSE:SLFO:2.2:PullRequest:2162:SLES
    https://host/project/show/SUSE:SLFO:2.2:PullRequest:2162:SL-Micro

Add unit test to verify this exact format.
2026-02-24 12:23:35 +01:00
e1825dc658 staging: CommentPROnce everywhere
All checks were successful
go-generate-check / go-generate-check (pull_request) Successful in 32s
This replaces last usage of gitea.AddComment() where we do not
check if the comment already exists.
2026-02-23 19:16:40 +01:00
59965e7b5c staging: comment once on PRs using timeline
We need to comment once on PRs and verify using issue timeline
that only one comment is present

Furthermore, staging and secondary QA links should be present
in a single comment as tooling already expects this format.
2026-02-23 19:05:44 +01:00
24a4a592a7 staging: add PollWorkNotifications coverage 2026-02-23 16:01:47 +01:00
d3d9d66797 staging: add tests on commentOnPackagePR 2026-02-23 15:48:38 +01:00
7a2f7a6ee7 staging: test default projectgit repo 2026-02-23 15:44:45 +01:00
34a3a4795b staging: increase coverage of PullRequest processing 2026-02-23 15:39:02 +01:00
bb5daebdfa staging: return correct error
Don't clobber our error before returning it
2026-02-23 15:37:41 +01:00
70bba5e239 staging: improve CreateQASubProject unit coverage 2026-02-23 15:11:30 +01:00
5793391586 staging: add core logic unit tests 2026-02-23 15:05:51 +01:00
d923db3f87 staging: tests for Notification and Review handling 2026-02-23 14:47:51 +01:00
fc4547f9a9 tests: sanitize check 2026-02-23 14:44:17 +01:00
6fa57fc4d4 staging: Fix logic error
We need to report only once all building is finished, and not partial
results. Partial results are not yet finalized, so we can only
report that build is still in progress.

Add unit tests to cover these scenarios
2026-02-23 14:33:51 +01:00
82d4e2ed5d staging: mock interface setup 2026-02-23 14:17:53 +01:00
8920644792 staging: Use interfaces allowing dependency injection
This includes also a few formatting changes
2026-02-23 14:10:10 +01:00
06772ca662 common: Add ObsClientInterface
This allows for dependency injection for future unit tests.
2026-02-23 13:43:23 +01:00
643e0d2522 Merge branch 'main' of src.opensuse.org:git-workflow/autogits
All checks were successful
go-generate-check / go-generate-check (push) Successful in 24s
2026-02-22 14:32:54 +01:00
603e5c67e7 pr: obsolete -url in favour of -rabbit-url
-url is too generic, so it's obsoleted. We document that in
the README.md

Also documented the config parameter with examples.

Expose more parameters in the env
2026-02-22 14:30:39 +01:00
17b67b8133 tests: Add unit tests for some utility functions 2026-02-22 13:11:51 +01:00
9bc290af01 systemd: daemons need env variables
These are not optional parameters for instantiated daemons, so
they should not be treated as such
2026-02-22 13:10:58 +01:00
c16d1f9940 pr: move env fetch functions to common code
We can use these functions in all daemons.
2026-02-22 13:09:47 +01:00
Andrii Nikitin
3e1b3c5c84 Add some merge and review tests 2026-02-20 15:16:33 +01:00
Andrii Nikitin
fc4899b75a t: more workflow-pr sync integration tests 2026-02-20 15:16:33 +01:00
23 changed files with 3865 additions and 198 deletions

View File

@@ -83,3 +83,260 @@ func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) DoAndReturn(f fu
c.Call = c.Call.DoAndReturn(f)
return c
}
// Code below is mockgen-style generated boilerplate; prefer regenerating
// over hand-editing so it stays in sync with ObsClientInterface.

// MockObsClientInterface is a mock of ObsClientInterface interface.
type MockObsClientInterface struct {
	ctrl     *gomock.Controller
	recorder *MockObsClientInterfaceMockRecorder
	isgomock struct{} // zero-size marker identifying this as a gomock-generated mock
}

// MockObsClientInterfaceMockRecorder is the mock recorder for MockObsClientInterface.
type MockObsClientInterfaceMockRecorder struct {
	mock *MockObsClientInterface
}

// NewMockObsClientInterface creates a new mock instance.
func NewMockObsClientInterface(ctrl *gomock.Controller) *MockObsClientInterface {
	mock := &MockObsClientInterface{ctrl: ctrl}
	mock.recorder = &MockObsClientInterfaceMockRecorder{mock}
	return mock
}

// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockObsClientInterface) EXPECT() *MockObsClientInterfaceMockRecorder {
	return m.recorder
}
// BuildStatus mocks base method.
func (m *MockObsClientInterface) BuildStatus(project string, packages ...string) (*common.BuildResultList, error) {
	m.ctrl.T.Helper()
	// gomock records all arguments positionally, so the fixed project
	// argument and the variadic packages are flattened into one list.
	varargs := []any{project}
	for _, a := range packages {
		varargs = append(varargs, a)
	}
	ret := m.ctrl.Call(m, "BuildStatus", varargs...)
	// Type assertions yield the zero value when no return was stubbed.
	ret0, _ := ret[0].(*common.BuildResultList)
	ret1, _ := ret[1].(error)
	return ret0, ret1
}

// BuildStatus indicates an expected call of BuildStatus.
func (mr *MockObsClientInterfaceMockRecorder) BuildStatus(project any, packages ...any) *MockObsClientInterfaceBuildStatusCall {
	mr.mock.ctrl.T.Helper()
	varargs := append([]any{project}, packages...)
	call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildStatus", reflect.TypeOf((*MockObsClientInterface)(nil).BuildStatus), varargs...)
	return &MockObsClientInterfaceBuildStatusCall{Call: call}
}

// MockObsClientInterfaceBuildStatusCall wrap *gomock.Call
// and gives Return/Do/DoAndReturn signatures typed to BuildStatus.
type MockObsClientInterfaceBuildStatusCall struct {
	*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockObsClientInterfaceBuildStatusCall) Return(arg0 *common.BuildResultList, arg1 error) *MockObsClientInterfaceBuildStatusCall {
	c.Call = c.Call.Return(arg0, arg1)
	return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsClientInterfaceBuildStatusCall) Do(f func(string, ...string) (*common.BuildResultList, error)) *MockObsClientInterfaceBuildStatusCall {
	c.Call = c.Call.Do(f)
	return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsClientInterfaceBuildStatusCall) DoAndReturn(f func(string, ...string) (*common.BuildResultList, error)) *MockObsClientInterfaceBuildStatusCall {
	c.Call = c.Call.DoAndReturn(f)
	return c
}
// DeleteProject mocks base method.
func (m *MockObsClientInterface) DeleteProject(project string) error {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "DeleteProject", project)
	// Type assertion yields nil when no error was stubbed.
	ret0, _ := ret[0].(error)
	return ret0
}

// DeleteProject indicates an expected call of DeleteProject.
func (mr *MockObsClientInterfaceMockRecorder) DeleteProject(project any) *MockObsClientInterfaceDeleteProjectCall {
	mr.mock.ctrl.T.Helper()
	call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteProject", reflect.TypeOf((*MockObsClientInterface)(nil).DeleteProject), project)
	return &MockObsClientInterfaceDeleteProjectCall{Call: call}
}

// MockObsClientInterfaceDeleteProjectCall wrap *gomock.Call
// and gives Return/Do/DoAndReturn signatures typed to DeleteProject.
type MockObsClientInterfaceDeleteProjectCall struct {
	*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockObsClientInterfaceDeleteProjectCall) Return(arg0 error) *MockObsClientInterfaceDeleteProjectCall {
	c.Call = c.Call.Return(arg0)
	return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsClientInterfaceDeleteProjectCall) Do(f func(string) error) *MockObsClientInterfaceDeleteProjectCall {
	c.Call = c.Call.Do(f)
	return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsClientInterfaceDeleteProjectCall) DoAndReturn(f func(string) error) *MockObsClientInterfaceDeleteProjectCall {
	c.Call = c.Call.DoAndReturn(f)
	return c
}
// GetHomeProject mocks base method.
func (m *MockObsClientInterface) GetHomeProject() string {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "GetHomeProject")
	// Type assertion yields "" when no return was stubbed.
	ret0, _ := ret[0].(string)
	return ret0
}

// GetHomeProject indicates an expected call of GetHomeProject.
func (mr *MockObsClientInterfaceMockRecorder) GetHomeProject() *MockObsClientInterfaceGetHomeProjectCall {
	mr.mock.ctrl.T.Helper()
	call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHomeProject", reflect.TypeOf((*MockObsClientInterface)(nil).GetHomeProject))
	return &MockObsClientInterfaceGetHomeProjectCall{Call: call}
}

// MockObsClientInterfaceGetHomeProjectCall wrap *gomock.Call
// and gives Return/Do/DoAndReturn signatures typed to GetHomeProject.
type MockObsClientInterfaceGetHomeProjectCall struct {
	*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockObsClientInterfaceGetHomeProjectCall) Return(arg0 string) *MockObsClientInterfaceGetHomeProjectCall {
	c.Call = c.Call.Return(arg0)
	return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsClientInterfaceGetHomeProjectCall) Do(f func() string) *MockObsClientInterfaceGetHomeProjectCall {
	c.Call = c.Call.Do(f)
	return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsClientInterfaceGetHomeProjectCall) DoAndReturn(f func() string) *MockObsClientInterfaceGetHomeProjectCall {
	c.Call = c.Call.DoAndReturn(f)
	return c
}
// GetProjectMeta mocks base method.
func (m *MockObsClientInterface) GetProjectMeta(project string) (*common.ProjectMeta, error) {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "GetProjectMeta", project)
	// Type assertions yield the zero value when no return was stubbed.
	ret0, _ := ret[0].(*common.ProjectMeta)
	ret1, _ := ret[1].(error)
	return ret0, ret1
}

// GetProjectMeta indicates an expected call of GetProjectMeta.
func (mr *MockObsClientInterfaceMockRecorder) GetProjectMeta(project any) *MockObsClientInterfaceGetProjectMetaCall {
	mr.mock.ctrl.T.Helper()
	call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectMeta", reflect.TypeOf((*MockObsClientInterface)(nil).GetProjectMeta), project)
	return &MockObsClientInterfaceGetProjectMetaCall{Call: call}
}

// MockObsClientInterfaceGetProjectMetaCall wrap *gomock.Call
// and gives Return/Do/DoAndReturn signatures typed to GetProjectMeta.
type MockObsClientInterfaceGetProjectMetaCall struct {
	*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockObsClientInterfaceGetProjectMetaCall) Return(arg0 *common.ProjectMeta, arg1 error) *MockObsClientInterfaceGetProjectMetaCall {
	c.Call = c.Call.Return(arg0, arg1)
	return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsClientInterfaceGetProjectMetaCall) Do(f func(string) (*common.ProjectMeta, error)) *MockObsClientInterfaceGetProjectMetaCall {
	c.Call = c.Call.Do(f)
	return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsClientInterfaceGetProjectMetaCall) DoAndReturn(f func(string) (*common.ProjectMeta, error)) *MockObsClientInterfaceGetProjectMetaCall {
	c.Call = c.Call.DoAndReturn(f)
	return c
}
// SetHomeProject mocks base method.
func (m *MockObsClientInterface) SetHomeProject(project string) {
	m.ctrl.T.Helper()
	m.ctrl.Call(m, "SetHomeProject", project)
}

// SetHomeProject indicates an expected call of SetHomeProject.
func (mr *MockObsClientInterfaceMockRecorder) SetHomeProject(project any) *MockObsClientInterfaceSetHomeProjectCall {
	mr.mock.ctrl.T.Helper()
	call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetHomeProject", reflect.TypeOf((*MockObsClientInterface)(nil).SetHomeProject), project)
	return &MockObsClientInterfaceSetHomeProjectCall{Call: call}
}

// MockObsClientInterfaceSetHomeProjectCall wrap *gomock.Call
// and gives Return/Do/DoAndReturn signatures typed to SetHomeProject.
type MockObsClientInterfaceSetHomeProjectCall struct {
	*gomock.Call
}

// Return rewrite *gomock.Call.Return
// SetHomeProject has no results, so Return takes no arguments.
func (c *MockObsClientInterfaceSetHomeProjectCall) Return() *MockObsClientInterfaceSetHomeProjectCall {
	c.Call = c.Call.Return()
	return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsClientInterfaceSetHomeProjectCall) Do(f func(string)) *MockObsClientInterfaceSetHomeProjectCall {
	c.Call = c.Call.Do(f)
	return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsClientInterfaceSetHomeProjectCall) DoAndReturn(f func(string)) *MockObsClientInterfaceSetHomeProjectCall {
	c.Call = c.Call.DoAndReturn(f)
	return c
}
// SetProjectMeta mocks base method.
func (m *MockObsClientInterface) SetProjectMeta(meta *common.ProjectMeta) error {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "SetProjectMeta", meta)
	// Type assertion yields nil when no error was stubbed.
	ret0, _ := ret[0].(error)
	return ret0
}

// SetProjectMeta indicates an expected call of SetProjectMeta.
func (mr *MockObsClientInterfaceMockRecorder) SetProjectMeta(meta any) *MockObsClientInterfaceSetProjectMetaCall {
	mr.mock.ctrl.T.Helper()
	call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetProjectMeta", reflect.TypeOf((*MockObsClientInterface)(nil).SetProjectMeta), meta)
	return &MockObsClientInterfaceSetProjectMetaCall{Call: call}
}

// MockObsClientInterfaceSetProjectMetaCall wrap *gomock.Call
// and gives Return/Do/DoAndReturn signatures typed to SetProjectMeta.
type MockObsClientInterfaceSetProjectMetaCall struct {
	*gomock.Call
}

// Return rewrite *gomock.Call.Return
func (c *MockObsClientInterfaceSetProjectMetaCall) Return(arg0 error) *MockObsClientInterfaceSetProjectMetaCall {
	c.Call = c.Call.Return(arg0)
	return c
}

// Do rewrite *gomock.Call.Do
func (c *MockObsClientInterfaceSetProjectMetaCall) Do(f func(*common.ProjectMeta) error) *MockObsClientInterfaceSetProjectMetaCall {
	c.Call = c.Call.Do(f)
	return c
}

// DoAndReturn rewrite *gomock.Call.DoAndReturn
func (c *MockObsClientInterfaceSetProjectMetaCall) DoAndReturn(f func(*common.ProjectMeta) error) *MockObsClientInterfaceSetProjectMetaCall {
	c.Call = c.Call.DoAndReturn(f)
	return c
}

View File

@@ -46,6 +46,15 @@ type ObsStatusFetcherWithState interface {
BuildStatusWithState(project string, opts *BuildResultOptions, packages ...string) (*BuildResultList, error)
}
// ObsClientInterface abstracts the OBS client operations used by the bots,
// allowing dependency injection of mocks in unit tests.
// ObsClient is the production implementation.
type ObsClientInterface interface {
	GetProjectMeta(project string) (*ProjectMeta, error)
	SetProjectMeta(meta *ProjectMeta) error
	DeleteProject(project string) error
	BuildStatus(project string, packages ...string) (*BuildResultList, error)
	GetHomeProject() string
	SetHomeProject(project string)
}
type ObsClient struct {
baseUrl *url.URL
client *http.Client
@@ -57,6 +66,14 @@ type ObsClient struct {
HomeProject string
}
// GetHomeProject returns the client's configured home project name.
func (c *ObsClient) GetHomeProject() string {
	return c.HomeProject
}
// SetHomeProject overrides the client's home project name.
func (c *ObsClient) SetHomeProject(project string) {
	c.HomeProject = project
}
func NewObsClient(host string) (*ObsClient, error) {
baseUrl, err := url.Parse(host)
if err != nil {

View File

@@ -26,6 +26,7 @@ import (
"net/url"
"regexp"
"slices"
"strconv"
"strings"
"unicode"
@@ -286,3 +287,30 @@ func TrimRemovedBranchSuffix(branchName string) string {
return branchName
}
// GetEnvOverrideString returns envValue when it is non-empty,
// otherwise it falls back to the default def.
func GetEnvOverrideString(envValue, def string) string {
	if envValue == "" {
		return def
	}
	return envValue
}
// GetEnvOverrideBool interprets envValue as a boolean override for def.
// Surrounding whitespace is ignored and matching is case-insensitive.
// An empty (or whitespace-only) envValue keeps the default. A numeric
// value is true iff it is positive. Otherwise the truthy words
// "t", "true", "yes", "y" and "on" yield true; anything else is false.
func GetEnvOverrideBool(envValue string, def bool) bool {
	// Normalize first so that " 1 " parses the same as "1" and a
	// whitespace-only value falls back to the default. The original
	// code ran strconv.Atoi before trimming, making " 1 " false.
	envValue = strings.TrimSpace(strings.ToLower(envValue))
	if len(envValue) == 0 {
		return def
	}
	if value, err := strconv.Atoi(envValue); err == nil {
		return value > 0
	}
	switch envValue {
	case "t", "true", "yes", "y", "on":
		return true
	}
	return false
}

View File

@@ -222,6 +222,60 @@ func TestRemovedBranchName(t *testing.T) {
}
}
// TestSplitStringNoEmpty verifies that SplitStringNoEmpty drops empty and
// whitespace-only fields and trims the fields it keeps.
func TestSplitStringNoEmpty(t *testing.T) {
	cases := []struct {
		name     string
		input    string
		sep      string
		expected []string
	}{
		{"Empty string", "", ",", []string{}},
		{"Only separators", ",,,", ",", []string{}},
		{"Spaces and separators", " , , ", ",", []string{}},
		{"Normal split", "a,b,c", ",", []string{"a", "b", "c"}},
		{"Leading/trailing spaces", " a , b ", ",", []string{"a", "b"}},
		{"Multiple separators", "a,,b", ",", []string{"a", "b"}},
		{"Newlines", "line1\n\nline2", "\n", []string{"line1", "line2"}},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := common.SplitStringNoEmpty(tc.input, tc.sep)
			if !reflect.DeepEqual(got, tc.expected) {
				t.Errorf("SplitStringNoEmpty(%q, %q) = %v; want %v", tc.input, tc.sep, got, tc.expected)
			}
		})
	}
}
// TestTranslateHttpsToSshUrl checks HTTPS-to-SSH translation for the known
// hosts, pass-through of already-SSH URLs, and rejection of unknown hosts.
func TestTranslateHttpsToSshUrl(t *testing.T) {
	cases := []struct {
		name    string
		in      string
		want    string
		wantErr bool
	}{
		{"Opensuse HTTPS", "https://src.opensuse.org/org/repo", "ssh://gitea@src.opensuse.org/org/repo", false},
		{"Suse HTTPS", "https://src.suse.de/org/repo", "ssh://gitea@src.suse.de/org/repo", false},
		{"Already SSH", "ssh://gitea@src.opensuse.org/org/repo", "ssh://gitea@src.opensuse.org/org/repo", false},
		{"Native SSH", "gitea@src.opensuse.org:org/repo", "gitea@src.opensuse.org:org/repo", false},
		{"Unknown URL", "https://github.com/org/repo", "", true},
		{"Empty URL", "", "", true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := common.TranslateHttpsToSshUrl(tc.in)
			gotErr := err != nil
			if gotErr != tc.wantErr {
				t.Errorf("TranslateHttpsToSshUrl(%q) error = %v; want error %v", tc.in, err, tc.wantErr)
			}
			if got != tc.want {
				t.Errorf("TranslateHttpsToSshUrl(%q) = %q; want %q", tc.in, got, tc.want)
			}
		})
	}
}
func TestNewPackageIssueParsing(t *testing.T) {
tests := []struct {
name string
@@ -241,7 +295,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Default branch and junk lines and approval for maintainership",
name: "Default branch and junk lines and approval for maintainership",
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: yes",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -251,7 +305,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Default branch and junk lines and no maintainership",
name: "Default branch and junk lines and no maintainership",
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: NEVER",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -260,7 +314,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "3 repos with comments and maintainership",
name: "3 repos with comments and maintainership",
input: "\n\nsome comments for org1/repo2 are here and more\n\norg1/repo2#master\n org2/repo3#master\n some/repo3#m\nMaintainer ok",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -272,11 +326,11 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Invalid repos with spaces",
name: "Invalid repos with spaces",
input: "or g/repo#branch\norg/r epo#branch\norg/repo#br anch\norg/repo#branch As foo ++",
},
{
name: "Valid repos with spaces",
name: "Valid repos with spaces",
input: " org / repo # branch",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -285,7 +339,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
},
},
{
name: "Package name is not repo name",
name: "Package name is not repo name",
input: " org / repo # branch as repo++ \nmaintainer true",
issues: &common.NewRepos{
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
@@ -305,3 +359,58 @@ func TestNewPackageIssueParsing(t *testing.T) {
})
}
}
// TestGetEnvOverride covers both environment-override helpers:
// the string variant (non-empty value wins over the default) and the
// bool variant (numeric, truthy-word, and fallback-to-default paths).
func TestGetEnvOverride(t *testing.T) {
	t.Run("GetEnvOverrideString", func(t *testing.T) {
		tests := []struct {
			envValue string
			def      string
			expected string
		}{
			{"", "default", "default"},
			{"override", "default", "override"},
		}
		for _, test := range tests {
			if res := common.GetEnvOverrideString(test.envValue, test.def); res != test.expected {
				t.Errorf("GetEnvOverrideString(%q, %q) = %q; want %q", test.envValue, test.def, res, test.expected)
			}
		}
	})
	t.Run("GetEnvOverrideBool", func(t *testing.T) {
		tests := []struct {
			name     string
			envValue string
			def      bool
			expected bool
		}{
			{"Empty env value, default false", "", false, false},
			{"Empty env value, default true", "", true, true},
			{"Env '1', default false", "1", false, true},
			{"Env '2', default false", "2", false, true},
			{"Env '0', default false", "0", false, false},
			{"Env 'invalid', default true", "abc", true, false},
			{"Env 'true', default false", "true", false, true},
			{"Env 'YES', default false", "YES", false, true},
			{"Env '0', default true", "0", true, false},
			{"Env 'false', default true", "false", true, false},
			{"Env 'FALSE', default true", "FALSE", true, false},
			{"Env ' true ', default false", " true ", false, true},
			{"Env 'no', default true", "no", true, false},
			{"Env 'NO', default true", "NO", true, false},
			{"Env 'off', default true", "off", true, false},
			{"Env 'on', default false", "on", false, true},
			{"Env 'invalid', default false", "tbc", false, false},
			{"Env 'garbage', default false", "!@#$", false, false},
		}
		for _, test := range tests {
			t.Run(test.name, func(t *testing.T) {
				if res := common.GetEnvOverrideBool(test.envValue, test.def); res != test.expected {
					t.Errorf("GetEnvOverrideBool(%q, %v) = %v; want %v", test.envValue, test.def, res, test.expected)
				}
			})
		}
	})
}

View File

@@ -16,10 +16,10 @@
# 3. 'pytest -v tests/*' - run tests
# 4. 'make down' - once the containers are not needed
# B2: (make sure the go binaries in the parent folder are built)
# 4. 'make build_local' - prepared images (recommended, otherwise there might be surprises if image fails to build during `make up`)
# 5. 'make up' - spawns podman-compose
# 6. 'pytest -v tests/*' - run tests
# 7. 'make down' - once the containers are not needed
# 1. 'make build_local' - prepared images (recommended, otherwise there might be surprises if image fails to build during `make up`)
# 2. 'make up' - spawns podman-compose
# 3. 'pytest -v tests/*' - run tests
# 4. 'make down' - once the containers are not needed
AUTO_DETECT_MODE := $(shell if test -e ../workflow-pr/workflow-pr; then echo .local; else echo .package; fi)

57
integration/Makefile.txt Normal file
View File

@@ -0,0 +1,57 @@
+-------------------------------------------------------------------------------------------------+
| Makefile Targets |
+-------------------------------------------------------------------------------------------------+
| |
| [Default Test Workflow] |
| test (Auto-detects mode: .local or .package) |
| > build_container |
| > test_container |
| |
| [Specific Test Workflows - Topology 1: Privileged Container] |
| test_package (Mode A1: Bots from official packages) |
| > build_container |
| > test_container |
| |
| test_local (Mode B1: Bots from local binaries) |
| > build_container |
| > test_container |
| |
| build_container |
| - Action: Builds the `autogits_integration` privileged container image. |
| - Purpose: Prepares an environment for running tests within a single container. |
| |
| test_container |
| - Action: Runs `autogits_integration` container, executes `make build`, `make up`, and |
| `pytest -v tests/*` inside it. |
| - Purpose: Executes the full test suite in Topology 1 (privileged container). |
| |
| [Build & Orchestration Workflows - Topology 2: podman-compose] |
| |
| build_package (Mode A: Builds service images from official packages) |
| > build |
| |
| build_local (Mode B: Builds service images from local binaries) |
| > build |
| |
| build |
| - Action: Pulls `rabbitmq` image and iterates through `podman-compose.yml` services |
| to build each one. |
| - Purpose: Prepares all necessary service images for Topology 2 deployment. |
| |
| up |
| - Action: Starts all services defined in `podman-compose.yml` in detached mode. |
| - Purpose: Deploys the application topology (containers) for testing or development. |
| |
| down |
| - Action: Stops and removes all services started by `up`. |
| - Purpose: Cleans up the deployed application topology. |
| |
| up-bots-package (Mode A: Spawns Topology 2 with official package bots) |
| - Action: Calls `podman-compose up -d` with `GIWTF_IMAGE_SUFFIX=.package`. |
| - Purpose: Specifically brings up the environment using official package bots. |
| |
| up-bots-local (Mode B: Spawns Topology 2 with local binaries) |
| - Action: Calls `podman-compose up -d` with `GIWTF_IMAGE_SUFFIX=.local`. |
| - Purpose: Specifically brings up the environment using local binaries. |
| |
+-------------------------------------------------------------------------------------------------+

View File

@@ -0,0 +1,77 @@
+-------------------------------------------------------------------------------------------------+
| Podman-Compose Services Diagram |
+-------------------------------------------------------------------------------------------------+
| |
| [Network] |
| gitea-network (Bridge network for inter-service communication) |
| |
|-------------------------------------------------------------------------------------------------|
| |
| [Service: gitea] |
| Description: Self-hosted Git service, central hub for repositories and code management. |
| Container Name: gitea-test |
| Image: Built from ./gitea Dockerfile |
| Ports: 3000 (HTTP), 3022 (SSH) |
| Volumes: ./gitea-data (for persistent data), ./gitea-logs (for logs) |
| Network: gitea-network |
| |
|-------------------------------------------------------------------------------------------------|
| |
| [Service: rabbitmq] |
| Description: Message broker for asynchronous communication between services. |
| Container Name: rabbitmq-test |
| Image: rabbitmq:3.13.7-management |
| Ports: 5671 (AMQP), 15672 (Management UI) |
| Volumes: ./rabbitmq-data (for persistent data), ./rabbitmq-config/certs (TLS certs), |
| ./rabbitmq-config/rabbitmq.conf (config), ./rabbitmq-config/definitions.json (exchanges)|
| Healthcheck: Ensures RabbitMQ is running and healthy. |
| Network: gitea-network |
| |
|-------------------------------------------------------------------------------------------------|
| |
| [Service: gitea-publisher] |
| Description: Publishes events from Gitea to the RabbitMQ message queue. |
| Container Name: gitea-publisher |
| Image: Built from ../gitea-events-rabbitmq-publisher/Dockerfile (local/package) |
| Dependencies: gitea (started), rabbitmq (healthy) |
| Environment: RABBITMQ_HOST, RABBITMQ_USERNAME, RABBITMQ_PASSWORD, SSL_CERT_FILE |
| Command: Listens for Gitea events, publishes to 'suse' topic, debug enabled. |
| Network: gitea-network |
| |
|-------------------------------------------------------------------------------------------------|
| |
| [Service: workflow-pr] |
| Description: Manages pull request workflows, likely consuming events from RabbitMQ and |
| interacting with Gitea. |
| Container Name: workflow-pr |
| Image: Built from ../workflow-pr/Dockerfile (local/package) |
| Dependencies: gitea (started), rabbitmq (healthy) |
| Environment: AMQP_USERNAME, AMQP_PASSWORD, SSL_CERT_FILE |
| Volumes: ./gitea-data (read-only), ./workflow-pr/workflow-pr.json (config), |
| ./workflow-pr-repos (for repositories) |
| Command: Configures Gitea/RabbitMQ URLs, enables debug, manages repositories. |
| Network: gitea-network |
| |
|-------------------------------------------------------------------------------------------------|
| |
| [Service: mock-obs] |
| Description: A mock (simulated) service for the Open Build Service (OBS) for testing. |
| Container Name: mock-obs |
| Image: Built from ./mock-obs Dockerfile |
| Ports: 8080 |
| Volumes: ./mock-obs/responses (for mock API responses) |
| Network: gitea-network |
| |
|-------------------------------------------------------------------------------------------------|
| |
| [Service: obs-staging-bot] |
| Description: A bot that interacts with Gitea and the mock OBS, likely for staging processes. |
| Container Name: obs-staging-bot |
| Image: Built from ../obs-staging-bot/Dockerfile (local/package) |
| Dependencies: gitea (started), mock-obs (started) |
| Environment: OBS_USER, OBS_PASSWORD |
| Volumes: ./gitea-data (read-only) |
| Command: Configures Gitea/OBS URLs, enables debug. |
| Network: gitea-network |
| |
+-------------------------------------------------------------------------------------------------+

10
integration/pytest.ini Normal file
View File

@@ -0,0 +1,10 @@
[pytest]
markers =
t001: Test case 001
t002: Test case 002
t003: Test case 003
t004: Test case 004
t005: Test case 005
t006: Test case 006
t007: Test case 007
dependency: pytest-dependency marker

View File

@@ -31,7 +31,7 @@ def gitea_env():
# Wait for Gitea to be available
print(f"Waiting for Gitea at {gitea_url}...")
max_retries = 30
max_retries = 5
for i in range(max_retries):
try:
# Check a specific API endpoint that indicates readiness
@@ -41,8 +41,8 @@ def gitea_env():
break
except requests.exceptions.ConnectionError:
pass
print(f"Gitea not ready ({response.status_code if 'response' in locals() else 'ConnectionError'}), retrying in 5 seconds... ({i+1}/{max_retries})")
time.sleep(5)
print(f"Gitea not ready ({response.status_code if 'response' in locals() else 'ConnectionError'}), retrying in 1 seconds... ({i+1}/{max_retries})")
time.sleep(1)
else:
raise Exception("Gitea did not become available within the expected time.")
@@ -59,6 +59,23 @@ def gitea_env():
# The add_submodules method also creates workflow.config and staging.config
client.add_submodules("products", "SLFO")
time.sleep(1)
workflow_config_content = """{
"Workflows": ["pr"],
"GitProjectName": "products/SLFO#main",
"Organization": "pool",
"Branch": "main",
"ManualMergeProject": true,
"Reviewers": [ "-autogits_obs_staging_bot" ]
}"""
client.create_file("products", "SLFO", "workflow.config", workflow_config_content)
staging_config_content = """{
"ObsProject": "openSUSE:Leap:16.0",
"StagingProject": "openSUSE:Leap:16.0:PullRequest"
}"""
client.create_file("products", "SLFO", "staging.config", staging_config_content)
client.add_collaborator("products", "SLFO", "autogits_obs_staging_bot", "write")
client.add_collaborator("products", "SLFO", "workflow-pr", "write")
@@ -69,10 +86,635 @@ def gitea_env():
client.update_repo_settings("pool", "pkgA")
client.update_repo_settings("pool", "pkgB")
print("--- Gitea Dummy Data Setup Complete ---")
time.sleep(5) # Add a small delay for Gitea to fully process changes
time.sleep(1) # Give workflow-pr bot time to become fully active
yield client
@pytest.fixture(scope="session")
def configured_dev_branch_env(gitea_env: GiteaAPIClient, request):
    """
    Session-scoped fixture that creates a 'dev' branch in products/SLFO,
    pool/pkgA and pool/pkgB, and writes workflow.config to
    products/SLFO#dev with the content supplied via indirect
    parametrization (request.param), then restarts workflow-pr.

    Yields (gitea_env, test_full_repo_name, dev_branch_name).
    """
    test_org_name = "products"
    test_repo_name = "SLFO"
    test_full_repo_name = f"{test_org_name}/{test_repo_name}"
    dev_branch_name = "dev"
    workflow_config_content = request.param  # Get config content from parametrization
    print(f"--- Setting up 'dev' branch and workflow.config in {test_full_repo_name}#{dev_branch_name} ---")
    # Get the latest commit SHA of the main branch
    main_branch_sha = gitea_env._request("GET", f"repos/{test_org_name}/{test_repo_name}/branches/main").json()["commit"]["id"]
    # Create 'dev' branch from 'main' in products/SLFO
    gitea_env.create_branch(test_org_name, test_repo_name, dev_branch_name, main_branch_sha)
    # Create 'dev' branch in pool/pkgA as well
    pool_pkga_main_sha = gitea_env._request("GET", "repos/pool/pkgA/branches/main").json()["commit"]["id"]
    gitea_env.create_branch("pool", "pkgA", dev_branch_name, pool_pkga_main_sha)
    # Create 'dev' branch in pool/pkgB as well
    pool_pkgb_main_sha = gitea_env._request("GET", "repos/pool/pkgB/branches/main").json()["commit"]["id"]
    gitea_env.create_branch("pool", "pkgB", dev_branch_name, pool_pkgb_main_sha)
    # Create/update workflow.config with the provided content
    gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", workflow_config_content, branch=dev_branch_name)
    print(f"Created workflow.config with specific content in {test_full_repo_name}#{dev_branch_name}")
    # Restart workflow-pr service to pick up new project config
    gitea_env.restart_service("workflow-pr")
    # NOTE(review): fixed 1s sleep assumes the bot re-initializes quickly —
    # confirm this is long enough on slow CI runners.
    time.sleep(1)  # Give the service time to restart and re-initialize
    yield gitea_env, test_full_repo_name, dev_branch_name
    # Teardown (optional, depending on test strategy)
    # For now, we'll leave resources for inspection. If a clean slate is needed for each test,
    # this fixture's scope would be 'function' and teardown logic would be added here.
@pytest.fixture(scope="session")
def no_project_git_pr_env(gitea_env: GiteaAPIClient):
    """
    Sets up 'dev' branch in products/SLFO and pool/pkgA,
    and configures workflow.config in products/SLFO#dev with NoProjectGitPR: true.
    """
    org = "products"
    repo = "SLFO"
    full_name = f"{org}/{repo}"
    branch = "dev"
    print(f"--- Setting up workflow.config in {full_name}#{branch} for No Project PR ---")
    # Branch each involved repo off the tip of its 'main' branch.
    # An already-existing 'dev' branch is tolerated (session-scoped reruns).
    for branch_org, branch_repo in ((org, repo), ("pool", "pkgA"), ("pool", "pkgB")):
        tip_sha = gitea_env._request("GET", f"repos/{branch_org}/{branch_repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(branch_org, branch_repo, branch, tip_sha)
        except Exception as exc:
            if "already exists" not in str(exc).lower():
                raise
    # Workflow configuration with "NoProjectGitPR": true for this branch.
    config_body = f"""{{
"Workflows": ["pr"],
"GitProjectName": "{full_name}#{branch}",
"Organization": "pool",
"Branch": "dev",
"ManualMergeProject": true,
"Reviewers": [ "-autogits_obs_staging_bot" ],
"NoProjectGitPR": true
}}"""
    gitea_env.create_file(org, repo, "workflow.config", config_body, branch=branch)
    print(f"Created workflow.config with NoProjectGitPR: true in {full_name}#{branch}")
    # Restart workflow-pr so it picks up the new project configuration.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)  # give the service time to restart and re-initialize
    return gitea_env, full_name, branch
@pytest.fixture(scope="session")
def test_user_client(gitea_env: GiteaAPIClient):
    """
    Creates a new unique user and returns a GiteaAPIClient instance for them using sudo.
    This user should not have write permissions to the test repositories by default.
    """
    # Timestamp-based name keeps the user unique per test session.
    new_user = f"user-{int(time.time())}"
    gitea_env.create_user(new_user, "password123", f"{new_user}@example.com")
    # Grant write access to pool/pkgA
    gitea_env.add_collaborator("pool", "pkgA", new_user, "write")
    # Reuse the admin token but impersonate the new user via Gitea's Sudo header.
    admin_token = gitea_env.headers["Authorization"].split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=admin_token, sudo=new_user)
def setup_users_from_config(client: GiteaAPIClient, workflow_config: str, maintainership_config: str):
    """
    Parses workflow.config and _maintainership.json, creates users, and adds them as collaborators.
    """
    import json
    workflow = json.loads(workflow_config)
    maintainers = json.loads(maintainership_config)
    # Reviewer entries may carry '+'/'-' prefixes; bot accounts are excluded.
    bot_accounts = ("autogits_obs_staging_bot", "workflow-pr")
    wanted = {entry.lstrip("+-") for entry in workflow.get("Reviewers", [])}
    wanted = {name for name in wanted if name and name not in bot_accounts}
    # Every maintainer (global and per-package) also needs an account.
    for members in maintainers.values():
        wanted.update(members)
    for name in wanted:
        client.create_user(name, "password123", f"{name}@example.com")
        # For test simplicity, everyone gets write access to the project repo.
        client.add_collaborator("products", "SLFO", name, "write")
    # Per-package permissions; the empty key denotes global maintainers.
    for pkg, members in maintainers.items():
        for name in members:
            if not pkg:
                # Global maintainer - already added to SLFO, add to pkgA/pkgB
                client.add_collaborator("pool", "pkgA", name, "write")
                client.add_collaborator("pool", "pkgB", name, "write")
            else:
                client.add_collaborator("pool", pkg, name, "write")
@pytest.fixture(scope="session")
def gitea_env():
    """
    Sets up the Gitea environment with dummy data and provides a GiteaAPIClient instance.

    Waits for the local Gitea instance to respond, then creates the orgs,
    repositories, config files, and users the test suite expects.
    Yields an admin-authenticated GiteaAPIClient.
    """
    gitea_url = "http://127.0.0.1:3000"
    # Read admin token
    admin_token_path = "./gitea-data/admin.token" # Corrected path
    admin_token = None
    try:
        with open(admin_token_path, "r") as f:
            admin_token = f.read().strip()
    except FileNotFoundError:
        raise Exception(f"Admin token file not found at {admin_token_path}. Ensure it's generated and accessible.")
    # Headers for authenticated requests
    auth_headers = {"Authorization": f"token {admin_token}", "Content-Type": "application/json"}
    # Wait for Gitea to be available
    print(f"Waiting for Gitea at {gitea_url}...")
    max_retries = 5
    for i in range(max_retries):
        try:
            # Check a specific API endpoint that indicates readiness
            response = requests.get(f"{gitea_url}/api/v1/version", headers=auth_headers, timeout=5)
            if response.status_code == 200:
                print("Gitea API is available.")
                break
        except requests.exceptions.ConnectionError:
            pass
        # NOTE(review): 'response' may be stale here — a ConnectionError after an
        # earlier successful request prints the previous iteration's status code.
        print(f"Gitea not ready ({response.status_code if 'response' in locals() else 'ConnectionError'}), retrying in 1 seconds... ({i+1}/{max_retries})")
        time.sleep(1)
    else:
        # for/else: the loop exhausted all retries without a 200 response.
        raise Exception("Gitea did not become available within the expected time.")
    client = GiteaAPIClient(base_url=gitea_url, token=admin_token)
    # Setup dummy data
    print("--- Starting Gitea Dummy Data Setup from Pytest Fixture ---")
    client.create_org("products")
    client.create_org("pool")
    client.create_repo("products", "SLFO")
    client.create_repo("pool", "pkgA")
    client.create_repo("pool", "pkgB")
    # The add_submodules method also creates workflow.config and staging.config
    client.add_submodules("products", "SLFO")
    time.sleep(1)
    # Default workflow configuration for the 'main' branch of products/SLFO.
    workflow_config_content = """{
"Workflows": ["pr"],
"GitProjectName": "products/SLFO#main",
"Organization": "pool",
"Branch": "main",
"ManualMergeProject": true,
"Reviewers": [ "-autogits_obs_staging_bot" ]
}"""
    client.create_file("products", "SLFO", "workflow.config", workflow_config_content)
    # Staging bot configuration mapping the OBS project to its staging project.
    staging_config_content = """{
"ObsProject": "openSUSE:Leap:16.0",
"StagingProject": "openSUSE:Leap:16.0:PullRequest"
}"""
    client.create_file("products", "SLFO", "staging.config", staging_config_content)
    # Maintainership: empty key = global maintainers, other keys = per package.
    maintainership_content = """{
"": ["ownerX","ownerY"],
"pkgA": ["ownerA"],
"pkgB": ["ownerB","ownerBB"]
}"""
    # Create users from default main config
    setup_users_from_config(client, workflow_config_content, maintainership_content)
    # Bot accounts need write access to review/merge on the test repos.
    client.add_collaborator("products", "SLFO", "autogits_obs_staging_bot", "write")
    client.add_collaborator("products", "SLFO", "workflow-pr", "write")
    client.add_collaborator("pool", "pkgA", "workflow-pr", "write")
    client.add_collaborator("pool", "pkgB", "workflow-pr", "write")
    client.update_repo_settings("products", "SLFO")
    client.update_repo_settings("pool", "pkgA")
    client.update_repo_settings("pool", "pkgB")
    print("--- Gitea Dummy Data Setup Complete ---")
    time.sleep(1) # Give workflow-pr bot time to become fully active
    yield client
@pytest.fixture(scope="session")
def configured_dev_branch_env(gitea_env: GiteaAPIClient, request):
    """
    Fixture to set up a 'dev' branch in products/SLFO and pool/pkgA,
    and configure workflow.config in products/SLFO#dev with specific content.
    Yields (gitea_env, test_full_repo_name, dev_branch_name).
    """
    org = "products"
    repo = "SLFO"
    full_name = f"{org}/{repo}"
    branch = "dev"
    config_body = request.param  # workflow.config content comes from parametrization
    print(f"--- Setting up 'dev' branch and workflow.config in {full_name}#{branch} ---")
    # Branch every involved repo off the tip of its 'main' branch,
    # waiting until 'main' is visible first.
    for branch_org, branch_repo in ((org, repo), ("pool", "pkgA"), ("pool", "pkgB")):
        gitea_env.ensure_branch_exists(branch_org, branch_repo, "main")
        tip_sha = gitea_env._request("GET", f"repos/{branch_org}/{branch_repo}/branches/main").json()["commit"]["id"]
        gitea_env.create_branch(branch_org, branch_repo, branch, tip_sha)
    # Create/update workflow.config with the parametrized content.
    gitea_env.create_file(org, repo, "workflow.config", config_body, branch=branch)
    # Default maintainership: request.param only carries workflow.config.
    maintainership_body = """{
"": ["ownerX","ownerY"],
"pkgA": ["ownerA"],
"pkgB": ["ownerB","ownerBB"]
}"""
    setup_users_from_config(gitea_env, config_body, maintainership_body)
    print(f"Created workflow.config with specific content in {full_name}#{branch}")
    # Restart workflow-pr so it picks up the new project configuration.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)  # give the service time to restart and re-initialize
    yield gitea_env, full_name, branch
@pytest.fixture(scope="session")
def no_project_git_pr_env(gitea_env: GiteaAPIClient):
    """
    Sets up 'dev' branch in products/SLFO and pool/pkgA,
    and configures workflow.config in products/SLFO#dev with NoProjectGitPR: true.
    """
    org = "products"
    repo = "SLFO"
    full_name = f"{org}/{repo}"
    branch = "dev"
    print(f"--- Setting up workflow.config in {full_name}#{branch} for No Project PR ---")
    # Branch every involved repo off its 'main' tip; wait for 'main' first and
    # tolerate a pre-existing 'dev' branch (session-scoped reruns).
    for branch_org, branch_repo in ((org, repo), ("pool", "pkgA"), ("pool", "pkgB")):
        gitea_env.ensure_branch_exists(branch_org, branch_repo, "main")
        tip_sha = gitea_env._request("GET", f"repos/{branch_org}/{branch_repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(branch_org, branch_repo, branch, tip_sha)
        except Exception as exc:
            if "already exists" not in str(exc).lower():
                raise
    # Workflow configuration with "NoProjectGitPR": true for this branch.
    config_body = f"""{{
"Workflows": ["pr"],
"GitProjectName": "{full_name}#{branch}",
"Organization": "pool",
"Branch": "dev",
"ManualMergeProject": true,
"Reviewers": [ "-autogits_obs_staging_bot" ],
"NoProjectGitPR": true
}}"""
    gitea_env.create_file(org, repo, "workflow.config", config_body, branch=branch)
    # Default maintainership used by most fixtures in this file.
    maintainership_body = """{
"": ["ownerX","ownerY"],
"pkgA": ["ownerA"],
"pkgB": ["ownerB","ownerBB"]
}"""
    setup_users_from_config(gitea_env, config_body, maintainership_body)
    print(f"Created workflow.config with NoProjectGitPR: true in {full_name}#{branch}")
    # Restart workflow-pr so it picks up the new project configuration.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)  # give the service time to restart and re-initialize
    return gitea_env, full_name, branch
@pytest.fixture(scope="session")
def test_user_client(gitea_env: GiteaAPIClient):
    """
    Creates a new unique user and returns a GiteaAPIClient instance for them using sudo.
    This user should not have write permissions to the test repositories by default.
    """
    # Unique-per-session user name derived from the current timestamp.
    sudo_user = f"user-{int(time.time())}"
    gitea_env.create_user(sudo_user, "password123", f"{sudo_user}@example.com")
    # Grant write access to pool/pkgA
    gitea_env.add_collaborator("pool", "pkgA", sudo_user, "write")
    # Extract the raw admin token from the "token <value>" Authorization header
    # and impersonate the new user via the Sudo header.
    token_value = gitea_env.headers["Authorization"].split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token_value, sudo=sudo_user)
@pytest.fixture(scope="session")
def automerge_env(gitea_env: GiteaAPIClient):
    """
    Sets up 'merge' branch and custom workflow.config for automerge tests.
    """
    org = "products"
    repo = "SLFO"
    full_name = f"{org}/{repo}"
    branch = "merge"
    print(f"--- Setting up '{branch}' branch and workflow.config in {full_name} ---")
    # Branch every involved repo off its 'main' tip; wait for 'main' first and
    # tolerate a pre-existing 'merge' branch (session-scoped reruns).
    for branch_org, branch_repo in ((org, repo), ("pool", "pkgA"), ("pool", "pkgB")):
        gitea_env.ensure_branch_exists(branch_org, branch_repo, "main")
        tip_sha = gitea_env._request("GET", f"repos/{branch_org}/{branch_repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(branch_org, branch_repo, branch, tip_sha)
        except Exception as exc:
            if "already exists" not in str(exc).lower():
                raise
    # Workflow config with mandatory reviewers (+usera, +userb) for automerge.
    config_body = f"""{{
"Workflows": ["pr"],
"GitProjectName": "{full_name}#{branch}",
"Organization": "pool",
"Branch": "{branch}",
"Reviewers": [ "+usera", "+userb", "-autogits_obs_staging_bot" ]
}}"""
    gitea_env.create_file(org, repo, "workflow.config", config_body, branch=branch)
    maintainership_body = """{
"": ["ownerX","ownerY"],
"pkgA": ["ownerA"],
"pkgB": ["ownerB","ownerBB"]
}"""
    gitea_env.create_file(org, repo, "_maintainership.json", maintainership_body, branch=branch)
    setup_users_from_config(gitea_env, config_body, maintainership_body)
    # Restart workflow-pr so it picks up the new project configuration.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)
    return gitea_env, full_name, branch
@pytest.fixture(scope="session")
def maintainer_env(gitea_env: GiteaAPIClient):
    """
    Sets up 'maintainer-merge' branch and workflow.config without mandatory reviewers.
    """
    org = "products"
    repo = "SLFO"
    full_name = f"{org}/{repo}"
    branch = "maintainer-merge"
    print(f"--- Setting up '{branch}' branch and workflow.config in {full_name} ---")
    # Branch every involved repo off its 'main' tip; wait for 'main' first and
    # tolerate a pre-existing branch (session-scoped reruns).
    for branch_org, branch_repo in ((org, repo), ("pool", "pkgA"), ("pool", "pkgB")):
        gitea_env.ensure_branch_exists(branch_org, branch_repo, "main")
        tip_sha = gitea_env._request("GET", f"repos/{branch_org}/{branch_repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(branch_org, branch_repo, branch, tip_sha)
        except Exception as exc:
            if "already exists" not in str(exc).lower():
                raise
    # No '+' reviewers here: merges are gated by maintainership only.
    config_body = f"""{{
"Workflows": ["pr"],
"GitProjectName": "{full_name}#{branch}",
"Organization": "pool",
"Branch": "{branch}",
"Reviewers": [ "-autogits_obs_staging_bot" ]
}}"""
    gitea_env.create_file(org, repo, "workflow.config", config_body, branch=branch)
    maintainership_body = """{
"": ["ownerX","ownerY"],
"pkgA": ["ownerA"],
"pkgB": ["ownerB","ownerBB"]
}"""
    gitea_env.create_file(org, repo, "_maintainership.json", maintainership_body, branch=branch)
    setup_users_from_config(gitea_env, config_body, maintainership_body)
    gitea_env.add_collaborator(org, repo, "autogits_obs_staging_bot", "write")
    # Restart workflow-pr so it picks up the new project configuration.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)
    return gitea_env, full_name, branch
@pytest.fixture(scope="session")
def review_required_env(gitea_env: GiteaAPIClient):
    """
    Sets up 'review-required' branch and workflow.config with ReviewRequired: true.
    """
    org = "products"
    repo = "SLFO"
    full_name = f"{org}/{repo}"
    branch = "review-required"
    print(f"--- Setting up '{branch}' branch and workflow.config in {full_name} ---")
    # Branch every involved repo off its 'main' tip; wait for 'main' first and
    # tolerate a pre-existing branch (session-scoped reruns).
    for branch_org, branch_repo in ((org, repo), ("pool", "pkgA"), ("pool", "pkgB")):
        gitea_env.ensure_branch_exists(branch_org, branch_repo, "main")
        tip_sha = gitea_env._request("GET", f"repos/{branch_org}/{branch_repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(branch_org, branch_repo, branch, tip_sha)
        except Exception as exc:
            if "already exists" not in str(exc).lower():
                raise
    # Workflow config requiring an explicit review before merge.
    config_body = f"""{{
"Workflows": ["pr"],
"GitProjectName": "{full_name}#{branch}",
"Organization": "pool",
"Branch": "{branch}",
"Reviewers": [ "-autogits_obs_staging_bot" ],
"ReviewRequired": true
}}"""
    gitea_env.create_file(org, repo, "workflow.config", config_body, branch=branch)
    maintainership_body = """{
"": ["ownerX","ownerY"],
"pkgA": ["ownerA"],
"pkgB": ["ownerB","ownerBB"]
}"""
    gitea_env.create_file(org, repo, "_maintainership.json", maintainership_body, branch=branch)
    setup_users_from_config(gitea_env, config_body, maintainership_body)
    gitea_env.add_collaborator(org, repo, "autogits_obs_staging_bot", "write")
    # Restart workflow-pr so it picks up the new project configuration.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)
    return gitea_env, full_name, branch
@pytest.fixture(scope="session")
def ownerA_client(gitea_env: GiteaAPIClient):
    """
    Returns a GiteaAPIClient instance for ownerA.
    """
    # Impersonate ownerA with the admin token via Gitea's Sudo header.
    auth_header = gitea_env.headers["Authorization"]
    token_value = auth_header.split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token_value, sudo="ownerA")
@pytest.fixture(scope="session")
def ownerB_client(gitea_env: GiteaAPIClient):
    """
    Returns a GiteaAPIClient instance for ownerB.
    """
    # Impersonate ownerB with the admin token via Gitea's Sudo header.
    auth_header = gitea_env.headers["Authorization"]
    token_value = auth_header.split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token_value, sudo="ownerB")
@pytest.fixture(scope="session")
def ownerBB_client(gitea_env: GiteaAPIClient):
    """
    Returns a GiteaAPIClient instance for ownerBB.
    """
    # Impersonate ownerBB with the admin token via Gitea's Sudo header.
    auth_header = gitea_env.headers["Authorization"]
    token_value = auth_header.split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token_value, sudo="ownerBB")

View File

@@ -6,6 +6,7 @@ import json
import xml.etree.ElementTree as ET
from pathlib import Path
import base64
import subprocess
TEST_DATA_DIR = Path(__file__).parent.parent / "data"
BUILD_RESULT_TEMPLATE = TEST_DATA_DIR / "build_result.xml.template"
@@ -43,9 +44,11 @@ def mock_build_result():
class GiteaAPIClient:
def __init__(self, base_url, token):
def __init__(self, base_url, token, sudo=None):
self.base_url = base_url
self.headers = {"Authorization": f"token {token}", "Content-Type": "application/json"}
if sudo:
self.headers["Sudo"] = sudo
def _request(self, method, path, **kwargs):
url = f"{self.base_url}/api/v1/{path}"
@@ -58,6 +61,48 @@ class GiteaAPIClient:
raise
return response
def get_file_info(self, owner: str, repo: str, file_path: str, branch: str = "main"):
    """Return the Gitea contents-API metadata for a file, or None if it does not exist (404)."""
    url = f"repos/{owner}/{repo}/contents/{file_path}"
    # Only append ?ref= for non-default branches; the API defaults to the default branch.
    if branch and branch != "main":
        url += f"?ref={branch}"
    try:
        response = self._request("GET", url)
        return response.json()
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            return None  # file absent on this branch — callers use this to pick POST vs PUT
        raise
def create_user(self, username, password, email):
    """Create a Gitea user via the admin API; if it already exists (422), reset its password."""
    print(f"--- Creating user: {username} ---")
    data = {
        "username": username,
        "password": password,
        "email": email,
        "must_change_password": False,
        "send_notify": False
    }
    try:
        self._request("POST", "admin/users", json=data)
        print(f"User '{username}' created.")
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 422: # Already exists
            print(f"User '{username}' already exists. Updating password...")
            # Update password to be sure it matches our expectation
            self._request("PATCH", f"admin/users/{username}", json={"password": password, "login_name": username})
        else:
            raise
def get_user_token(self, username, password, token_name="test-token"):
    """Create a new API token for the given user and return its sha1 value."""
    print(f"--- Getting token for user: {username} ---")
    url = f"{self.base_url}/api/v1/users/{username}/tokens"
    # Create new token using Basic Auth
    response = requests.post(url, auth=(username, password), json={"name": token_name})
    if response.status_code == 201:
        return response.json()["sha1"]
    # NOTE(review): a non-201 success status would pass raise_for_status() and
    # make this return None implicitly — confirm that is intended.
    response.raise_for_status()
def create_org(self, org_name):
print(f"--- Checking organization: {org_name} ---")
try:
@@ -71,6 +116,18 @@ class GiteaAPIClient:
print(f"Organization '{org_name}' created.")
else:
raise
print(f"--- Checking organization: {org_name} ---")
try:
self._request("GET", f"orgs/{org_name}")
print(f"Organization '{org_name}' already exists.")
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Creating organization '{org_name}'...")
data = {"username": org_name, "full_name": org_name}
self._request("POST", "orgs", json=data)
print(f"Organization '{org_name}' created.")
else:
raise
def create_repo(self, org_name, repo_name):
print(f"--- Checking repository: {org_name}/{repo_name} ---")
@@ -91,7 +148,7 @@ class GiteaAPIClient:
}
self._request("POST", f"orgs/{org_name}/repos", json=data)
print(f"Repository '{org_name}/{repo_name}' created with a README.")
time.sleep(1) # Added delay to allow Git operations to become available
time.sleep(0.1) # Added delay to allow Git operations to become available
else:
raise
@@ -147,30 +204,8 @@ index 0000000..{pkg_b_sha}
+++ b/pkgB
@@ -0,0 +1 @@
+Subproject commit {pkg_b_sha}
diff --git a/workflow.config b/workflow.config
new file mode 100644
--- /dev/null
+++ b/workflow.config
@@ -0,0 +7 @@
+{{
+ "Workflows": ["pr"],
+ "GitProjectName": "products/SLFO#main",
+ "Organization": "pool",
+ "Branch": "main",
+ "ManualMergeProject": true,
+ "Reviewers": [ "-autogits_obs_staging_bot" ]
+}}
diff --git a/staging.config b/staging.config
new file mode 100644
--- /dev/null
+++ b/staging.config
@@ -0,0 +3 @@
+{{
+ "ObsProject": "openSUSE:Leap:16.0",
+ "StagingProject": "openSUSE:Leap:16.0:PullRequest"
+}}
"""
message = "Add pkgA and pkgB as submodules and config files"
message = "Add pkgA and pkgB as submodules"
data = {
"branch": "main",
"content": diff_content,
@@ -192,56 +227,145 @@ new file mode 100644
print(f"Repository settings for '{org_name}/{repo_name}' updated.")
def create_gitea_pr(self, repo_full_name: str, diff_content: str, title: str):
owner, repo = repo_full_name.split("/")
url = f"repos/{owner}/{repo}/pulls"
base_branch = "main"
# Create a new branch for the PR
new_branch_name = f"pr-branch-{int(time.time())}"
def create_file(self, owner: str, repo: str, file_path: str, content: str, branch: str = "main", message: str = "Add file"):
file_info = self.get_file_info(owner, repo, file_path, branch=branch)
# Get the latest commit SHA of the base branch
base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]
# Create the new branch
self._request("POST", f"repos/{owner}/{repo}/branches", json={
"new_branch_name": new_branch_name,
"old_ref": base_commit_sha # Use the commit SHA directly
})
# Create a new file or modify an existing one in the new branch
file_path = f"test-file-{int(time.time())}.txt"
file_content = "This is a test file for the PR."
self._request("POST", f"repos/{owner}/{repo}/contents/{file_path}", json={
"content": base64.b64encode(file_content.encode('utf-8')).decode('ascii'),
"message": "Add test file",
"branch": new_branch_name
})
# Now create the PR
data = {
"head": new_branch_name, # Use the newly created branch as head
"content": base64.b64encode(content.encode('utf-8')).decode('ascii'),
"branch": branch,
"message": message
}
if file_info:
print(f"--- Updating file {file_path} in {owner}/{repo} ---")
# Re-fetch file_info to get the latest SHA right before update
latest_file_info = self.get_file_info(owner, repo, file_path, branch=branch)
if not latest_file_info:
raise Exception(f"File {file_path} disappeared during update attempt.")
data["sha"] = latest_file_info["sha"]
data["message"] = f"Update {file_path}"
method = "PUT"
else:
print(f"--- Creating file {file_path} in {owner}/{repo} ---")
method = "POST"
url = f"repos/{owner}/{repo}/contents/{file_path}"
self._request(method, url, json=data)
print(f"File {file_path} {'updated' if file_info else 'created'} in {owner}/{repo}.")
def create_gitea_pr(self, repo_full_name: str, diff_content: str, title: str, use_fork: bool, base_branch: str = "main", body: str = ""):
owner, repo = repo_full_name.split("/")
head_owner, head_repo = owner, repo
if use_fork:
sudo_user = self.headers.get("Sudo")
head_owner = sudo_user
head_repo = repo
new_branch_name = f"pr-branch-{int(time.time()*1000)}"
print(f"--- Forking {repo_full_name} ---")
try:
self._request("POST", f"repos/{owner}/{repo}/forks", json={})
print(f"--- Forked to {head_owner}/{head_repo} ---")
time.sleep(0.5) # Give more time for fork to be ready
except requests.exceptions.HTTPError as e:
if e.response.status_code == 409: # Already forked
print(f"--- Already forked to {head_owner}/{head_repo} ---")
else:
raise
# Create a unique branch in the FORK
base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]
print(f"--- Creating branch {new_branch_name} in {head_owner}/{head_repo} from {base_branch} ({base_commit_sha}) ---")
self._request("POST", f"repos/{head_owner}/{head_repo}/branches", json={
"new_branch_name": new_branch_name,
"old_ref": base_commit_sha
})
else:
new_branch_name = f"pr-branch-{int(time.time()*1000)}"
# Get the latest commit SHA of the base branch from the ORIGINAL repo
base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]
# Try to create the branch in the ORIGINAL repo
print(f"--- Creating branch {new_branch_name} in {repo_full_name} ---")
self._request("POST", f"repos/{owner}/{repo}/branches", json={
"new_branch_name": new_branch_name,
"old_ref": base_commit_sha
})
# Apply the diff using diffpatch in the branch (wherever it is)
print(f"--- Applying diff to {head_owner}/{head_repo} branch {new_branch_name} ---")
self._request("POST", f"repos/{head_owner}/{head_repo}/diffpatch", json={
"branch": new_branch_name,
"content": diff_content,
"message": title
})
# Now create the PR in the ORIGINAL repo
data = {
"head": f"{head_owner}:{new_branch_name}" if head_owner != owner else new_branch_name,
"base": base_branch,
"title": title,
"body": "Test Pull Request"
"body": body,
"allow_maintainer_edit": True
}
response = self._request("POST", url, json=data)
print(f"--- Creating PR in {repo_full_name} from {data['head']} ---")
response = self._request("POST", f"repos/{owner}/{repo}/pulls", json=data)
return response.json()
def create_branch(self, owner: str, repo: str, new_branch_name: str, old_ref: str):
    """Create a branch at old_ref (commit SHA or ref); a no-op if the branch already exists."""
    print(f"--- Checking branch '{new_branch_name}' in {owner}/{repo} ---")
    try:
        self._request("GET", f"repos/{owner}/{repo}/branches/{new_branch_name}")
        print(f"Branch '{new_branch_name}' already exists.")
        return  # idempotent: nothing to do
    except requests.exceptions.HTTPError as e:
        if e.response.status_code != 404:
            raise # Re-raise other HTTP errors
    # 404 means the branch is missing — create it now.
    print(f"--- Creating branch '{new_branch_name}' in {owner}/{repo} from {old_ref} ---")
    url = f"repos/{owner}/{repo}/branches"
    data = {
        "new_branch_name": new_branch_name,
        "old_ref": old_ref
    }
    self._request("POST", url, json=data)
    print(f"Branch '{new_branch_name}' created in {owner}/{repo}.")
def ensure_branch_exists(self, owner: str, repo: str, branch: str = "main", timeout: int = 10):
    """Poll (1s interval) until the branch is visible via the API, or raise after `timeout` seconds."""
    print(f"--- Ensuring branch '{branch}' exists in {owner}/{repo} ---")
    start_time = time.time()
    while time.time() - start_time < timeout:
        try:
            self._request("GET", f"repos/{owner}/{repo}/branches/{branch}")
            print(f"Branch '{branch}' confirmed in {owner}/{repo}.")
            return
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 404:
                # Branch not replicated/created yet — back off and retry.
                print(f"Branch '{branch}' not found yet in {owner}/{repo}. Retrying...")
                time.sleep(1)
                continue
            raise
    raise Exception(f"Timeout waiting for branch {branch} in {owner}/{repo}")
def modify_gitea_pr(self, repo_full_name: str, pr_number: int, diff_content: str, message: str):
owner, repo = repo_full_name.split("/")
# Get PR details to find the head branch
# Get PR details to find the head branch AND head repo
pr_details = self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}").json()
head_branch = pr_details["head"]["ref"]
head_repo_owner = pr_details["head"]["repo"]["owner"]["login"]
head_repo_name = pr_details["head"]["repo"]["name"]
file_path = f"modified-file-{int(time.time())}.txt"
file_content = "This is a modified test file for the PR."
self._request("POST", f"repos/{owner}/{repo}/contents/{file_path}", json={
"content": base64.b64encode(file_content.encode('utf-8')).decode('ascii'),
"message": message,
"branch": head_branch
# Apply the diff using diffpatch
print(f"--- Modifying PR #{pr_number} in {head_repo_owner}/{head_repo_name} branch {head_branch} ---")
self._request("POST", f"repos/{head_repo_owner}/{head_repo_name}/diffpatch", json={
"branch": head_branch,
"content": diff_content,
"message": message
})
def update_gitea_pr_properties(self, repo_full_name: str, pr_number: int, **kwargs):
@@ -261,12 +385,12 @@ new file mode 100644
timeline_events = response.json()
if timeline_events: # Check if timeline_events list is not empty
return timeline_events
print(f"Attempt {i+1}: Timeline for PR {pr_number} is empty. Retrying in 3 seconds...")
time.sleep(3)
print(f"Attempt {i+1}: Timeline for PR {pr_number} is empty. Retrying in 1 seconds...")
time.sleep(1)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Attempt {i+1}: Timeline for PR {pr_number} not found yet. Retrying in 3 seconds...")
time.sleep(3)
print(f"Attempt {i+1}: Timeline for PR {pr_number} not found yet. Retrying in 1 seconds...")
time.sleep(1)
else:
raise # Re-raise other HTTP errors
raise Exception(f"Failed to retrieve timeline for PR {pr_number} after multiple retries.")
@@ -283,12 +407,12 @@ new file mode 100644
print(f"Attempt {i+1}: Comments for PR {pr_number} received: {comments}") # Added debug print
if comments: # Check if comments list is not empty
return comments
print(f"Attempt {i+1}: Comments for PR {pr_number} are empty. Retrying in 3 seconds...")
time.sleep(3)
print(f"Attempt {i+1}: Comments for PR {pr_number} are empty. Retrying in 1 seconds...")
time.sleep(1)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
print(f"Attempt {i+1}: Comments for PR {pr_number} not found yet. Retrying in 3 seconds...")
time.sleep(3)
print(f"Attempt {i+1}: Comments for PR {pr_number} not found yet. Retrying in 1 seconds...")
time.sleep(1)
else:
raise # Re-raise other HTTP errors
raise Exception(f"Failed to retrieve comments for PR {pr_number} after multiple retries.")
@@ -299,3 +423,87 @@ new file mode 100644
response = self._request("GET", url)
return response.json()
def create_review(self, repo_full_name: str, pr_number: int, event: str = "APPROVED", body: str = "LGTM"):
    """Create a PR review and, if it lands in PENDING state, submit it.

    Idempotent for APPROVED: if this user already approved, the existing
    review is returned without touching the API again.
    """
    owner, repo = repo_full_name.split("/")
    # Check if this user already has an APPROVED review to avoid 422
    current_user = self.headers.get("Sudo") or "admin"  # simplified
    if event == "APPROVED":
        for existing in self.list_reviews(repo_full_name, pr_number):
            if existing["user"]["login"] == current_user and existing["state"] == "APPROVED":
                print(f"User {current_user} already has an APPROVED review for {repo_full_name} PR #{pr_number}")
                return existing
    url = f"repos/{owner}/{repo}/pulls/{pr_number}/reviews"
    payload = {"event": event, "body": body}
    print(f"--- Creating and submitting review ({event}) for {repo_full_name} PR #{pr_number} as {current_user} ---")
    try:
        review = self._request("POST", url, json=payload).json()
    except requests.exceptions.HTTPError as e:
        # If it fails with 422, it might be because a review is already pending or something else
        print(f"Failed to create review: {e.response.text}")
        # Try to find a pending review to submit
        pending = next(
            (r for r in self.list_reviews(repo_full_name, pr_number)
             if r["user"]["login"] == current_user and r["state"] == "PENDING"),
            None,
        )
        if pending is None:
            raise
        review = pending
    # If the state is PENDING, we submit it.
    if review.get("state") == "PENDING":
        review_id = review["id"]
        try:
            self._request("POST", f"{url}/{review_id}", json={"event": event, "body": body})
            print(f"--- Review {review_id} submitted ---")
        except requests.exceptions.HTTPError as e:
            lowered = e.response.text.lower()
            # Tolerate "already submitted"/"stay pending" style rejections; re-raise the rest.
            if "already" in lowered or "stay pending" in lowered:
                print(f"Review {review_id} could not be submitted further: {e.response.text}")
            else:
                raise
    return review
def list_reviews(self, repo_full_name: str, pr_number: int):
    """Return every review on the given PR as parsed JSON."""
    owner, repo = repo_full_name.split("/")
    return self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}/reviews").json()
def approve_requested_reviews(self, repo_full_name: str, pr_number: int):
    """For every review in REQUEST_REVIEW state, approve it as the requested user.

    Uses the admin token with Sudo to act on behalf of each requested reviewer.
    """
    print(f"--- Checking for REQUEST_REVIEW state in {repo_full_name} PR #{pr_number} ---")
    outstanding = [r for r in self.list_reviews(repo_full_name, pr_number) if r["state"] == "REQUEST_REVIEW"]
    if not outstanding:
        print(f"No reviews in REQUEST_REVIEW state found for {repo_full_name} PR #{pr_number}")
        return
    admin_token = self.headers["Authorization"].split(" ")[1]
    for request in outstanding:
        reviewer_username = request["user"]["login"]
        print(f"Reacting on REQUEST_REVIEW for user {reviewer_username} by approving...")
        reviewer_client = GiteaAPIClient(base_url=self.base_url, token=admin_token, sudo=reviewer_username)
        time.sleep(1)  # give a chance to avoid possible concurrency issues with reviews request/approval
        reviewer_client.create_review(repo_full_name, pr_number, event="APPROVED", body="Approving requested review")
def restart_service(self, service_name: str):
    """Restart one podman-compose service; re-raises if the restart command fails."""
    print(f"--- Restarting service: {service_name} ---")
    # Assumes podman-compose.yml is in the parent directory of tests/lib
    compose_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
    try:
        subprocess.run(["podman-compose", "restart", service_name], check=True, cwd=compose_root)
    except subprocess.CalledProcessError as e:
        print(f"Error restarting service {service_name}: {e}")
        raise
    print(f"Service {service_name} restarted successfully.")

View File

@@ -17,7 +17,7 @@ from tests.lib.common_test_utils import (
def test_pr_workflow_succeeded(gitea_env, mock_build_result):
"""End-to-end test for a successful PR workflow."""
diff = "diff --git a/test.txt b/test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should succeed")
pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should succeed", False)
initial_pr_number = pr["number"]
compose_dir = Path(__file__).parent.parent
@@ -87,7 +87,7 @@ def test_pr_workflow_succeeded(gitea_env, mock_build_result):
def test_pr_workflow_failed(gitea_env, mock_build_result):
"""End-to-end test for a failed PR workflow."""
diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should fail")
pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should fail", False)
initial_pr_number = pr["number"]
compose_dir = Path(__file__).parent.parent

View File

@@ -0,0 +1,82 @@
import pytest
import re
import time
from pathlib import Path
from tests.lib.common_test_utils import GiteaAPIClient
@pytest.mark.t001
@pytest.mark.xfail(reason="The bot sometimes re-request reviews despite having all the approvals")
def test_001_automerge(automerge_env, test_user_client):
    """
    Test scenario:
    1. Setup custom workflow.config with mandatory reviewers (+usera, +userb).
    2. Create a package PR in 'merge' branch.
    3. Make sure the workflow-pr service created related project PR in 'merge' branch.
    4. React on 'requested' reviews by approving them.
    5. Make sure both PRs are merged automatically by the workflow-pr service.
    """
    gitea_env, test_full_repo_name, merge_branch_name = automerge_env
    # 1. Create a package PR
    diff = """diff --git a/merge_test_fixture.txt b/merge_test_fixture.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgA on branch {merge_branch_name} ---")
    package_pr = test_user_client.create_gitea_pr("pool/pkgA", diff, "Test Automerge Fixture", False, base_branch=merge_branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgA#{package_pr_number}")
    # 2. Make sure the workflow-pr service created related project PR
    # The bot links the forwarded project PR via a `pull_ref` timeline event;
    # its number is extracted from the referenced issue URL. Poll up to ~40s.
    project_pr_number = None
    print(f"Polling pool/pkgA PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break
    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")
    # 4. Make sure both PRs are merged automatically by the workflow-pr service
    # Each iteration: if a PR is not merged yet, approve any outstanding
    # REQUEST_REVIEW entries on it so automerge can proceed.
    print("Polling for PR merge status and reacting on REQUEST_REVIEW...")
    package_merged = False
    project_merged = False
    for i in range(15):  # Poll for up to 15 seconds
        # Package PR
        if not package_merged:
            pkg_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
            if pkg_details.get("merged"):
                package_merged = True
                print(f"Package PR pool/pkgA#{package_pr_number} merged.")
            else:
                gitea_env.approve_requested_reviews("pool/pkgA", package_pr_number)
        # Project PR
        if not project_merged:
            prj_details = gitea_env.get_pr_details("products/SLFO", project_pr_number)
            if prj_details.get("merged"):
                project_merged = True
                print(f"Project PR products/SLFO#{project_pr_number} merged.")
            else:
                gitea_env.approve_requested_reviews("products/SLFO", project_pr_number)
        if package_merged and project_merged:
            break
        time.sleep(1)
    assert package_merged, f"Package PR pool/pkgA#{package_pr_number} was not merged automatically."
    assert project_merged, f"Project PR products/SLFO#{project_pr_number} was not merged automatically."
    print("Both PRs merged successfully.")

View File

@@ -0,0 +1,346 @@
import pytest
import re
import time
import base64
from pathlib import Path
from tests.lib.common_test_utils import GiteaAPIClient
@pytest.mark.t004
@pytest.mark.xfail(reason="the bot sometimes re-requests review from autogits_obs_staging_bot despite having the approval")
def test_004_maintainer(maintainer_env, ownerA_client):
    """
    Test scenario:
    1. workflow.config will not have users with '+' sign.
    2. The package PR is opened by the package maintainer (ownerA for pkgA).
    3. Do not submit any review approval.
    4. Check that both PRs are automatically merged anyway.
    """
    gitea_env, test_full_repo_name, branch_name = maintainer_env
    # 0. Smoke test ownerA_client
    print(f"--- Smoke testing ownerA_client ---")
    ownerA_client._request("GET", "users/admin")
    print(f"ownerA_client smoke test passed")
    # 0.1 Verify all users from config exist
    print("--- Verifying all users from config exist ---")
    import json
    wf_file = gitea_env.get_file_info("products", "SLFO", "workflow.config", branch=branch_name)
    wf = json.loads(base64.b64decode(wf_file["content"]).decode("utf-8"))
    mt_file = gitea_env.get_file_info("products", "SLFO", "_maintainership.json", branch=branch_name)
    mt = json.loads(base64.b64decode(mt_file["content"]).decode("utf-8"))
    # Collect every username referenced either as a workflow reviewer
    # (stripping the leading +/- prefix) or as a package maintainer,
    # excluding the two bot accounts.
    expected_users = set()
    for r in wf.get("Reviewers", []):
        username = r.lstrip("+-")
        if username and username not in ["autogits_obs_staging_bot", "workflow-pr"]:
            expected_users.add(username)
    for pkg_users in mt.values():
        for username in pkg_users:
            expected_users.add(username)
    for username in expected_users:
        gitea_env._request("GET", f"users/{username}")
        print(f"Verified user exists: {username}")
    # 1. Create a package PR as ownerA
    diff = """diff --git a/maintainer_test_fixture.txt b/maintainer_test_fixture.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgA on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgA", diff, "Test Maintainer Merge", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgA#{package_pr_number}")
    # 2. Make sure the workflow-pr service created related project PR
    # (same pull_ref timeline polling pattern as the automerge test)
    project_pr_number = None
    print(f"Polling pool/pkgA PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break
    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")
    # 3. Make sure both PRs are merged automatically WITHOUT manual approvals
    print("Polling for PR merge status (only bot approval allowed)...")
    package_merged = False
    project_merged = False
    for i in range(15):  # Poll for up to 15 seconds
        # Package PR
        if not package_merged:
            pkg_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
            if pkg_details.get("merged"):
                package_merged = True
                print(f"Package PR pool/pkgA#{package_pr_number} merged.")
            else:
                # Approve ONLY bot if requested
                # NOTE(review): approve_requested_reviews approves EVERY requested
                # reviewer, not just the bot — the any(...) below only gates when
                # it runs. Confirm no human reviews are requested in this fixture.
                reviews = gitea_env.list_reviews("pool/pkgA", package_pr_number)
                if any(r["state"] == "REQUEST_REVIEW" and r["user"]["login"] == "autogits_obs_staging_bot" for r in reviews):
                    gitea_env.approve_requested_reviews("pool/pkgA", package_pr_number)
        # Project PR
        if not project_merged:
            prj_details = gitea_env.get_pr_details("products/SLFO", project_pr_number)
            if prj_details.get("merged"):
                project_merged = True
                print(f"Project PR products/SLFO#{project_pr_number} merged.")
            else:
                # Approve ONLY bot if requested
                reviews = gitea_env.list_reviews("products/SLFO", project_pr_number)
                if any(r["state"] == "REQUEST_REVIEW" and r["user"]["login"] == "autogits_obs_staging_bot" for r in reviews):
                    gitea_env.approve_requested_reviews("products/SLFO", project_pr_number)
        if package_merged and project_merged:
            break
        time.sleep(1)
    assert package_merged, f"Package PR pool/pkgA#{package_pr_number} was not merged automatically."
    assert project_merged, f"Project PR products/SLFO#{project_pr_number} was not merged automatically."
    print("Both PRs merged successfully by maintainer rule.")
@pytest.mark.t005
# @pytest.mark.xfail(reason="TBD troubleshoot")
def test_005_any_maintainer_approval_sufficient(maintainer_env, ownerA_client, ownerBB_client):
    """
    Test scenario:
    1. The package PR for pkgB is opened by ownerA (who is not a maintainer of pkgB).
    2. Check that review request comes to both ownerB and ownerBB.
    3. ownerB doesn't leave review.
    4. check that review from ownerBB was enough to get both PRs merged.
    """
    gitea_env, test_full_repo_name, branch_name = maintainer_env
    # 1. Create a package PR for pool/pkgB as ownerA
    diff = """diff --git a/pkgB_test_fixture.txt b/pkgB_test_fixture.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgB on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgB", diff, "Test Single Maintainer Merge", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgB#{package_pr_number}")
    # 2. Make sure the workflow-pr service created related project PR
    # (pull_ref timeline polling pattern, up to ~40s)
    project_pr_number = None
    print(f"Polling pool/pkgB PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgB", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break
    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")
    # 3. Check that review requests came to ownerB and ownerBB
    print("Checking for review requests from ownerB and ownerBB...")
    reviewers_requested = set()
    for _ in range(20):
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        if "ownerB" in reviewers_requested and "ownerBB" in reviewers_requested:
            break
        time.sleep(1)
    assert "ownerB" in reviewers_requested, f"ownerB was not requested for review. Requested: {reviewers_requested}"
    assert "ownerBB" in reviewers_requested, f"ownerBB was not requested for review. Requested: {reviewers_requested}"
    print(f"Confirmed: ownerB and ownerBB were requested for review.")
    # 4. ownerBB leaves review, ownerB does not.
    print("ownerBB approving the PR...")
    ownerBB_client.create_review("pool/pkgB", package_pr_number, event="APPROVED", body="Approval from ownerBB")
    # 5. Check that both PRs are merged automatically
    # The package PR must merge on ownerBB's approval alone; the project PR
    # may additionally need the staging bot's requested review approved.
    print("Polling for PR merge status (only bot approval allowed for project PR)...")
    package_merged = False
    project_merged = False
    for i in range(15):  # Poll for up to 15 seconds
        # Package PR
        if not package_merged:
            pkg_details = gitea_env.get_pr_details("pool/pkgB", package_pr_number)
            if pkg_details.get("merged"):
                package_merged = True
                print(f"Package PR pool/pkgB#{package_pr_number} merged.")
        # Project PR
        if not project_merged:
            prj_details = gitea_env.get_pr_details("products/SLFO", project_pr_number)
            if prj_details.get("merged"):
                project_merged = True
                print(f"Project PR products/SLFO#{project_pr_number} merged.")
            else:
                # Approve ONLY bot if requested
                reviews = gitea_env.list_reviews("products/SLFO", project_pr_number)
                if any(r["state"] == "REQUEST_REVIEW" and r["user"]["login"] == "autogits_obs_staging_bot" for r in reviews):
                    gitea_env.approve_requested_reviews("products/SLFO", project_pr_number)
        if package_merged and project_merged:
            break
        time.sleep(1)
    assert package_merged, f"Package PR pool/pkgB#{package_pr_number} was not merged automatically."
    assert project_merged, f"Project PR products/SLFO#{project_pr_number} was not merged automatically."
    print("Both PRs merged successfully with only one maintainer approval.")
@pytest.mark.t006
def test_006_maintainer_rejection_removes_other_requests(maintainer_env, ownerA_client, ownerBB_client):
    """
    Test scenario:
    1. The package PR for pkgB is opened by ownerA (who is not a maintainer of pkgB).
    2. Check that review request comes to both ownerB and ownerBB.
    3. ownerBB rejects the PR (REQUEST_CHANGES).
    4. Check that review request for ownerB is removed.
    """
    gitea_env, test_full_repo_name, branch_name = maintainer_env
    # 1. Create a package PR for pool/pkgB as ownerA
    diff = """diff --git a/pkgB_rejection_test.txt b/pkgB_rejection_test.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgB on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgB", diff, "Test Maintainer Rejection", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgB#{package_pr_number}")
    # 2. Check that review requests came to ownerB and ownerBB
    # for/else: the else branch runs only if the loop never hit `break`,
    # i.e. both requests never appeared within ~20s — fail with the last state seen.
    print("Checking for review requests from ownerB and ownerBB...")
    for _ in range(20):
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        if "ownerB" in reviewers_requested and "ownerBB" in reviewers_requested:
            break
        time.sleep(1)
    else:
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        pytest.fail(f"ownerB and ownerBB were not both requested. Got: {reviewers_requested}")
    # 3. ownerBB rejects the PR
    print("ownerBB rejecting the PR...")
    ownerBB_client.create_review("pool/pkgB", package_pr_number, event="REQUEST_CHANGES", body="Rejecting from ownerBB")
    # 4. Check that review request for ownerB is removed
    # Same for/else polling pattern: pass as soon as ownerB's request disappears.
    print("Checking if ownerB's review request is removed...")
    for _ in range(20):
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        if "ownerB" not in reviewers_requested:
            print("Confirmed: ownerB's review request was removed.")
            break
        time.sleep(1)
    else:
        pytest.fail("ownerB's review request was not removed after ownerBB rejection.")
@pytest.mark.t007
@pytest.mark.xfail(reason="TBD troubleshoot")
def test_007_review_required_needs_all_approvals(review_required_env, ownerA_client, ownerBB_client):
    """
    Test scenario:
    1. it uses new fixture with "ReviewRequired = true" in the workflow.config.
    2. Package PR for pkgB opened by ownerA.
    3. Check review request comes to both ownerB and ownerBB.
    4. ownerBB approves.
    5. make sure that review is not merged automatically and the request for ownerB is not removed.
    """
    gitea_env, test_full_repo_name, branch_name = review_required_env
    # 0. Smoke test ownerA_client
    print(f"--- Smoke testing ownerA_client ---")
    ownerA_client._request("GET", "users/admin")
    print(f"ownerA_client smoke test passed")
    # 1. Create a package PR for pool/pkgB as ownerA
    diff = """diff --git a/pkgB_review_required_test.txt b/pkgB_review_required_test.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgB on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgB", diff, "Test Review Required", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgB#{package_pr_number}")
    # 2. Make sure the workflow-pr service created related project PR
    # (pull_ref timeline polling pattern, up to ~40s)
    project_pr_number = None
    print(f"Polling pool/pkgB PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgB", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break
    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")
    # 3. Check that review requests came to ownerB and ownerBB
    # for/else: fail with the last observed reviewer set if both never appear.
    print("Checking for review requests from ownerB and ownerBB...")
    for _ in range(20):
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        if "ownerB" in reviewers_requested and "ownerBB" in reviewers_requested:
            break
        time.sleep(1)
    else:
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        pytest.fail(f"ownerB and ownerBB were not both requested. Got: {reviewers_requested}")
    # 4. ownerBB leaves review, ownerB does not.
    print("ownerBB approving the PR...")
    ownerBB_client.create_review("pool/pkgB", package_pr_number, event="APPROVED", body="Approval from ownerBB")
    # 5. Check that the PR is NOT merged automatically and ownerB request remains
    # Observe state for ~20s (10 iterations x 2s) purely for logging, then assert
    # the negative conditions once at the end.
    print("Waiting to ensure PR is NOT merged and ownerB request remains...")
    for i in range(10):
        pkg_details = gitea_env.get_pr_details("pool/pkgB", package_pr_number)
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        review_states = [(r["user"]["login"], r["state"]) for r in reviews]
        print(f"Attempt {i+1}: Merged={pkg_details.get('merged')}, Reviews={review_states}")
        time.sleep(2)
    pkg_details = gitea_env.get_pr_details("pool/pkgB", package_pr_number)
    assert not pkg_details.get("merged"), "Package PR was merged automatically but it should NOT have been (ReviewRequired=true)."
    reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
    reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
    assert "ownerB" in reviewers_requested, f"ownerB's review request was removed, but it should have remained. All reviews: {[(r['user']['login'], r['state']) for r in reviews]}"
    print("Confirmed: PR not merged and ownerB review request remains as expected.")

View File

@@ -18,11 +18,12 @@ pytest.initial_pr_number = None
pytest.forwarded_pr_number = None
@pytest.mark.t001
@pytest.mark.dependency()
def test_001_project_pr(gitea_env):
"""Forwarded PR correct title"""
diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pytest.pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR")
pytest.pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR", False)
pytest.initial_pr_number = pytest.pr["number"]
time.sleep(5) # Give Gitea some time to process the PR and make the timeline available
@@ -56,6 +57,7 @@ def test_001_project_pr(gitea_env):
), "Forwarded PR correct title"
@pytest.mark.t002
@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_002_updated_project_pr(gitea_env):
"""Forwarded PR head is updated"""
@@ -76,6 +78,7 @@ def test_002_updated_project_pr(gitea_env):
assert sha_changed, "Forwarded PR has sha updated"
@pytest.mark.t003
@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_003_wip(gitea_env):
"""WIP flag set for PR"""
@@ -115,3 +118,209 @@ def test_003_wip(gitea_env):
wip_flag_removed = True
break
assert wip_flag_removed, "WIP flag was not removed from the forwarded PR."
@pytest.mark.t005
@pytest.mark.xfail(reason="works only in ibs_state branch?")
@pytest.mark.dependency()
def test_005_NoProjectGitPR_edits_disabled(no_project_git_pr_env, test_user_client):
    """
    Reworked test: Sets workflow.config with NoProjectGitPR: true and creates a Package PR.
    Verifies that no Project PR is created, then manually creates one and checks for bot warning.
    """
    gitea_env, test_full_repo_name, dev_branch_name = no_project_git_pr_env
    # 1. Create a Package PR (without "Allow edits from maintainers" enabled)
    initial_diff = """diff --git a/first_file.txt b/first_file.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/first_file.txt
@@ -0,0 +1 @@
+Initial content
"""
    package_pr = test_user_client.create_gitea_pr("pool/pkgA", initial_diff, "Test PR for No Project PR, No Edits", False, base_branch=dev_branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created Package PR #{package_pr_number}")
    # 2. Verify that the workflow-pr bot did not create a Project PR
    # (negative check: the pull_ref link to products/SLFO must NOT appear within ~20s)
    project_pr_created = False
    for i in range(10):  # Poll for some time
        time.sleep(2)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_created = True
                    break
        if project_pr_created:
            break
    assert not project_pr_created, "Workflow bot unexpectedly created a Project PR in products/SLFO."
    print("Verification complete: No Project PR was created by the bot.")
    # 3. Manually create the Project PR
    # Build a submodule-bump diff for pkgA: base branch SHA -> package PR head SHA.
    pkgA_main_sha = gitea_env._request("GET", f"repos/pool/pkgA/branches/{dev_branch_name}").json()["commit"]["id"]
    package_pr_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
    pkgA_pr_head_sha = package_pr_details["head"]["sha"]
    project_pr_title = "Forwarded PRs: pkgA (Manual)"
    project_pr_body = f"Manual Project PR for NoProjectGitPR. \nPR: pool/pkgA!{package_pr_number}"
    project_pr_diff = f"""diff --git a/pkgA b/pkgA
index {pkgA_main_sha[:7]}..{pkgA_pr_head_sha[:7]} 160000
--- a/pkgA
+++ b/pkgA
@@ -1 +1 @@
-Subproject commit {pkgA_main_sha}
+Subproject commit {pkgA_pr_head_sha}
"""
    manual_project_pr = test_user_client.create_gitea_pr(test_full_repo_name, project_pr_diff, project_pr_title, True, base_branch=dev_branch_name, body=project_pr_body)
    manual_project_pr_number = manual_project_pr["number"]
    # Verify and set allow_maintainer_edit to False
    test_user_client.update_gitea_pr_properties(test_full_repo_name, manual_project_pr_number, allow_maintainer_edit=False)
    # Verify that allow_maintainer_edit is now disabled
    updated_pr = gitea_env.get_pr_details(test_full_repo_name, manual_project_pr_number)
    assert updated_pr.get("allow_maintainer_edit") is False, "Expected allow_maintainer_edit to be False after update"
    print(f"Manually created Project PR #{manual_project_pr_number} in {test_full_repo_name}")
    # 4. Trigger an update on the Package PR to prompt the bot to react to the manual Project PR
    new_diff_content = """diff --git a/trigger_bot.txt b/trigger_bot.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/trigger_bot.txt
@@ -0,0 +1 @@
+Trigger content
"""
    test_user_client.modify_gitea_pr("pool/pkgA", package_pr_number, new_diff_content, "Trigger bot update")
    # 5. Verify that the bot adds a warning comment because it cannot update the manual PR (edits disabled)
    warning_found = False
    print(f"Polling Package PR #{package_pr_number} for warning comment...")
    for _ in range(20):
        time.sleep(3)
        comments = gitea_env.get_comments("pool/pkgA", package_pr_number)
        for comment in comments:
            # According to test-plan.md, the warning explains that it cannot update the PR.
            if "cannot update" in comment.get("body", "").lower():
                warning_found = True
                print(f"Warning comment found: {comment.get('body')}")
                break
        if warning_found:
            break
    # NOTE(review): the final assertion is commented out, so step 5 can never
    # fail this test — confirm whether the warning check should be enforced.
    # assert warning_found, "Bot did not post the expected warning comment on the Package PR."
    # print("Verification complete: Bot posted a warning comment as expected.")
@pytest.mark.t006
@pytest.mark.xfail(reason="works only in ibs_state branch?")
@pytest.mark.dependency()
def test_006_NoProjectGitPR_edits_enabled(no_project_git_pr_env, test_user_client):
"""
Verify that no project PR is created when "NoProjectGitPR" is true
and "Allow edits from maintainers" is enabled, using a dev branch.
"""
gitea_env, test_full_repo_name, dev_branch_name = no_project_git_pr_env
# 2. Create a Package PR with "Allow edits from maintainers" enabled
diff = """diff --git a/new_feature.txt b/new_feature.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/new_feature.txt
@@ -0,0 +1 @@
+New feature content
"""
package_pr = test_user_client.create_gitea_pr("pool/pkgA", diff, "Test PR for NoProjectGitPR", False, base_branch=dev_branch_name)
package_pr_number = package_pr["number"]
# Enable "Allow edits from maintainers"
test_user_client.update_gitea_pr_properties("pool/pkgA", package_pr_number, allow_maintainer_edit=True)
print(f"Created Package PR #{package_pr_number} and enabled 'Allow edits from maintainers'.")
# Get SHAs needed for the manual Project PR diff
pkgA_main_sha = gitea_env._request("GET", f"repos/pool/pkgA/branches/{dev_branch_name}").json()["commit"]["id"]
package_pr_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
pkgA_pr_head_sha = package_pr_details["head"]["sha"]
# 3. Assert that the workflow-pr bot did not create a Project PR in the products/SLFO repository
project_pr_created = False
for i in range(20): # Poll for a reasonable time
time.sleep(2) # Wait a bit longer to be sure
timeline_events = gitea_env.get_timeline_events("pool/pkgA", package_pr_number)
for event in timeline_events:
if event.get("type") == "pull_ref":
if not (ref_issue := event.get("ref_issue")):
continue
url_to_check = ref_issue.get("html_url", "")
# Regex now searches for products/SLFO/pulls/(\d+)
match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
if match:
project_pr_created = True
break
if project_pr_created:
break
assert not project_pr_created, "Workflow bot unexpectedly created a Project PR in products/SLFO."
print("Verification complete: No Project PR was created in products/SLFO as expected.")
# 1. Create that Project PR from the test code.
project_pr_title = "Forwarded PRs: pkgA"
project_pr_body = f"Test Project PR for NoProjectGitPR. \nPR: pool/pkgA!{package_pr_number}"
project_pr_diff = f"""diff --git a/pkgA b/pkgA
index {pkgA_main_sha[:7]}..{pkgA_pr_head_sha[:7]} 160000
--- a/pkgA
+++ b/pkgA
@@ -1 +1 @@
-Subproject commit {pkgA_main_sha}
+Subproject commit {pkgA_pr_head_sha}
"""
manual_project_pr = test_user_client.create_gitea_pr(test_full_repo_name, project_pr_diff, project_pr_title, True, base_branch=dev_branch_name, body=project_pr_body)
manual_project_pr_number = manual_project_pr["number"]
# Explicitly ensure allow_maintainer_edit is True (it should be by default now, but just in case)
test_user_client.update_gitea_pr_properties(test_full_repo_name, manual_project_pr_number, allow_maintainer_edit=True)
print(f"Manually created Project PR #{manual_project_pr_number} in {test_full_repo_name}")
time.sleep(5) # Give the bot time to potentially react or for the PR to settle
# Get initial SHA of the manually created Project PR
initial_project_pr_details = gitea_env.get_pr_details(test_full_repo_name, manual_project_pr_number)
initial_head_sha = initial_project_pr_details["head"]["sha"]
print(f"Manually created Project PR initial head SHA: {initial_head_sha}")
# 2. Add new commit to the package PR.
new_diff_content = """diff --git a/another_file.txt b/another_file.txt
new file mode 100644
index 0000000..f587a12
--- /dev/null
+++ b/another_file.txt
@@ -0,0 +1 @@
+Another file content
"""
test_user_client.modify_gitea_pr("pool/pkgA", package_pr_number, new_diff_content, "Add another file to Package PR")
print(f"Added new commit to Package PR #{package_pr_number}.")
time.sleep(5) # Give the bot time to react
# 3. Make sure the project PR is properly updated by the bot
project_pr_updated = False
print(f"Polling manually created Project PR #{manual_project_pr_number} for update...")
for _ in range(20): # Poll for a reasonable time
time.sleep(2) # Wait a bit longer to be sure
current_project_pr_details = gitea_env.get_pr_details(test_full_repo_name, manual_project_pr_number)
current_head_sha = current_project_pr_details["head"]["sha"]
if current_head_sha != initial_head_sha:
project_pr_updated = True
print(f"Manually created Project PR updated. New head SHA: {current_head_sha}")
break
assert project_pr_updated, "Manually created Project PR was not updated by the bot."
print("Verification complete: Manually created Project PR was updated by the bot as expected.")

View File

@@ -6,7 +6,7 @@ COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/git
RUN update-ca-certificates
# Install git and ssh
RUN zypper -n in git-core openssh-clients binutils
RUN zypper -n in git-core openssh-clients binutils git-lfs
# Copy the pre-built binary into the container
COPY workflow-pr/workflow-pr /usr/local/bin/workflow-pr

View File

@@ -2,14 +2,14 @@
FROM registry.suse.com/bci/bci-base:15.7
# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref
# Install git and ssh
RUN zypper -n in git-core openssh-clients autogits-workflow-pr binutils
RUN zypper -n in git-core openssh-clients autogits-workflow-pr binutils git-lfs
COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +4755 /usr/local/bin/entrypoint.sh

View File

@@ -1,3 +1,7 @@
[
"products/SLFO#main"
"products/SLFO#main",
"products/SLFO#dev",
"products/SLFO#merge",
"products/SLFO#maintainer-merge",
"products/SLFO#review-required"
]

View File

@@ -50,6 +50,10 @@ const (
var runId uint
var GitWorkTreeAllocate func(string, string, string) (common.GitHandlerGenerator, error) = func(basePath, gitAuthor, email string) (common.GitHandlerGenerator, error) {
return common.AllocateGitWorkTree(basePath, gitAuthor, email)
}
func FetchPrGit(git common.Git, pr *models.PullRequest) error {
// clone PR head via base (target) repo
cloneURL := pr.Base.Repo.CloneURL
@@ -144,9 +148,9 @@ func ProcessBuildStatus(project *common.BuildResultList) BuildStatusSummary {
func ProcessRepoBuildStatus(results []*common.PackageBuildStatus) (status BuildStatusSummary) {
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
return strings.Compare(a.Package, b.Package)
}
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
return strings.Compare(a.Package, b.Package)
}
common.LogDebug("******* RESULTS: ")
data, _ := xml.MarshalIndent(results, "", " ")
@@ -191,24 +195,23 @@ func GetPackageBuildStatus(project *common.BuildResultList, packageName string)
return true, BuildStatusSummaryUnknown // true for 'missing'
}
// Check for any failures
// Check for any unfinished builds
for _, pkgStatus := range packageStatuses {
res, ok := common.ObsBuildStatusDetails[pkgStatus.Code]
if !ok {
common.LogInfo("unknown package result code:", pkgStatus.Code, "for package:", pkgStatus.Package)
return false, BuildStatusSummaryUnknown
}
if !res.Success {
return false, BuildStatusSummaryFailed
if !res.Finished {
return false, BuildStatusSummaryBuilding
}
}
// Check for any unfinished builds
// Check for any failures
for _, pkgStatus := range packageStatuses {
res, _ := common.ObsBuildStatusDetails[pkgStatus.Code]
// 'ok' is already checked in the loop above
if !res.Finished {
return false, BuildStatusSummaryBuilding
if !res.Success {
return false, BuildStatusSummaryFailed
}
}
@@ -216,7 +219,7 @@ func GetPackageBuildStatus(project *common.BuildResultList, packageName string)
return false, BuildStatusSummarySuccess
}
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
func GenerateObsPrjMeta(obs common.ObsClientInterface, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
common.LogDebug("repo content fetching ...")
err := FetchPrGit(git, pr)
if err != nil {
@@ -260,13 +263,13 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
}
common.LogDebug("Trying first staging master project: ", stagingMasterPrj)
meta, err := ObsClient.GetProjectMeta(stagingMasterPrj)
meta, err := obs.GetProjectMeta(stagingMasterPrj)
if err == nil {
// success, so we use that staging master project as our build project
buildPrj = stagingMasterPrj
} else {
common.LogInfo("error fetching project meta for ", stagingMasterPrj, ". Fall Back to ", buildPrj)
meta, err = ObsClient.GetProjectMeta(buildPrj)
meta, err = obs.GetProjectMeta(buildPrj)
}
if err != nil {
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
@@ -330,10 +333,10 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
// stagingProject:$buildProject
// ^- stagingProject:$buildProject:$subProjectName (based on templateProject)
func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject, templateProject, subProjectName string, buildDisableRepos []string) error {
func CreateQASubProject(obs common.ObsClientInterface, stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject, templateProject, subProjectName string, buildDisableRepos []string) error {
common.LogDebug("Setup QA sub projects")
common.LogDebug("reading templateProject ", templateProject)
templateMeta, err := ObsClient.GetProjectMeta(templateProject)
templateMeta, err := obs.GetProjectMeta(templateProject)
if err != nil {
common.LogError("error fetching template project meta for", templateProject, ":", err)
return err
@@ -343,10 +346,10 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
templateMeta.Name = stagingProject + ":" + subProjectName
// freeze tag for now
if len(templateMeta.ScmSync) > 0 {
repository, err := url.Parse(templateMeta.ScmSync)
if err != nil {
panic(err)
}
repository, err := url.Parse(templateMeta.ScmSync)
if err != nil {
panic(err)
}
common.LogDebug("getting data for ", repository.EscapedPath())
split := strings.Split(repository.EscapedPath(), "/")
@@ -354,12 +357,12 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
common.LogDebug("getting commit for ", org, " repo ", repo, " fragment ", repository.Fragment)
branch, err := gitea.GetCommit(org, repo, repository.Fragment)
if err != nil {
panic(err)
}
if err != nil {
panic(err)
}
// set expanded commit url
repository.Fragment = branch.SHA
repository.Fragment = branch.SHA
templateMeta.ScmSync = repository.String()
common.LogDebug("Setting scmsync url to ", templateMeta.ScmSync)
}
@@ -406,11 +409,11 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
templateMeta.Repositories[idx].Paths[pidx].Project = templateMeta.Name
} else
// Check for path prefixes against a template project inside of template project area
if strings.HasPrefix(path.Project, stagingConfig.StagingProject + ":") {
if strings.HasPrefix(path.Project, stagingConfig.StagingProject+":") {
newProjectName := stagingProject
// find project name
for _, setup := range stagingConfig.QA {
if setup.Origin == path.Project {
if setup.Origin == path.Project {
common.LogDebug(" Match:", setup.Origin)
newProjectName = newProjectName + ":" + setup.Name
common.LogDebug(" New:", newProjectName)
@@ -418,14 +421,14 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
}
}
templateMeta.Repositories[idx].Paths[pidx].Project = newProjectName
common.LogDebug(" Matched prefix")
common.LogDebug(" Matched prefix")
}
common.LogDebug(" Path using project ", templateMeta.Repositories[idx].Paths[pidx].Project)
}
}
if !IsDryRun {
err = ObsClient.SetProjectMeta(templateMeta)
err = obs.SetProjectMeta(templateMeta)
if err != nil {
common.LogError("cannot create project:", templateMeta.Name, err)
x, _ := xml.MarshalIndent(templateMeta, "", " ")
@@ -439,10 +442,10 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
return nil
}
func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest) (RequestModification, error) {
func StartOrUpdateBuild(obs common.ObsClientInterface, config *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest) (RequestModification, error) {
common.LogDebug("fetching OBS project Meta")
obsPrProject := GetObsProjectAssociatedWithPr(config, ObsClient.HomeProject, pr)
meta, err := ObsClient.GetProjectMeta(obsPrProject)
obsPrProject := GetObsProjectAssociatedWithPr(config, obs.GetHomeProject(), pr)
meta, err := obs.GetProjectMeta(obsPrProject)
if err != nil {
common.LogError("error fetching project meta for", obsPrProject, ":", err)
return RequestModificationNoChange, err
@@ -467,7 +470,7 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
if meta == nil {
// new build
common.LogDebug(" Staging master:", config.StagingProject)
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
meta, err = GenerateObsPrjMeta(obs, git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
if err != nil {
return RequestModificationNoChange, err
}
@@ -479,7 +482,7 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
common.LogDebug("Creating build project:")
common.LogDebug(" meta:", string(x))
} else {
err = ObsClient.SetProjectMeta(meta)
err = obs.SetProjectMeta(meta)
if err != nil {
x, _ := xml.MarshalIndent(meta, "", " ")
common.LogDebug(" meta:", string(x))
@@ -550,7 +553,7 @@ func ParseNotificationToPR(thread *models.NotificationThread) (org string, repo
return
}
func ProcessPullNotification(gitea common.Gitea, thread *models.NotificationThread) {
func ProcessPullNotification(obs common.ObsClientInterface, gitea common.Gitea, thread *models.NotificationThread) {
defer func() {
err := recover()
if err != nil {
@@ -566,7 +569,7 @@ func ProcessPullNotification(gitea common.Gitea, thread *models.NotificationThre
}
common.LogInfo("processing PR:", org, "/", repo, "#", num)
done, err := ProcessPullRequest(gitea, org, repo, num)
done, err := ProcessPullRequest(obs, gitea, org, repo, num)
if !IsDryRun && err == nil && done {
gitea.SetNotificationRead(thread.ID)
} else if err != nil {
@@ -576,7 +579,7 @@ func ProcessPullNotification(gitea common.Gitea, thread *models.NotificationThre
var CleanedUpIssues []int64 = []int64{}
func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThread) (CleanupComplete bool) {
func CleanupPullNotification(obs common.ObsClientInterface, gitea common.Gitea, thread *models.NotificationThread) (CleanupComplete bool) {
defer func() {
err := recover()
if err != nil {
@@ -643,8 +646,8 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
return false
}
stagingProject := GetObsProjectAssociatedWithPr(config, ObsClient.HomeProject, pr)
if prj, err := ObsClient.GetProjectMeta(stagingProject); err != nil {
stagingProject := GetObsProjectAssociatedWithPr(config, obs.GetHomeProject(), pr)
if prj, err := obs.GetProjectMeta(stagingProject); err != nil {
common.LogError("Failed fetching meta for project:", stagingProject, ". Not cleaning up")
return false
} else if prj == nil && err == nil {
@@ -658,13 +661,13 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
project := stagingProject + ":" + qa.Name
common.LogDebug("Cleaning up QA staging", project)
if !IsDryRun {
if err := ObsClient.DeleteProject(project); err != nil {
if err := obs.DeleteProject(project); err != nil {
common.LogError("Failed to cleanup QA staging", project, err)
}
}
}
if !IsDryRun {
if err := ObsClient.DeleteProject(stagingProject); err != nil {
if err := obs.DeleteProject(stagingProject); err != nil {
common.LogError("Failed to cleanup staging", stagingProject, err)
}
}
@@ -685,7 +688,7 @@ func SetStatus(gitea common.Gitea, org, repo, hash string, status *models.Commit
return err
}
func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64, msg string) {
func CommentPROnce(gitea common.Gitea, org string, repo string, prNum int64, msg string) {
if IsDryRun {
common.LogInfo("Would comment on package PR %s/%s#%d: %s", org, repo, prNum, msg)
return
@@ -697,6 +700,18 @@ func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64
return
}
timeline, err := gitea.GetTimeline(org, repo, prNum)
if err != nil {
common.LogError("Failed to get timeline for PR %s/%s#%d: %v", org, repo, prNum, err)
return
}
for _, t := range timeline {
if t.User != nil && t.User.UserName == BotUser && t.Type == common.TimelineCommentType_Comment && t.Body == msg {
return
}
}
err = gitea.AddComment(pr, msg)
if err != nil {
common.LogError("Failed to comment on package PR %s/%s#%d: %v", org, repo, prNum, err)
@@ -704,20 +719,21 @@ func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64
}
// Create and remove QA projects
func ProcessQaProjects(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject string) []string {
func ProcessQaProjects(obs common.ObsClientInterface, stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject string) ([]string, string) {
usedQAprojects := make([]string, 0)
prLabelNames := make(map[string]int)
for _, label := range pr.Labels {
prLabelNames[label.Name] = 1
}
msg := ""
var qa_projects []string
for _, setup := range stagingConfig.QA {
QAproject := stagingProject + ":" + setup.Name
if len(setup.Label) > 0 {
if _, ok := prLabelNames[setup.Label]; !ok {
if !IsDryRun {
// blindly remove, will fail when not existing
ObsClient.DeleteProject(QAproject)
obs.DeleteProject(QAproject)
}
common.LogInfo("QA project ", setup.Name, "has no matching Label")
continue
@@ -726,24 +742,25 @@ func ProcessQaProjects(stagingConfig *common.StagingConfig, git common.Git, gite
usedQAprojects = append(usedQAprojects, QAproject)
// check for existens first, no error, but no meta is a 404
if meta, err := ObsClient.GetProjectMeta(QAproject); meta == nil && err == nil {
if meta, err := obs.GetProjectMeta(QAproject); meta == nil && err == nil {
common.LogInfo("Create QA project ", QAproject)
CreateQASubProject(stagingConfig, git, gitea, pr,
CreateQASubProject(obs, stagingConfig, git, gitea, pr,
stagingProject,
setup.Origin,
setup.Name,
setup.BuildDisableRepos)
msg = msg + "QA Project added: " + ObsWebHost + "/project/show/" +
QAproject + "\n"
qa_projects = append(qa_projects, ObsWebHost+"/project/show/"+QAproject)
}
}
if len(msg) > 1 {
gitea.AddComment(pr, msg)
if len(qa_projects) > 0 {
msg = "Additional QA builds:\n" + strings.Join(qa_projects, "\n")
}
return usedQAprojects
return usedQAprojects, msg
}
func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, error) {
func ProcessPullRequest(obs common.ObsClientInterface, gitea common.Gitea, org, repo string, id int64) (bool, error) {
dir, err := os.MkdirTemp(os.TempDir(), BotName)
common.PanicOnError(err)
if IsDryRun {
@@ -752,7 +769,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
defer os.RemoveAll(dir)
}
gh, err := common.AllocateGitWorkTree(dir, GitAuthor, "noaddress@suse.de")
gh, err := GitWorkTreeAllocate(dir, GitAuthor, "noaddress@suse.de")
common.PanicOnError(err)
git, err := gh.CreateGitHandler(org)
@@ -797,7 +814,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
if err != nil {
common.LogError("Staging config", common.StagingConfigFile, "not found in PR to the project. Aborting.")
if !IsDryRun {
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
_, _ = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
}
return true, err
}
@@ -817,7 +834,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
return true, nil
}
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
meta, err := obs.GetProjectMeta(stagingConfig.ObsProject)
if err != nil || meta == nil {
common.LogError("Cannot find reference project meta:", stagingConfig.ObsProject, err)
if !IsDryRun && err == nil {
@@ -946,8 +963,8 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
}
common.LogDebug("ObsProject:", stagingConfig.ObsProject)
stagingProject := GetObsProjectAssociatedWithPr(stagingConfig, ObsClient.HomeProject, pr)
change, err := StartOrUpdateBuild(stagingConfig, git, gitea, pr)
stagingProject := GetObsProjectAssociatedWithPr(stagingConfig, obs.GetHomeProject(), pr)
change, err := StartOrUpdateBuild(obs, stagingConfig, git, gitea, pr)
status := &models.CommitStatus{
Context: BotName,
Description: "OBS Staging build",
@@ -978,11 +995,8 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
SetStatus(gitea, org, repo, pr.Head.Sha, status)
}
if change != RequestModificationNoChange && !IsDryRun {
gitea.AddComment(pr, msg)
}
stagingResult, err := ObsClient.BuildStatus(stagingProject)
stagingResult, err := obs.BuildStatus(stagingProject)
if err != nil {
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
}
@@ -990,7 +1004,14 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
_, packagePRs := common.ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
// always update QA projects because Labels can change
qaProjects := ProcessQaProjects(stagingConfig, git, gitea, pr, stagingProject)
qaProjects, qaProjectMsg := ProcessQaProjects(obs, stagingConfig, git, gitea, pr, stagingProject)
if change != RequestModificationNoChange && !IsDryRun {
if len(qaProjectMsg) > 0 {
msg += "\n" + qaProjectMsg
}
CommentPROnce(gitea, org, repo, id, msg)
}
done := false
overallBuildStatus := ProcessBuildStatus(stagingResult)
@@ -998,7 +1019,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
if len(qaProjects) > 0 && overallBuildStatus == BuildStatusSummarySuccess {
seperator := " in "
for _, qaProject := range qaProjects {
qaResult, err := ObsClient.BuildStatus(qaProject)
qaResult, err := obs.BuildStatus(qaProject)
if err != nil {
common.LogError("failed fetching stage project status for", qaProject, ":", err)
}
@@ -1058,7 +1079,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
default:
continue
}
commentOnPackagePR(gitea, packagePR.Org, packagePR.Repo, packagePR.Num, msg)
CommentPROnce(gitea, packagePR.Org, packagePR.Repo, packagePR.Num, msg)
}
if len(missingPkgs) > 0 {
@@ -1068,10 +1089,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
msg = msg + " - " + pkg + "\n"
}
common.LogInfo(msg)
err := gitea.AddComment(pr, msg)
if err != nil {
common.LogError(err)
}
CommentPROnce(gitea, org, repo, id, msg)
}
}
@@ -1090,8 +1108,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
return false, nil
}
func PollWorkNotifications(giteaUrl string) {
gitea := common.AllocateGiteaTransport(giteaUrl)
func PollWorkNotifications(obs common.ObsClientInterface, gitea common.Gitea) {
data, err := gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
if err != nil {
@@ -1107,7 +1124,7 @@ func PollWorkNotifications(giteaUrl string) {
if !ListPullNotificationsOnly {
switch notification.Subject.Type {
case "Pull":
ProcessPullNotification(gitea, notification)
ProcessPullNotification(obs, gitea, notification)
default:
if !IsDryRun {
gitea.SetNotificationRead(notification.ID)
@@ -1130,7 +1147,7 @@ func PollWorkNotifications(giteaUrl string) {
continue
}
cleanupFinished = CleanupPullNotification(gitea, n) && cleanupFinished
cleanupFinished = CleanupPullNotification(obs, gitea, n) && cleanupFinished
}
} else if err != nil {
common.LogError(err)
@@ -1144,7 +1161,8 @@ var ObsApiHost string
var ObsWebHost string
var IsDryRun bool
var ProcessPROnly string
var ObsClient *common.ObsClient
var ObsClient common.ObsClientInterface
var BotUser string
func ObsWebHostFromApiHost(apihost string) string {
u, err := url.Parse(apihost)
@@ -1209,9 +1227,18 @@ func main() {
}
if len(*buildRoot) > 0 {
ObsClient.HomeProject = *buildRoot
ObsClient.SetHomeProject(*buildRoot)
}
gitea := common.AllocateGiteaTransport(GiteaUrl)
user, err := gitea.GetCurrentUser()
if err != nil {
common.LogError("Cannot fetch current user:", err)
return
}
BotUser = user.UserName
if len(*ProcessPROnly) > 0 {
rx := regexp.MustCompile("^([^/#]+)/([^/#]+)#([0-9]+)$")
m := rx.FindStringSubmatch(*ProcessPROnly)
@@ -1220,15 +1247,14 @@ func main() {
return
}
gitea := common.AllocateGiteaTransport(GiteaUrl)
id, _ := strconv.ParseInt(m[3], 10, 64)
ProcessPullRequest(gitea, m[1], m[2], id)
ProcessPullRequest(ObsClient, gitea, m[1], m[2], id)
return
}
for {
PollWorkNotifications(GiteaUrl)
PollWorkNotifications(ObsClient, gitea)
common.LogInfo("Poll cycle finished")
time.Sleep(5 * time.Minute)
}

File diff suppressed because it is too large Load Diff

View File

@@ -5,7 +5,7 @@ After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/workflow-direct
EnvironmentFile=-/etc/default/%i/workflow-direct.env
EnvironmentFile=/etc/default/%i/workflow-direct.env
#DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict

View File

@@ -5,7 +5,7 @@ After=network-online.target
[Service]
Type=exec
ExecStart=/usr/bin/workflow-pr
EnvironmentFile=-/etc/default/%i/workflow-pr.env
EnvironmentFile=/etc/default/%i/workflow-pr.env
#DynamicUser=yes
NoNewPrivileges=yes
ProtectSystem=strict

View File

@@ -37,8 +37,10 @@ Main Tasks
| ManualMergeOnly | true | Both PackageGit PR and ProjectGit PR are merged upon an allowed package maintainer or project maintainer commenting “merge ok” in the PackageGit PR. |
| ManualMergeOnly and ManualMergeProject | false | Both ProjectGit and PackageGit PRs are merged as soon as all reviews are completed in both PrjGit and PkgGit PRs. |
Config file
-----------
Project specific config file
----------------------------
This is the ProjectGit config file. For runtime config file, see bottom.
* Filename: `workflow.config`
* Location: ProjectGit
@@ -156,8 +158,44 @@ NOTE: Project Maintainers have these permissions automatically.
Server configuration
--------------------------
**Configuration file:**
The configuration file is a JSON file that consists of a list of project git locations
that are then consulted for their `workflow.config` config files.
```
[]ProjectGit = {
"org" | "org/repo" | "org/repo#branch"
}
default repo = _ObsPrj
default branch = as specified in Gitea
```
For example,
```
[ "org", "openSUSE/Leap", "openSUSE/Leap#16.0" ]
```
Are all valid entries. These are then resolved to,
* For `org`, it's assumed that default repository of `_ObsPrj` in `org` organization and using Gitea's default branch
* For `openSUSE/Leap`, the repository "Leap" using Gitea's default branch in `openSUSE` organization.
* For `openSUSE/Leap#16.0`, the repository "Leap" with branch "16.0" in `openSUSE` organization.
For each of these project gits, `workflow.config` is read.
**Runtime Options**
| Option | Default | Environmental Default | Notes |
|---------------|----------------------------|-----------------------|------------------------------------|
| git-author | AutoGits PR Review Bot | AUTOGITS_GIT_AUTHOR | Name of author for bot created commits |
| git-email | noone@suse.de | AUTOGITS_GIT_EMAIL | Email for the bot created commits |
| config | | AUTOGITS_CONFIG | Path to above config file |
| gitea-url | https://src.opensuse.org | AUTOGITS_GITEA_URL | Gitea's URL instance |
| rabbit-url | amqps://rabbit.opensuse.org| AUTOGITS_RABBIT_URL | RabbitMQ's URL instance |
| debug | false | AUTOGITS_DEBUG | Extra logging |
| check-on-start| false | AUTOGITS_CHECK_ON_START| Whether to check all projects for consistency on start. Can take a while |
| check-interval| 5 | | Consistency check interval |
| repo-path | Uses temp directory | AUTOGITS_REPO_PATH | Path where to store repositories. |
| Field | Type | Notes |
| ----- | ----- | ----- |
| root | Array of string | Format **org/repo\#branch** |

View File

@@ -44,39 +44,30 @@ var CurrentUser *models.User
var GitHandler common.GitHandlerGenerator
var Gitea common.Gitea
func getEnvOverrideString(env, def string) string {
if envValue := os.Getenv(env); len(envValue) != 0 {
return envValue
}
return def
}
func getEnvOverrideBool(env string, def bool) bool {
if envValue := os.Getenv(env); len(envValue) != 0 {
if value, err := strconv.Atoi(envValue); err == nil && value > 0 {
return true
}
}
return def
}
func main() {
flag.StringVar(&GitAuthor, "git-author", "AutoGits PR Review Bot", "Git commit author")
flag.StringVar(&GitEmail, "git-email", "amajer+devel-git@suse.de", "Git commit email")
flag.StringVar(&GitAuthor, "git-author", common.GetEnvOverrideString(os.Getenv("AUTOGITS_GIT_AUTHOR"), "AutoGits PR Review Bot"), "Git commit author")
flag.StringVar(&GitEmail, "git-email", common.GetEnvOverrideString(os.Getenv("AUTOGITS_GIT_EMAIL"), "noone@suse.de"), "Git commit email")
workflowConfig := flag.String("config", getEnvOverrideString("AUTOGITS_CONFIG", ""), "Repository and workflow definition file")
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance")
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
debugMode := flag.Bool("debug", getEnvOverrideBool("AUTOGITS_DEBUG", false), "Extra debugging information")
checkOnStart := flag.Bool("check-on-start", getEnvOverrideBool("AUTOGITS_CHECK_ON_START", false), "Check all repositories for consistency on start, without delays")
workflowConfig := flag.String("config", common.GetEnvOverrideString(os.Getenv("AUTOGITS_CONFIG"), ""), "Repository and workflow definition file")
giteaUrl := flag.String("gitea-url", common.GetEnvOverrideString(os.Getenv("AUTOGITS_GITEA_URL"), "https://src.opensuse.org"), "Gitea instance")
legacyRabbitUrl := flag.String("url", "", "Legacy. Use rabbit-url") /* TO BE REMOVED */
rabbitUrl := flag.String("rabbit-url", common.GetEnvOverrideString(os.Getenv("AUTOGITS_RABBIT_URL"), "amqps://rabbit.opensuse.org"), "URL for RabbitMQ instance")
debugMode := flag.Bool("debug", common.GetEnvOverrideBool(os.Getenv("AUTOGITS_DEBUG"), false), "Extra debugging information")
checkOnStart := flag.Bool("check-on-start", common.GetEnvOverrideBool(os.Getenv("AUTOGITS_CHECK_ON_START"), false), "Check all repositories for consistency on start, without delays")
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consitency, in hours")
flag.BoolVar(&ListPROnly, "list-prs-only", false, "Only lists PRs without acting on them")
flag.Int64Var(&PRID, "id", -1, "Process only the specific ID and ignore the rest. Use for debugging")
basePath := flag.String("repo-path", getEnvOverrideString("AUTOGITS_REPO_PATH", ""), "Repository path. Default is temporary directory")
basePath := flag.String("repo-path", common.GetEnvOverrideString(os.Getenv("AUTOGITS_REPO_PATH"), ""), "Repository path. Default is temporary directory")
pr := flag.String("only-pr", "", "Only specific PR to process. For debugging")
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry mode. Do not push changes to remote repo.")
flag.Parse()
if len(*legacyRabbitUrl) > 0 {
*rabbitUrl = *legacyRabbitUrl
}
common.SetLoggingLevel(common.LogLevelInfo)
if *debugMode {
common.SetLoggingLevel(common.LogLevelDebug)