forked from git-workflow/autogits
Compare commits
31 Commits
ibs_state
...
staging-up
| Author | SHA256 | Date | |
|---|---|---|---|
| e1825dc658 | |||
| 59965e7b5c | |||
| 24a4a592a7 | |||
| d3d9d66797 | |||
| 7a2f7a6ee7 | |||
| 34a3a4795b | |||
| bb5daebdfa | |||
| 70bba5e239 | |||
| 5793391586 | |||
| d923db3f87 | |||
| fc4547f9a9 | |||
| 6fa57fc4d4 | |||
| 82d4e2ed5d | |||
| 8920644792 | |||
| 06772ca662 | |||
| 643e0d2522 | |||
| 603e5c67e7 | |||
| 17b67b8133 | |||
| 9bc290af01 | |||
| c16d1f9940 | |||
|
|
3e1b3c5c84 | ||
|
|
fc4899b75a | ||
| 0b479bcbfa | |||
| 9f9a4660e9 | |||
| cb2f17a287 | |||
| 3125df4d6a | |||
| 06600813b4 | |||
| 3b510182d6 | |||
|
|
d1bcc222ce | ||
|
|
b632952f62 | ||
|
|
1b90299d94
|
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,2 +1,7 @@
|
||||
*.osc
|
||||
*.conf
|
||||
/integration/gitea-data
|
||||
/integration/gitea-logs
|
||||
/integration/rabbitmq-data
|
||||
/integration/workflow-pr-repos
|
||||
__pycache__/
|
||||
|
||||
4
Makefile
Normal file
4
Makefile
Normal file
@@ -0,0 +1,4 @@
|
||||
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-staging-bot obs-status-service workflow-direct workflow-pr
|
||||
|
||||
build:
|
||||
for m in $(MODULES); do go build -C $$m -buildmode=pie || exit 1 ; done
|
||||
@@ -181,7 +181,10 @@ install -D -m0755 obs-status-service/obs-status-service
|
||||
install -D -m0644 systemd/obs-status-service.service %{buildroot}%{_unitdir}/obs-status-service.service
|
||||
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
|
||||
install -D -m0644 systemd/workflow-direct@.service %{buildroot}%{_unitdir}/workflow-direct@.service
|
||||
install -D -m0644 systemd/workflow-direct.target %{buildroot}%{_unitdir}/workflow-direct.target
|
||||
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
|
||||
install -D -m0644 systemd/workflow-pr@.service %{buildroot}%{_unitdir}/workflow-pr@.service
|
||||
install -D -m0644 systemd/workflow-pr.target %{buildroot}%{_unitdir}/workflow-pr.target
|
||||
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson
|
||||
install -D -m0755 utils/maintainer-update/maintainer-update %{buildroot}%{_bindir}/maintainer-update
|
||||
|
||||
@@ -233,17 +236,29 @@ install -D -m0755 utils/maintainer-update/maintainer-update
|
||||
%postun obs-status-service
|
||||
%service_del_postun obs-status-service.service
|
||||
|
||||
%pre workflow-direct
|
||||
%service_add_pre workflow-direct.target
|
||||
|
||||
%post workflow-direct
|
||||
%service_add_post workflow-direct.target
|
||||
|
||||
%preun workflow-direct
|
||||
%service_del_preun workflow-direct.target
|
||||
|
||||
%postun workflow-direct
|
||||
%service_del_postun workflow-direct.target
|
||||
|
||||
%pre workflow-pr
|
||||
%service_add_pre workflow-direct@.service
|
||||
%service_add_pre workflow-pr.target
|
||||
|
||||
%post workflow-pr
|
||||
%service_add_post workflow-direct@.service
|
||||
%service_add_post workflow-pr.target
|
||||
|
||||
%preun workflow-pr
|
||||
%service_del_preun workflow-direct@.service
|
||||
%service_del_preun workflow-pr.target
|
||||
|
||||
%postun workflow-pr
|
||||
%service_del_postun workflow-direct@.service
|
||||
%service_del_postun workflow-pr.target
|
||||
|
||||
%files devel-importer
|
||||
%license COPYING
|
||||
@@ -297,9 +312,12 @@ install -D -m0755 utils/maintainer-update/maintainer-update
|
||||
%doc workflow-direct/README.md
|
||||
%{_bindir}/workflow-direct
|
||||
%{_unitdir}/workflow-direct@.service
|
||||
%{_unitdir}/workflow-direct.target
|
||||
|
||||
%files workflow-pr
|
||||
%license COPYING
|
||||
%doc workflow-pr/README.md
|
||||
%{_bindir}/workflow-pr
|
||||
%{_unitdir}/workflow-pr@.service
|
||||
%{_unitdir}/workflow-pr.target
|
||||
|
||||
|
||||
@@ -83,3 +83,260 @@ func (c *MockObsStatusFetcherWithStateBuildStatusWithStateCall) DoAndReturn(f fu
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// MockObsClientInterface is a mock of ObsClientInterface interface.
|
||||
type MockObsClientInterface struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockObsClientInterfaceMockRecorder
|
||||
isgomock struct{}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceMockRecorder is the mock recorder for MockObsClientInterface.
|
||||
type MockObsClientInterfaceMockRecorder struct {
|
||||
mock *MockObsClientInterface
|
||||
}
|
||||
|
||||
// NewMockObsClientInterface creates a new mock instance.
|
||||
func NewMockObsClientInterface(ctrl *gomock.Controller) *MockObsClientInterface {
|
||||
mock := &MockObsClientInterface{ctrl: ctrl}
|
||||
mock.recorder = &MockObsClientInterfaceMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockObsClientInterface) EXPECT() *MockObsClientInterfaceMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// BuildStatus mocks base method.
|
||||
func (m *MockObsClientInterface) BuildStatus(project string, packages ...string) (*common.BuildResultList, error) {
|
||||
m.ctrl.T.Helper()
|
||||
varargs := []any{project}
|
||||
for _, a := range packages {
|
||||
varargs = append(varargs, a)
|
||||
}
|
||||
ret := m.ctrl.Call(m, "BuildStatus", varargs...)
|
||||
ret0, _ := ret[0].(*common.BuildResultList)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// BuildStatus indicates an expected call of BuildStatus.
|
||||
func (mr *MockObsClientInterfaceMockRecorder) BuildStatus(project any, packages ...any) *MockObsClientInterfaceBuildStatusCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
varargs := append([]any{project}, packages...)
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildStatus", reflect.TypeOf((*MockObsClientInterface)(nil).BuildStatus), varargs...)
|
||||
return &MockObsClientInterfaceBuildStatusCall{Call: call}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceBuildStatusCall wrap *gomock.Call
|
||||
type MockObsClientInterfaceBuildStatusCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockObsClientInterfaceBuildStatusCall) Return(arg0 *common.BuildResultList, arg1 error) *MockObsClientInterfaceBuildStatusCall {
|
||||
c.Call = c.Call.Return(arg0, arg1)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockObsClientInterfaceBuildStatusCall) Do(f func(string, ...string) (*common.BuildResultList, error)) *MockObsClientInterfaceBuildStatusCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockObsClientInterfaceBuildStatusCall) DoAndReturn(f func(string, ...string) (*common.BuildResultList, error)) *MockObsClientInterfaceBuildStatusCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DeleteProject mocks base method.
|
||||
func (m *MockObsClientInterface) DeleteProject(project string) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "DeleteProject", project)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// DeleteProject indicates an expected call of DeleteProject.
|
||||
func (mr *MockObsClientInterfaceMockRecorder) DeleteProject(project any) *MockObsClientInterfaceDeleteProjectCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteProject", reflect.TypeOf((*MockObsClientInterface)(nil).DeleteProject), project)
|
||||
return &MockObsClientInterfaceDeleteProjectCall{Call: call}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceDeleteProjectCall wrap *gomock.Call
|
||||
type MockObsClientInterfaceDeleteProjectCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockObsClientInterfaceDeleteProjectCall) Return(arg0 error) *MockObsClientInterfaceDeleteProjectCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockObsClientInterfaceDeleteProjectCall) Do(f func(string) error) *MockObsClientInterfaceDeleteProjectCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockObsClientInterfaceDeleteProjectCall) DoAndReturn(f func(string) error) *MockObsClientInterfaceDeleteProjectCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// GetHomeProject mocks base method.
|
||||
func (m *MockObsClientInterface) GetHomeProject() string {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetHomeProject")
|
||||
ret0, _ := ret[0].(string)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// GetHomeProject indicates an expected call of GetHomeProject.
|
||||
func (mr *MockObsClientInterfaceMockRecorder) GetHomeProject() *MockObsClientInterfaceGetHomeProjectCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHomeProject", reflect.TypeOf((*MockObsClientInterface)(nil).GetHomeProject))
|
||||
return &MockObsClientInterfaceGetHomeProjectCall{Call: call}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceGetHomeProjectCall wrap *gomock.Call
|
||||
type MockObsClientInterfaceGetHomeProjectCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockObsClientInterfaceGetHomeProjectCall) Return(arg0 string) *MockObsClientInterfaceGetHomeProjectCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockObsClientInterfaceGetHomeProjectCall) Do(f func() string) *MockObsClientInterfaceGetHomeProjectCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockObsClientInterfaceGetHomeProjectCall) DoAndReturn(f func() string) *MockObsClientInterfaceGetHomeProjectCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// GetProjectMeta mocks base method.
|
||||
func (m *MockObsClientInterface) GetProjectMeta(project string) (*common.ProjectMeta, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetProjectMeta", project)
|
||||
ret0, _ := ret[0].(*common.ProjectMeta)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetProjectMeta indicates an expected call of GetProjectMeta.
|
||||
func (mr *MockObsClientInterfaceMockRecorder) GetProjectMeta(project any) *MockObsClientInterfaceGetProjectMetaCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectMeta", reflect.TypeOf((*MockObsClientInterface)(nil).GetProjectMeta), project)
|
||||
return &MockObsClientInterfaceGetProjectMetaCall{Call: call}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceGetProjectMetaCall wrap *gomock.Call
|
||||
type MockObsClientInterfaceGetProjectMetaCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockObsClientInterfaceGetProjectMetaCall) Return(arg0 *common.ProjectMeta, arg1 error) *MockObsClientInterfaceGetProjectMetaCall {
|
||||
c.Call = c.Call.Return(arg0, arg1)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockObsClientInterfaceGetProjectMetaCall) Do(f func(string) (*common.ProjectMeta, error)) *MockObsClientInterfaceGetProjectMetaCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockObsClientInterfaceGetProjectMetaCall) DoAndReturn(f func(string) (*common.ProjectMeta, error)) *MockObsClientInterfaceGetProjectMetaCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// SetHomeProject mocks base method.
|
||||
func (m *MockObsClientInterface) SetHomeProject(project string) {
|
||||
m.ctrl.T.Helper()
|
||||
m.ctrl.Call(m, "SetHomeProject", project)
|
||||
}
|
||||
|
||||
// SetHomeProject indicates an expected call of SetHomeProject.
|
||||
func (mr *MockObsClientInterfaceMockRecorder) SetHomeProject(project any) *MockObsClientInterfaceSetHomeProjectCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetHomeProject", reflect.TypeOf((*MockObsClientInterface)(nil).SetHomeProject), project)
|
||||
return &MockObsClientInterfaceSetHomeProjectCall{Call: call}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceSetHomeProjectCall wrap *gomock.Call
|
||||
type MockObsClientInterfaceSetHomeProjectCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockObsClientInterfaceSetHomeProjectCall) Return() *MockObsClientInterfaceSetHomeProjectCall {
|
||||
c.Call = c.Call.Return()
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockObsClientInterfaceSetHomeProjectCall) Do(f func(string)) *MockObsClientInterfaceSetHomeProjectCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockObsClientInterfaceSetHomeProjectCall) DoAndReturn(f func(string)) *MockObsClientInterfaceSetHomeProjectCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// SetProjectMeta mocks base method.
|
||||
func (m *MockObsClientInterface) SetProjectMeta(meta *common.ProjectMeta) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "SetProjectMeta", meta)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// SetProjectMeta indicates an expected call of SetProjectMeta.
|
||||
func (mr *MockObsClientInterfaceMockRecorder) SetProjectMeta(meta any) *MockObsClientInterfaceSetProjectMetaCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetProjectMeta", reflect.TypeOf((*MockObsClientInterface)(nil).SetProjectMeta), meta)
|
||||
return &MockObsClientInterfaceSetProjectMetaCall{Call: call}
|
||||
}
|
||||
|
||||
// MockObsClientInterfaceSetProjectMetaCall wrap *gomock.Call
|
||||
type MockObsClientInterfaceSetProjectMetaCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockObsClientInterfaceSetProjectMetaCall) Return(arg0 error) *MockObsClientInterfaceSetProjectMetaCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockObsClientInterfaceSetProjectMetaCall) Do(f func(*common.ProjectMeta) error) *MockObsClientInterfaceSetProjectMetaCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockObsClientInterfaceSetProjectMetaCall) DoAndReturn(f func(*common.ProjectMeta) error) *MockObsClientInterfaceSetProjectMetaCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
@@ -46,6 +46,15 @@ type ObsStatusFetcherWithState interface {
|
||||
BuildStatusWithState(project string, opts *BuildResultOptions, packages ...string) (*BuildResultList, error)
|
||||
}
|
||||
|
||||
type ObsClientInterface interface {
|
||||
GetProjectMeta(project string) (*ProjectMeta, error)
|
||||
SetProjectMeta(meta *ProjectMeta) error
|
||||
DeleteProject(project string) error
|
||||
BuildStatus(project string, packages ...string) (*BuildResultList, error)
|
||||
GetHomeProject() string
|
||||
SetHomeProject(project string)
|
||||
}
|
||||
|
||||
type ObsClient struct {
|
||||
baseUrl *url.URL
|
||||
client *http.Client
|
||||
@@ -57,6 +66,14 @@ type ObsClient struct {
|
||||
HomeProject string
|
||||
}
|
||||
|
||||
func (c *ObsClient) GetHomeProject() string {
|
||||
return c.HomeProject
|
||||
}
|
||||
|
||||
func (c *ObsClient) SetHomeProject(project string) {
|
||||
c.HomeProject = project
|
||||
}
|
||||
|
||||
func NewObsClient(host string) (*ObsClient, error) {
|
||||
baseUrl, err := url.Parse(host)
|
||||
if err != nil {
|
||||
|
||||
@@ -26,6 +26,7 @@ import (
|
||||
"net/url"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
@@ -286,3 +287,30 @@ func TrimRemovedBranchSuffix(branchName string) string {
|
||||
|
||||
return branchName
|
||||
}
|
||||
|
||||
func GetEnvOverrideString(envValue, def string) string {
|
||||
if len(envValue) != 0 {
|
||||
return envValue
|
||||
}
|
||||
return def
|
||||
}
|
||||
|
||||
func GetEnvOverrideBool(envValue string, def bool) bool {
|
||||
if len(envValue) == 0 {
|
||||
return def
|
||||
}
|
||||
|
||||
if value, err := strconv.Atoi(envValue); err == nil {
|
||||
if value > 0 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
envValue = strings.TrimSpace(strings.ToLower(envValue))
|
||||
switch envValue {
|
||||
case "t", "true", "yes", "y", "on":
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -222,6 +222,60 @@ func TestRemovedBranchName(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitStringNoEmpty(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
sep string
|
||||
expected []string
|
||||
}{
|
||||
{"Empty string", "", ",", []string{}},
|
||||
{"Only separators", ",,,", ",", []string{}},
|
||||
{"Spaces and separators", " , , ", ",", []string{}},
|
||||
{"Normal split", "a,b,c", ",", []string{"a", "b", "c"}},
|
||||
{"Leading/trailing spaces", " a , b ", ",", []string{"a", "b"}},
|
||||
{"Multiple separators", "a,,b", ",", []string{"a", "b"}},
|
||||
{"Newlines", "line1\n\nline2", "\n", []string{"line1", "line2"}},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
res := common.SplitStringNoEmpty(test.input, test.sep)
|
||||
if !reflect.DeepEqual(res, test.expected) {
|
||||
t.Errorf("SplitStringNoEmpty(%q, %q) = %v; want %v", test.input, test.sep, res, test.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTranslateHttpsToSshUrl(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected string
|
||||
err bool
|
||||
}{
|
||||
{"Opensuse HTTPS", "https://src.opensuse.org/org/repo", "ssh://gitea@src.opensuse.org/org/repo", false},
|
||||
{"Suse HTTPS", "https://src.suse.de/org/repo", "ssh://gitea@src.suse.de/org/repo", false},
|
||||
{"Already SSH", "ssh://gitea@src.opensuse.org/org/repo", "ssh://gitea@src.opensuse.org/org/repo", false},
|
||||
{"Native SSH", "gitea@src.opensuse.org:org/repo", "gitea@src.opensuse.org:org/repo", false},
|
||||
{"Unknown URL", "https://github.com/org/repo", "", true},
|
||||
{"Empty URL", "", "", true},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
res, err := common.TranslateHttpsToSshUrl(test.input)
|
||||
if (err != nil) != test.err {
|
||||
t.Errorf("TranslateHttpsToSshUrl(%q) error = %v; want error %v", test.input, err, test.err)
|
||||
}
|
||||
if res != test.expected {
|
||||
t.Errorf("TranslateHttpsToSshUrl(%q) = %q; want %q", test.input, res, test.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewPackageIssueParsing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
@@ -241,7 +295,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Default branch and junk lines and approval for maintainership",
|
||||
name: "Default branch and junk lines and approval for maintainership",
|
||||
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: yes",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
@@ -251,7 +305,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Default branch and junk lines and no maintainership",
|
||||
name: "Default branch and junk lines and no maintainership",
|
||||
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: NEVER",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
@@ -260,7 +314,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "3 repos with comments and maintainership",
|
||||
name: "3 repos with comments and maintainership",
|
||||
input: "\n\nsome comments for org1/repo2 are here and more\n\norg1/repo2#master\n org2/repo3#master\n some/repo3#m\nMaintainer ok",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
@@ -272,11 +326,11 @@ func TestNewPackageIssueParsing(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Invalid repos with spaces",
|
||||
name: "Invalid repos with spaces",
|
||||
input: "or g/repo#branch\norg/r epo#branch\norg/repo#br anch\norg/repo#branch As foo ++",
|
||||
},
|
||||
{
|
||||
name: "Valid repos with spaces",
|
||||
name: "Valid repos with spaces",
|
||||
input: " org / repo # branch",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
@@ -285,7 +339,7 @@ func TestNewPackageIssueParsing(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Package name is not repo name",
|
||||
name: "Package name is not repo name",
|
||||
input: " org / repo # branch as repo++ \nmaintainer true",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
@@ -305,3 +359,58 @@ func TestNewPackageIssueParsing(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetEnvOverride(t *testing.T) {
|
||||
t.Run("GetEnvOverrideString", func(t *testing.T) {
|
||||
tests := []struct {
|
||||
envValue string
|
||||
def string
|
||||
expected string
|
||||
}{
|
||||
{"", "default", "default"},
|
||||
{"override", "default", "override"},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
if res := common.GetEnvOverrideString(test.envValue, test.def); res != test.expected {
|
||||
t.Errorf("GetEnvOverrideString(%q, %q) = %q; want %q", test.envValue, test.def, res, test.expected)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("GetEnvOverrideBool", func(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
envValue string
|
||||
def bool
|
||||
expected bool
|
||||
}{
|
||||
{"Empty env value, default false", "", false, false},
|
||||
{"Empty env value, default true", "", true, true},
|
||||
{"Env '1', default false", "1", false, true},
|
||||
{"Env '2', default false", "2", false, true},
|
||||
{"Env '0', default false", "0", false, false},
|
||||
{"Env 'invalid', default true", "abc", true, false},
|
||||
{"Env 'true', default false", "true", false, true},
|
||||
{"Env 'YES', default false", "YES", false, true},
|
||||
{"Env '0', default true", "0", true, false},
|
||||
{"Env 'false', default true", "false", true, false},
|
||||
{"Env 'FALSE', default true", "FALSE", true, false},
|
||||
{"Env ' true ', default false", " true ", false, true},
|
||||
{"Env 'no', default true", "no", true, false},
|
||||
{"Env 'NO', default true", "NO", true, false},
|
||||
{"Env 'off', default true", "off", true, false},
|
||||
{"Env 'on', default false", "on", false, true},
|
||||
{"Env 'invalid', default false", "tbc", false, false},
|
||||
{"Env 'garbage', default false", "!@#$", false, false},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
if res := common.GetEnvOverrideBool(test.envValue, test.def); res != test.expected {
|
||||
t.Errorf("GetEnvOverrideBool(%q, %v) = %v; want %v", test.envValue, test.def, res, test.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
11
integration/Dockerfile
Normal file
11
integration/Dockerfile
Normal file
@@ -0,0 +1,11 @@
|
||||
|
||||
FROM opensuse/tumbleweed
|
||||
ENV container=podman
|
||||
|
||||
ENV LANG=en_US.UTF-8
|
||||
|
||||
RUN zypper -vvvn install podman podman-compose vim make python3-pytest python3-requests python3-pytest-dependency
|
||||
|
||||
COPY . /opt/project/
|
||||
|
||||
WORKDIR /opt/project/integration
|
||||
76
integration/Makefile
Normal file
76
integration/Makefile
Normal file
@@ -0,0 +1,76 @@
|
||||
# We want to be able to test in two **modes**:
|
||||
# A. bots are used from official packages as defined in */Dockerfile.package
|
||||
# B. bots are just picked up from binaries that are placed in corresponding parent directory.
|
||||
|
||||
# The topology is defined in podman-compose file and can be spawned in two ways:
|
||||
# 1. Privileged container (needs no additional dependancies)
|
||||
# 2. podman-compose on a local machine (needs dependencies as defined in the Dockerfile)
|
||||
|
||||
|
||||
# Typical workflow:
|
||||
# A1: - run 'make test_package'
|
||||
# B1: - run 'make test_local' (make sure that the go binaries in parent folder are built)
|
||||
# A2:
|
||||
# 1. 'make build_package' - prepares images (recommended, otherwise there might be surprises if image fails to build during `make up`)
|
||||
# 2. 'make up' - spawns podman-compose
|
||||
# 3. 'pytest -v tests/*' - run tests
|
||||
# 4. 'make down' - once the containers are not needed
|
||||
# B2: (make sure the go binaries in the parent folder are built)
|
||||
# 1. 'make build_local' - prepared images (recommended, otherwise there might be surprises if image fails to build during `make up`)
|
||||
# 2. 'make up' - spawns podman-compose
|
||||
# 3. 'pytest -v tests/*' - run tests
|
||||
# 4. 'make down' - once the containers are not needed
|
||||
|
||||
|
||||
AUTO_DETECT_MODE := $(shell if test -e ../workflow-pr/workflow-pr; then echo .local; else echo .package; fi)
|
||||
|
||||
# try to detect mode B1, otherwise mode A1
|
||||
test: GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE)
|
||||
test: build_container test_container
|
||||
|
||||
# mode A1
|
||||
test_package: GIWTF_IMAGE_SUFFIX=.package
|
||||
test_package: build_container test_container
|
||||
|
||||
# mode B1
|
||||
test_local: GIWTF_IMAGE_SUFFIX=.local
|
||||
test_local: build_container test_container
|
||||
|
||||
MODULES := gitea-events-rabbitmq-publisher obs-staging-bot workflow-pr
|
||||
|
||||
# Prepare topology 1
|
||||
build_container:
|
||||
podman build ../ -f integration/Dockerfile -t autogits_integration
|
||||
|
||||
# Run tests in topology 1
|
||||
test_container:
|
||||
podman run --rm --privileged -t --network integration_gitea-network -e GIWTF_IMAGE_SUFFIX=$(GIWTF_IMAGE_SUFFIX) autogits_integration /usr/bin/bash -c "make build && make up && sleep 25 && pytest -v tests/*"
|
||||
|
||||
|
||||
build_local: AUTO_DETECT_MODE=.local
|
||||
build_local: build
|
||||
|
||||
build_package: AUTO_DETECT_MODE=.package
|
||||
build_package: build
|
||||
|
||||
# parse all service images from podman-compose and build them (topology 2)
|
||||
build:
|
||||
podman pull docker.io/library/rabbitmq:3.13.7-management
|
||||
for i in $$(grep -A 1000 services: podman-compose.yml | grep -oE '^ [^: ]+'); do GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE) podman-compose build $$i || exit 1; done
|
||||
|
||||
# this will spawn prebuilt containers (topology 2)
|
||||
up:
|
||||
podman-compose up -d
|
||||
|
||||
# tear down (topology 2)
|
||||
down:
|
||||
podman-compose down
|
||||
|
||||
# mode A
|
||||
up-bots-package:
|
||||
GIWTF_IMAGE_SUFFIX=.package podman-compose up -d
|
||||
|
||||
# mode B
|
||||
up-bots-local:
|
||||
GIWTF_IMAGE_SUFFIX=.local podman-compose up -d
|
||||
|
||||
57
integration/Makefile.txt
Normal file
57
integration/Makefile.txt
Normal file
@@ -0,0 +1,57 @@
|
||||
+-------------------------------------------------------------------------------------------------+
|
||||
| Makefile Targets |
|
||||
+-------------------------------------------------------------------------------------------------+
|
||||
| |
|
||||
| [Default Test Workflow] |
|
||||
| test (Auto-detects mode: .local or .package) |
|
||||
| └─> build_container |
|
||||
| └─> test_container |
|
||||
| |
|
||||
| [Specific Test Workflows - Topology 1: Privileged Container] |
|
||||
| test_package (Mode A1: Bots from official packages) |
|
||||
| └─> build_container |
|
||||
| └─> test_container |
|
||||
| |
|
||||
| test_local (Mode B1: Bots from local binaries) |
|
||||
| └─> build_container |
|
||||
| └─> test_container |
|
||||
| |
|
||||
| build_container |
|
||||
| - Action: Builds the `autogits_integration` privileged container image. |
|
||||
| - Purpose: Prepares an environment for running tests within a single container. |
|
||||
| |
|
||||
| test_container |
|
||||
| - Action: Runs `autogits_integration` container, executes `make build`, `make up`, and |
|
||||
| `pytest -v tests/*` inside it. |
|
||||
| - Purpose: Executes the full test suite in Topology 1 (privileged container). |
|
||||
| |
|
||||
| [Build & Orchestration Workflows - Topology 2: podman-compose] |
|
||||
| |
|
||||
| build_package (Mode A: Builds service images from official packages) |
|
||||
| └─> build |
|
||||
| |
|
||||
| build_local (Mode B: Builds service images from local binaries) |
|
||||
| └─> build |
|
||||
| |
|
||||
| build |
|
||||
| - Action: Pulls `rabbitmq` image and iterates through `podman-compose.yml` services |
|
||||
| to build each one. |
|
||||
| - Purpose: Prepares all necessary service images for Topology 2 deployment. |
|
||||
| |
|
||||
| up |
|
||||
| - Action: Starts all services defined in `podman-compose.yml` in detached mode. |
|
||||
| - Purpose: Deploys the application topology (containers) for testing or development. |
|
||||
| |
|
||||
| down |
|
||||
| - Action: Stops and removes all services started by `up`. |
|
||||
| - Purpose: Cleans up the deployed application topology. |
|
||||
| |
|
||||
| up-bots-package (Mode A: Spawns Topology 2 with official package bots) |
|
||||
| - Action: Calls `podman-compose up -d` with `GIWTF_IMAGE_SUFFIX=.package`. |
|
||||
| - Purpose: Specifically brings up the environment using official package bots. |
|
||||
| |
|
||||
| up-bots-local (Mode B: Spawns Topology 2 with local binaries) |
|
||||
| - Action: Calls `podman-compose up -d` with `GIWTF_IMAGE_SUFFIX=.local`. |
|
||||
| - Purpose: Specifically brings up the environment using local binaries. |
|
||||
| |
|
||||
+-------------------------------------------------------------------------------------------------+
|
||||
1
integration/clean.sh
Executable file
1
integration/clean.sh
Executable file
@@ -0,0 +1 @@
|
||||
sudo rm -rf gitea-data/ gitea-logs/ rabbitmq-data/ workflow-pr-repos/
|
||||
1
integration/gitea-events-rabbitmq-publisher/Dockerfile
Symbolic link
1
integration/gitea-events-rabbitmq-publisher/Dockerfile
Symbolic link
@@ -0,0 +1 @@
|
||||
Dockerfile.package
|
||||
15
integration/gitea-events-rabbitmq-publisher/Dockerfile.local
Normal file
15
integration/gitea-events-rabbitmq-publisher/Dockerfile.local
Normal file
@@ -0,0 +1,15 @@
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
# Add the custom CA to the trust store
|
||||
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
|
||||
RUN update-ca-certificates
|
||||
|
||||
RUN zypper -n in which binutils
|
||||
|
||||
# Copy the pre-built binary into the container
|
||||
# The user will build this and place it in the same directory as this Dockerfile
|
||||
COPY gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher /usr/local/bin/
|
||||
COPY integration/gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
@@ -0,0 +1,15 @@
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
# Add the custom CA to the trust store
|
||||
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
|
||||
RUN update-ca-certificates
|
||||
|
||||
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
|
||||
RUN zypper --gpg-auto-import-keys ref
|
||||
|
||||
RUN zypper -n in git-core curl autogits-gitea-events-rabbitmq-publisher binutils
|
||||
|
||||
COPY integration/gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
13
integration/gitea-events-rabbitmq-publisher/entrypoint.sh
Normal file
13
integration/gitea-events-rabbitmq-publisher/entrypoint.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
exe=$(which gitea-events-rabbitmq-publisher 2>/dev/null) || :
|
||||
exe=${exe:-/usr/local/bin/gitea-events-rabbitmq-publisher}
|
||||
|
||||
package=$(rpm -qa | grep autogits-gitea-events-rabbitmq-publisher) || :
|
||||
|
||||
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
|
||||
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
|
||||
echo "RABBITMQ_HOST: $RABBITMQ_HOST"
|
||||
|
||||
exec $exe "$@"
|
||||
25
integration/gitea/Dockerfile
Normal file
25
integration/gitea/Dockerfile
Normal file
@@ -0,0 +1,25 @@
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
RUN zypper ar --repo https://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo \
|
||||
&& zypper -n --gpg-auto-import-keys refresh
|
||||
|
||||
RUN zypper -n install \
|
||||
git \
|
||||
sqlite3 \
|
||||
curl \
|
||||
gawk \
|
||||
openssh \
|
||||
jq \
|
||||
devel_Factory_git-workflow:gitea \
|
||||
&& rm -rf /var/cache/zypp/*
|
||||
|
||||
# Copy the minimal set of required files from the local 'container-files' directory
|
||||
COPY container-files/ /
|
||||
|
||||
RUN chmod -R 777 /etc/gitea/conf
|
||||
|
||||
# Make the setup and entrypoint scripts executable
|
||||
RUN chmod +x /opt/setup/setup-gitea.sh && chmod +x /opt/setup/entrypoint.sh && chmod +x /opt/setup/setup-webhook.sh && chmod +x /opt/setup/setup-dummy-data.sh
|
||||
|
||||
# Use the new entrypoint script to start the container
|
||||
ENTRYPOINT ["/opt/setup/entrypoint.sh"]
|
||||
42
integration/gitea/container-files/etc/gitea/conf/app.ini
Normal file
42
integration/gitea/container-files/etc/gitea/conf/app.ini
Normal file
@@ -0,0 +1,42 @@
|
||||
WORK_PATH = /var/lib/gitea
|
||||
|
||||
[server]
|
||||
CERT_FILE = /etc/gitea/https/cert.pem
|
||||
KEY_FILE = /etc/gitea/https/key.pem
|
||||
STATIC_ROOT_PATH = /usr/share/gitea
|
||||
APP_DATA_PATH = /var/lib/gitea/data
|
||||
PPROF_DATA_PATH = /var/lib/gitea/data/tmp/pprof
|
||||
PROTOCOL = http
|
||||
DOMAIN = gitea-test
|
||||
SSH_DOMAIN = gitea-test
|
||||
ROOT_URL = http://gitea-test:3000/
|
||||
HTTP_PORT = 3000
|
||||
DISABLE_SSH = false
|
||||
START_SSH_SERVER = true
|
||||
SSH_PORT = 3022
|
||||
LFS_START_SERVER = true
|
||||
|
||||
[lfs]
|
||||
PATH = /var/lib/gitea/data/lfs
|
||||
|
||||
[database]
|
||||
DB_TYPE = sqlite3
|
||||
PATH = /var/lib/gitea/data/gitea.db
|
||||
|
||||
[security]
|
||||
INSTALL_LOCK = true
|
||||
|
||||
[oauth2]
|
||||
ENABLED = false
|
||||
|
||||
[log]
|
||||
ROOT_PATH = /var/log/gitea
|
||||
MODE = console, file
|
||||
; Either "Trace", "Debug", "Info", "Warn", "Error" or "None", default is "Info"
|
||||
LEVEL = Debug
|
||||
|
||||
[service]
|
||||
ENABLE_BASIC_AUTHENTICATION = true
|
||||
|
||||
[webhook]
|
||||
ALLOWED_HOST_LIST = gitea-publisher
|
||||
19
integration/gitea/container-files/opt/setup/entrypoint.sh
Normal file
19
integration/gitea/container-files/opt/setup/entrypoint.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# Run setup to ensure permissions, migrations, and the admin user are ready.
|
||||
# The setup script is now idempotent.
|
||||
/opt/setup/setup-gitea.sh
|
||||
|
||||
# Start the webhook setup script in the background.
|
||||
# It will wait for the main Gitea process to be ready before creating the webhook.
|
||||
/opt/setup/setup-webhook.sh &
|
||||
|
||||
echo "Starting Gitea..."
|
||||
|
||||
# The original systemd service ran as user 'gitea' and group 'gitea'
|
||||
# with a working directory of '/var/lib/gitea'.
|
||||
# We will switch to that user and run the web command.
|
||||
# Using exec means Gitea will become PID 1, allowing it to receive signals correctly.
|
||||
cd /var/lib/gitea
|
||||
exec su -s /bin/bash gitea -c "/usr/bin/gitea web --config /etc/gitea/conf/app.ini"
|
||||
@@ -0,0 +1,2 @@
|
||||
#!/bin/bash
|
||||
# This script is now empty as dummy data setup is handled by pytest fixtures.
|
||||
100
integration/gitea/container-files/opt/setup/setup-gitea.sh
Normal file
100
integration/gitea/container-files/opt/setup/setup-gitea.sh
Normal file
@@ -0,0 +1,100 @@
|
||||
#!/bin/bash
|
||||
set -x
|
||||
set -e
|
||||
|
||||
# Set ownership on the volume mounts. This allows the 'gitea' user to write to them.
|
||||
# We use -R to ensure all subdirectories (like /var/lib/gitea/data) are covered.
|
||||
chown -R gitea:gitea /var/lib/gitea /var/log/gitea
|
||||
|
||||
# Set ownership on the config directory.
|
||||
chown -R gitea:gitea /etc/gitea
|
||||
|
||||
# Run database migrations to initialize the sqlite3 db based on app.ini.
|
||||
su -s /bin/bash gitea -c 'gitea migrate'
|
||||
|
||||
# Create a default admin user if it doesn't exist
|
||||
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "admin" {found=1} END {exit !found}'; then
|
||||
echo "Creating admin user..."
|
||||
su -s /bin/bash gitea -c 'gitea admin user create --username admin --password opensuse --email admin@example.com --must-change-password=false --admin'
|
||||
else
|
||||
echo "Admin user already exists."
|
||||
fi
|
||||
|
||||
# Generate an access token for the admin user
|
||||
ADMIN_TOKEN_FILE="/var/lib/gitea/admin.token"
|
||||
if [ -f "$ADMIN_TOKEN_FILE" ]; then
|
||||
echo "Admin token already exists at $ADMIN_TOKEN_FILE."
|
||||
else
|
||||
echo "Generating admin token..."
|
||||
ADMIN_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t admin-token")
|
||||
if [ -n "$ADMIN_TOKEN" ]; then
|
||||
printf "%s" "$ADMIN_TOKEN" > "$ADMIN_TOKEN_FILE"
|
||||
chmod 777 "$ADMIN_TOKEN_FILE"
|
||||
chown gitea:gitea "$ADMIN_TOKEN_FILE"
|
||||
echo "Admin token generated and saved to $ADMIN_TOKEN_FILE."
|
||||
else
|
||||
echo "Failed to generate admin token."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Generate SSH key for the admin user if it doesn't exist
|
||||
SSH_KEY_DIR="/var/lib/gitea/ssh-keys"
|
||||
mkdir -p "$SSH_KEY_DIR"
|
||||
if [ ! -f "$SSH_KEY_DIR/id_ed25519" ]; then
|
||||
echo "Generating SSH key for admin user..."
|
||||
ssh-keygen -t ed25519 -N "" -f "$SSH_KEY_DIR/id_ed25519"
|
||||
chown -R gitea:gitea "$SSH_KEY_DIR"
|
||||
chmod 700 "$SSH_KEY_DIR"
|
||||
chmod 600 "$SSH_KEY_DIR/id_ed25519"
|
||||
chmod 644 "$SSH_KEY_DIR/id_ed25519.pub"
|
||||
fi
|
||||
|
||||
# Create a autogits_obs_staging_bot user if it doesn't exist
|
||||
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "autogits_obs_staging_bot" {found=1} END {exit !found}'; then
|
||||
echo "Creating autogits_obs_staging_bot user..."
|
||||
su -s /bin/bash gitea -c 'gitea admin user create --username autogits_obs_staging_bot --password opensuse --email autogits_obs_staging_bot@example.com --must-change-password=false'
|
||||
else
|
||||
echo "autogits_obs_staging_bot user already exists."
|
||||
fi
|
||||
|
||||
# Generate an access token for the autogits_obs_staging_bot user
|
||||
BOT_TOKEN_FILE="/var/lib/gitea/autogits_obs_staging_bot.token"
|
||||
if [ -f "$BOT_TOKEN_FILE" ]; then
|
||||
echo "autogits_obs_staging_bot token already exists at $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Generating autogits_obs_staging_bot token..."
|
||||
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u autogits_obs_staging_bot -t autogits_obs_staging_bot-token")
|
||||
if [ -n "$BOT_TOKEN" ]; then
|
||||
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
|
||||
chmod 666 "$BOT_TOKEN_FILE"
|
||||
chown gitea:gitea "$BOT_TOKEN_FILE"
|
||||
echo "autogits_obs_staging_bot token generated and saved to $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Failed to generate autogits_obs_staging_bot token."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create a workflow-pr user if it doesn't exist
|
||||
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "workflow-pr" {found=1} END {exit !found}'; then
|
||||
echo "Creating workflow-pr user..."
|
||||
su -s /bin/bash gitea -c 'gitea admin user create --username workflow-pr --password opensuse --email workflow-pr@example.com --must-change-password=false'
|
||||
else
|
||||
echo "workflow-pr user already exists."
|
||||
fi
|
||||
|
||||
# Generate an access token for the workflow-pr user
|
||||
BOT_TOKEN_FILE="/var/lib/gitea/workflow-pr.token"
|
||||
if [ -f "$BOT_TOKEN_FILE" ]; then
|
||||
echo "workflow-pr token already exists at $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Generating workflow-pr token..."
|
||||
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u workflow-pr -t workflow-pr-token")
|
||||
if [ -n "$BOT_TOKEN" ]; then
|
||||
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
|
||||
chmod 666 "$BOT_TOKEN_FILE"
|
||||
chown gitea:gitea "$BOT_TOKEN_FILE"
|
||||
echo "workflow-pr token generated and saved to $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Failed to generate workflow-pr token."
|
||||
fi
|
||||
fi
|
||||
92
integration/gitea/container-files/opt/setup/setup-webhook.sh
Normal file
92
integration/gitea/container-files/opt/setup/setup-webhook.sh
Normal file
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
GITEA_URL="http://localhost:3000"
|
||||
WEBHOOK_URL="http://gitea-publisher:8002/rabbitmq-forwarder"
|
||||
TOKEN_NAME="webhook-creator"
|
||||
|
||||
echo "Webhook setup script started in background."
|
||||
|
||||
# Wait 10s for the main Gitea process to start
|
||||
sleep 10
|
||||
|
||||
# Wait for Gitea API to be ready
|
||||
echo "Waiting for Gitea API at $GITEA_URL..."
|
||||
while ! curl -s -f "$GITEA_URL/api/v1/version" > /dev/null; do
|
||||
echo "Gitea API not up yet, waiting 5s..."
|
||||
sleep 5
|
||||
done
|
||||
echo "Gitea API is up."
|
||||
|
||||
# The `gitea admin` command needs to be run as the gitea user.
|
||||
# The -raw flag gives us the token directly.
|
||||
echo "Generating or retrieving admin token..."
|
||||
TOKEN_FILE="/var/lib/gitea/admin.token"
|
||||
|
||||
if [ -f "$TOKEN_FILE" ]; then
|
||||
TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
|
||||
echo "Admin token loaded from $TOKEN_FILE."
|
||||
else
|
||||
TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t $TOKEN_NAME")
|
||||
if [ -n "$TOKEN" ]; then
|
||||
printf "%s" "$TOKEN" > "$TOKEN_FILE"
|
||||
chmod 666 "$TOKEN_FILE"
|
||||
chown gitea:gitea "$TOKEN_FILE"
|
||||
echo "Admin token generated and saved to $TOKEN_FILE."
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$TOKEN" ]; then
|
||||
echo "Failed to generate or retrieve admin token. This might be because the token already exists in Gitea but not in $TOKEN_FILE. Exiting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Run the dummy data setup script
|
||||
/opt/setup/setup-dummy-data.sh "$GITEA_URL" "$TOKEN"
|
||||
|
||||
# Add SSH key via API
|
||||
PUB_KEY_FILE="/var/lib/gitea/ssh-keys/id_ed25519.pub"
|
||||
if [ -f "$PUB_KEY_FILE" ]; then
|
||||
echo "Checking for existing SSH key 'bot-key'..."
|
||||
KEYS_URL="$GITEA_URL/api/v1/admin/users/workflow-pr/keys"
|
||||
EXISTING_KEYS=$(curl -s -X GET -H "Authorization: token $TOKEN" "$KEYS_URL")
|
||||
|
||||
if ! echo "$EXISTING_KEYS" | grep -q "\"title\":\"bot-key\""; then
|
||||
echo "Registering SSH key 'bot-key' via API..."
|
||||
KEY_CONTENT=$(cat "$PUB_KEY_FILE")
|
||||
curl -s -X POST "$KEYS_URL" \
|
||||
-H "Authorization: token $TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "{
|
||||
\"key\": \"$KEY_CONTENT\",
|
||||
\"read_only\": false,
|
||||
\"title\": \"bot-key\"
|
||||
}"
|
||||
echo -e "\nSSH key registered."
|
||||
else
|
||||
echo "SSH key 'bot-key' already registered."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if the webhook already exists
|
||||
echo "Checking for existing system webhook..."
|
||||
DB_PATH="/var/lib/gitea/data/gitea.db"
|
||||
EXISTS=$(su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"SELECT 1 FROM webhook WHERE url = '$WEBHOOK_URL' AND is_system_webhook = 1 LIMIT 1;\"")
|
||||
|
||||
if [ "$EXISTS" = "1" ]; then
|
||||
echo "System webhook for $WEBHOOK_URL already exists. Exiting."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Creating Gitea system webhook for $WEBHOOK_URL via direct database INSERT..."
|
||||
# The events JSON requires escaped double quotes for the sqlite3 command.
|
||||
EVENTS_JSON='{\"push_only\":false,\"send_everything\":true,\"choose_events\":false,\"branch_filter\":\"*\",\"events\":{\"create\":false,\"delete\":false,\"fork\":false,\"issue_assign\":false,\"issue_comment\":false,\"issue_label\":false,\"issue_milestone\":false,\"issues\":false,\"package\":false,\"pull_request\":false,\"pull_request_assign\":false,\"pull_request_comment\":false,\"pull_request_label\":false,\"pull_request_milestone\":false,\"pull_request_review\":false,\"pull_request_review_request\":false,\"pull_request_sync\":false,\"push\":false,\"release\":false,\"repository\":false,\"status\":false,\"wiki\":false,\"workflow_job\":false,\"workflow_run\":false}}'
|
||||
NOW_UNIX=$(date +%s)
|
||||
|
||||
INSERT_CMD="INSERT INTO webhook (repo_id, owner_id, is_system_webhook, url, http_method, content_type, events, is_active, type, meta, created_unix, updated_unix) VALUES (0, 0, 1, '$WEBHOOK_URL', 'POST', 1, '$EVENTS_JSON', 1, 'gitea', '', $NOW_UNIX, $NOW_UNIX);"
|
||||
|
||||
su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"$INSERT_CMD\""
|
||||
|
||||
echo "System webhook created successfully."
|
||||
|
||||
exit 0
|
||||
14
integration/mock-obs/Dockerfile
Normal file
14
integration/mock-obs/Dockerfile
Normal file
@@ -0,0 +1,14 @@
|
||||
# Use a base Python image
|
||||
FROM registry.suse.com/bci/python:3.11
|
||||
|
||||
# Set the working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Copy the server script
|
||||
COPY server.py .
|
||||
|
||||
# Expose the port the server will run on
|
||||
EXPOSE 8080
|
||||
|
||||
# Command to run the server
|
||||
CMD ["python3", "-u", "server.py"]
|
||||
@@ -0,0 +1,18 @@
|
||||
<project name="openSUSE:Leap:16.0:PullRequest">
|
||||
<title>Leap 16.0 PullRequest area</title>
|
||||
<description>Base project to define the pull request builds</description>
|
||||
<person userid="autogits_obs_staging_bot" role="maintainer"/>
|
||||
<person userid="maxlin_factory" role="maintainer"/>
|
||||
<group groupid="maintenance-opensuse.org" role="maintainer"/>
|
||||
<debuginfo>
|
||||
<enable/>
|
||||
</debuginfo>
|
||||
<repository name="standard">
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<arch>x86_64</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
</project>
|
||||
@@ -0,0 +1,59 @@
|
||||
<project name="openSUSE:Leap:16.0">
|
||||
<title>openSUSE Leap 16.0 based on SLFO</title>
|
||||
<description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
|
||||
<link project="openSUSE:Backports:SLE-16.0"/>
|
||||
<scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
|
||||
<person userid="dimstar_suse" role="maintainer"/>
|
||||
<person userid="lkocman-factory" role="maintainer"/>
|
||||
<person userid="maxlin_factory" role="maintainer"/>
|
||||
<person userid="factory-auto" role="reviewer"/>
|
||||
<person userid="licensedigger" role="reviewer"/>
|
||||
<group groupid="autobuild-team" role="maintainer"/>
|
||||
<group groupid="factory-maintainers" role="maintainer"/>
|
||||
<group groupid="maintenance-opensuse.org" role="maintainer"/>
|
||||
<group groupid="factory-staging" role="reviewer"/>
|
||||
<build>
|
||||
<disable repository="ports"/>
|
||||
</build>
|
||||
<debuginfo>
|
||||
<enable/>
|
||||
</debuginfo>
|
||||
<repository name="standard" rebuild="local">
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>local</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
<repository name="product">
|
||||
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
|
||||
<path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="images"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>local</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
<repository name="ports">
|
||||
<arch>armv7l</arch>
|
||||
</repository>
|
||||
<repository name="images">
|
||||
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
</project>
|
||||
140
integration/mock-obs/server.py
Normal file
140
integration/mock-obs/server.py
Normal file
@@ -0,0 +1,140 @@
|
||||
import http.server
|
||||
import socketserver
|
||||
import os
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import fnmatch
|
||||
|
||||
PORT = 8080
|
||||
RESPONSE_DIR = "/app/responses"
|
||||
STATE_DIR = "/tmp/mock_obs_state"
|
||||
|
||||
class MockOBSHandler(http.server.SimpleHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
logging.info(f"GET request for: {self.path}")
|
||||
path_without_query = self.path.split('?')[0]
|
||||
|
||||
# Check for state stored by a PUT request first
|
||||
sanitized_put_path = 'PUT' + path_without_query.replace('/', '_')
|
||||
state_file_path = os.path.join(STATE_DIR, sanitized_put_path)
|
||||
if os.path.exists(state_file_path):
|
||||
logging.info(f"Found stored PUT state for {self.path} at {state_file_path}")
|
||||
self.send_response(200)
|
||||
self.send_header("Content-type", "application/xml")
|
||||
file_size = os.path.getsize(state_file_path)
|
||||
self.send_header("Content-Length", str(file_size))
|
||||
self.end_headers()
|
||||
with open(state_file_path, 'rb') as f:
|
||||
self.wfile.write(f.read())
|
||||
return
|
||||
|
||||
# If no PUT state file, fall back to the glob/exact match logic
|
||||
self.handle_request('GET')
|
||||
|
||||
def do_PUT(self):
|
||||
logging.info(f"PUT request for: {self.path}")
|
||||
logging.info(f"Headers: {self.headers}")
|
||||
path_without_query = self.path.split('?')[0]
|
||||
|
||||
body = b''
|
||||
if self.headers.get('Transfer-Encoding', '').lower() == 'chunked':
|
||||
logging.info("Chunked transfer encoding detected")
|
||||
while True:
|
||||
line = self.rfile.readline().strip()
|
||||
if not line:
|
||||
break
|
||||
chunk_length = int(line, 16)
|
||||
if chunk_length == 0:
|
||||
self.rfile.readline()
|
||||
break
|
||||
body += self.rfile.read(chunk_length)
|
||||
self.rfile.read(2) # Read the trailing CRLF
|
||||
else:
|
||||
content_length = int(self.headers.get('Content-Length', 0))
|
||||
body = self.rfile.read(content_length)
|
||||
|
||||
logging.info(f"Body: {body.decode('utf-8')}")
|
||||
sanitized_path = 'PUT' + path_without_query.replace('/', '_')
|
||||
state_file_path = os.path.join(STATE_DIR, sanitized_path)
|
||||
|
||||
logging.info(f"Saving state for {self.path} to {state_file_path}")
|
||||
os.makedirs(os.path.dirname(state_file_path), exist_ok=True)
|
||||
with open(state_file_path, 'wb') as f:
|
||||
f.write(body)
|
||||
|
||||
self.send_response(200)
|
||||
self.send_header("Content-type", "text/plain")
|
||||
response_body = b"OK"
|
||||
self.send_header("Content-Length", str(len(response_body)))
|
||||
self.end_headers()
|
||||
self.wfile.write(response_body)
|
||||
|
||||
def do_POST(self):
|
||||
logging.info(f"POST request for: {self.path}")
|
||||
self.handle_request('POST')
|
||||
|
||||
def do_DELETE(self):
|
||||
logging.info(f"DELETE request for: {self.path}")
|
||||
self.handle_request('DELETE')
|
||||
|
||||
def handle_request(self, method):
|
||||
path_without_query = self.path.split('?')[0]
|
||||
sanitized_request_path = method + path_without_query.replace('/', '_')
|
||||
logging.info(f"Handling request, looking for match for: {sanitized_request_path}")
|
||||
|
||||
response_file = None
|
||||
# Check for glob match first
|
||||
if os.path.exists(RESPONSE_DIR):
|
||||
for filename in os.listdir(RESPONSE_DIR):
|
||||
if fnmatch.fnmatch(sanitized_request_path, filename):
|
||||
response_file = os.path.join(RESPONSE_DIR, filename)
|
||||
logging.info(f"Found matching response file (glob): {response_file}")
|
||||
break
|
||||
|
||||
# Fallback to exact match if no glob match
|
||||
if response_file is None:
|
||||
exact_file = os.path.join(RESPONSE_DIR, sanitized_request_path)
|
||||
if os.path.exists(exact_file):
|
||||
response_file = exact_file
|
||||
logging.info(f"Found matching response file (exact): {response_file}")
|
||||
|
||||
if response_file:
|
||||
logging.info(f"Serving content from {response_file}")
|
||||
self.send_response(200)
|
||||
self.send_header("Content-type", "application/xml")
|
||||
file_size = os.path.getsize(response_file)
|
||||
self.send_header("Content-Length", str(file_size))
|
||||
self.end_headers()
|
||||
with open(response_file, 'rb') as f:
|
||||
self.wfile.write(f.read())
|
||||
else:
|
||||
logging.info(f"Response file not found for {sanitized_request_path}. Sending 404.")
|
||||
self.send_response(404)
|
||||
self.send_header("Content-type", "text/plain")
|
||||
body = f"Mock response not found for {sanitized_request_path}".encode('utf-8')
|
||||
self.send_header("Content-Length", str(len(body)))
|
||||
self.end_headers()
|
||||
self.wfile.write(body)
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')
|
||||
|
||||
if not os.path.exists(STATE_DIR):
|
||||
logging.info(f"Creating state directory: {STATE_DIR}")
|
||||
os.makedirs(STATE_DIR)
|
||||
if not os.path.exists(RESPONSE_DIR):
|
||||
os.makedirs(RESPONSE_DIR)
|
||||
|
||||
with socketserver.TCPServer(("", PORT), MockOBSHandler) as httpd:
|
||||
logging.info(f"Serving mock OBS API on port {PORT}")
|
||||
|
||||
def graceful_shutdown(sig, frame):
|
||||
logging.info("Received SIGTERM, shutting down gracefully...")
|
||||
threading.Thread(target=httpd.shutdown).start()
|
||||
|
||||
signal.signal(signal.SIGTERM, graceful_shutdown)
|
||||
|
||||
httpd.serve_forever()
|
||||
logging.info("Server has shut down.")
|
||||
1
integration/obs-staging-bot/Dockerfile
Symbolic link
1
integration/obs-staging-bot/Dockerfile
Symbolic link
@@ -0,0 +1 @@
|
||||
./Dockerfile.package
|
||||
18
integration/obs-staging-bot/Dockerfile.local
Normal file
18
integration/obs-staging-bot/Dockerfile.local
Normal file
@@ -0,0 +1,18 @@
|
||||
# Use a base Python image
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
# Install any necessary dependencies for the bot
|
||||
# e.g., git, curl, etc.
|
||||
RUN zypper -n in git-core curl binutils
|
||||
|
||||
# Copy the bot binary and its entrypoint script
|
||||
COPY obs-staging-bot/obs-staging-bot /usr/local/bin/obs-staging-bot
|
||||
COPY integration/obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh
|
||||
|
||||
# Create a non-root user to run the bot
|
||||
RUN useradd -m -u 1001 bot
|
||||
USER 1001
|
||||
|
||||
# Set the entrypoint
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
19
integration/obs-staging-bot/Dockerfile.package
Normal file
19
integration/obs-staging-bot/Dockerfile.package
Normal file
@@ -0,0 +1,19 @@
|
||||
# Use a base Python image
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
|
||||
RUN zypper --gpg-auto-import-keys ref
|
||||
|
||||
# Install any necessary dependencies for the bot
|
||||
# e.g., git, curl, etc.
|
||||
RUN zypper -n in git-core curl autogits-obs-staging-bot binutils
|
||||
|
||||
COPY integration/obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh
|
||||
|
||||
# Create a non-root user to run the bot
|
||||
RUN useradd -m -u 1001 bot
|
||||
USER 1001
|
||||
|
||||
# Set the entrypoint
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
28
integration/obs-staging-bot/entrypoint.sh
Normal file
28
integration/obs-staging-bot/entrypoint.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
# This script waits for the Gitea admin token to be created,
|
||||
# exports it as an environment variable, and then executes the main container command.
|
||||
|
||||
TOKEN_FILE="/gitea-data/autogits_obs_staging_bot.token"
|
||||
|
||||
echo "OBS Staging Bot: Waiting for Gitea autogits_obs_staging_bot token at $TOKEN_FILE..."
|
||||
while [ ! -s "$TOKEN_FILE" ]; do
|
||||
sleep 2
|
||||
done
|
||||
|
||||
export GITEA_TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
|
||||
echo "OBS Staging Bot: GITEA_TOKEN exported."
|
||||
|
||||
# Execute the bot as the current user (root), using 'env' to pass required variables.
|
||||
echo "OBS Staging Bot: Executing bot..."
|
||||
|
||||
exe=$(which obs-staging-bot)
|
||||
exe=${exe:-/usr/local/bin/obs-staging-bot}
|
||||
|
||||
package=$(rpm -qa | grep autogits-obs-staging-bot) || :
|
||||
|
||||
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
|
||||
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
|
||||
|
||||
exec $exe "$@"
|
||||
77
integration/podman-compose.txt
Normal file
77
integration/podman-compose.txt
Normal file
@@ -0,0 +1,77 @@
|
||||
+-------------------------------------------------------------------------------------------------+
|
||||
| Podman-Compose Services Diagram |
|
||||
+-------------------------------------------------------------------------------------------------+
|
||||
| |
|
||||
| [Network] |
|
||||
| gitea-network (Bridge network for inter-service communication) |
|
||||
| |
|
||||
|-------------------------------------------------------------------------------------------------|
|
||||
| |
|
||||
| [Service: gitea] |
|
||||
| Description: Self-hosted Git service, central hub for repositories and code management. |
|
||||
| Container Name: gitea-test |
|
||||
| Image: Built from ./gitea Dockerfile |
|
||||
| Ports: 3000 (HTTP), 3022 (SSH) |
|
||||
| Volumes: ./gitea-data (for persistent data), ./gitea-logs (for logs) |
|
||||
| Network: gitea-network |
|
||||
| |
|
||||
|-------------------------------------------------------------------------------------------------|
|
||||
| |
|
||||
| [Service: rabbitmq] |
|
||||
| Description: Message broker for asynchronous communication between services. |
|
||||
| Container Name: rabbitmq-test |
|
||||
| Image: rabbitmq:3.13.7-management |
|
||||
| Ports: 5671 (AMQP), 15672 (Management UI) |
|
||||
| Volumes: ./rabbitmq-data (for persistent data), ./rabbitmq-config/certs (TLS certs), |
|
||||
| ./rabbitmq-config/rabbitmq.conf (config), ./rabbitmq-config/definitions.json (exchanges)|
|
||||
| Healthcheck: Ensures RabbitMQ is running and healthy. |
|
||||
| Network: gitea-network |
|
||||
| |
|
||||
|-------------------------------------------------------------------------------------------------|
|
||||
| |
|
||||
| [Service: gitea-publisher] |
|
||||
| Description: Publishes events from Gitea to the RabbitMQ message queue. |
|
||||
| Container Name: gitea-publisher |
|
||||
| Image: Built from ../gitea-events-rabbitmq-publisher/Dockerfile (local/package) |
|
||||
| Dependencies: gitea (started), rabbitmq (healthy) |
|
||||
| Environment: RABBITMQ_HOST, RABBITMQ_USERNAME, RABBITMQ_PASSWORD, SSL_CERT_FILE |
|
||||
| Command: Listens for Gitea events, publishes to 'suse' topic, debug enabled. |
|
||||
| Network: gitea-network |
|
||||
| |
|
||||
|-------------------------------------------------------------------------------------------------|
|
||||
| |
|
||||
| [Service: workflow-pr] |
|
||||
| Description: Manages pull request workflows, likely consuming events from RabbitMQ and |
|
||||
| interacting with Gitea. |
|
||||
| Container Name: workflow-pr |
|
||||
| Image: Built from ../workflow-pr/Dockerfile (local/package) |
|
||||
| Dependencies: gitea (started), rabbitmq (healthy) |
|
||||
| Environment: AMQP_USERNAME, AMQP_PASSWORD, SSL_CERT_FILE |
|
||||
| Volumes: ./gitea-data (read-only), ./workflow-pr/workflow-pr.json (config), |
|
||||
| ./workflow-pr-repos (for repositories) |
|
||||
| Command: Configures Gitea/RabbitMQ URLs, enables debug, manages repositories. |
|
||||
| Network: gitea-network |
|
||||
| |
|
||||
|-------------------------------------------------------------------------------------------------|
|
||||
| |
|
||||
| [Service: mock-obs] |
|
||||
| Description: A mock (simulated) service for the Open Build Service (OBS) for testing. |
|
||||
| Container Name: mock-obs |
|
||||
| Image: Built from ./mock-obs Dockerfile |
|
||||
| Ports: 8080 |
|
||||
| Volumes: ./mock-obs/responses (for mock API responses) |
|
||||
| Network: gitea-network |
|
||||
| |
|
||||
|-------------------------------------------------------------------------------------------------|
|
||||
| |
|
||||
| [Service: obs-staging-bot] |
|
||||
| Description: A bot that interacts with Gitea and the mock OBS, likely for staging processes. |
|
||||
| Container Name: obs-staging-bot |
|
||||
| Image: Built from ../obs-staging-bot/Dockerfile (local/package) |
|
||||
| Dependencies: gitea (started), mock-obs (started) |
|
||||
| Environment: OBS_USER, OBS_PASSWORD |
|
||||
| Volumes: ./gitea-data (read-only) |
|
||||
| Command: Configures Gitea/OBS URLs, enables debug. |
|
||||
| Network: gitea-network |
|
||||
| |
|
||||
+-------------------------------------------------------------------------------------------------+
|
||||
136
integration/podman-compose.yml
Normal file
136
integration/podman-compose.yml
Normal file
@@ -0,0 +1,136 @@
|
||||
version: "3.8"
|
||||
|
||||
networks:
|
||||
gitea-network:
|
||||
driver: bridge
|
||||
|
||||
services:
|
||||
gitea:
|
||||
build: ./gitea
|
||||
container_name: gitea-test
|
||||
environment:
|
||||
- GITEA_WORK_DIR=/var/lib/gitea
|
||||
networks:
|
||||
- gitea-network
|
||||
ports:
|
||||
# Map the HTTP and SSH ports defined in your app.ini
|
||||
- "3000:3000"
|
||||
- "3022:3022"
|
||||
volumes:
|
||||
# Persist Gitea's data (repositories, sqlite db, etc.) to a local directory
|
||||
# The :z flag allows sharing between containers
|
||||
- ./gitea-data:/var/lib/gitea:z
|
||||
# Persist Gitea's logs to a local directory
|
||||
- ./gitea-logs:/var/log/gitea:Z
|
||||
restart: unless-stopped
|
||||
|
||||
rabbitmq:
|
||||
image: rabbitmq:3.13.7-management
|
||||
container_name: rabbitmq-test
|
||||
healthcheck:
|
||||
test: ["CMD", "rabbitmq-diagnostics", "check_running", "-q"]
|
||||
interval: 30s
|
||||
timeout: 30s
|
||||
retries: 3
|
||||
networks:
|
||||
- gitea-network
|
||||
ports:
|
||||
# AMQP protocol port with TLS
|
||||
- "5671:5671"
|
||||
# HTTP management UI
|
||||
- "15672:15672"
|
||||
volumes:
|
||||
# Persist RabbitMQ data
|
||||
- ./rabbitmq-data:/var/lib/rabbitmq:Z
|
||||
# Mount TLS certs
|
||||
- ./rabbitmq-config/certs:/etc/rabbitmq/certs:Z
|
||||
# Mount rabbitmq config
|
||||
- ./rabbitmq-config/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:Z
|
||||
# Mount exchange definitions
|
||||
- ./rabbitmq-config/definitions.json:/etc/rabbitmq/definitions.json:Z
|
||||
restart: unless-stopped
|
||||
|
||||
gitea-publisher:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: integration/gitea-events-rabbitmq-publisher/Dockerfile${GIWTF_IMAGE_SUFFIX}
|
||||
container_name: gitea-publisher
|
||||
networks:
|
||||
- gitea-network
|
||||
depends_on:
|
||||
gitea:
|
||||
condition: service_started
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
- RABBITMQ_HOST=rabbitmq-test
|
||||
- RABBITMQ_USERNAME=gitea
|
||||
- RABBITMQ_PASSWORD=gitea
|
||||
- SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
|
||||
command: [ "-listen", "0.0.0.0:8002", "-topic-domain", "suse", "-debug" ]
|
||||
restart: unless-stopped
|
||||
|
||||
workflow-pr:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: integration/workflow-pr/Dockerfile${GIWTF_IMAGE_SUFFIX}
|
||||
container_name: workflow-pr
|
||||
networks:
|
||||
- gitea-network
|
||||
depends_on:
|
||||
gitea:
|
||||
condition: service_started
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
- AMQP_USERNAME=gitea
|
||||
- AMQP_PASSWORD=gitea
|
||||
- SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
|
||||
volumes:
|
||||
- ./gitea-data:/var/lib/gitea:ro,z
|
||||
- ./workflow-pr/workflow-pr.json:/etc/workflow-pr.json:ro,z
|
||||
- ./workflow-pr-repos:/var/lib/workflow-pr/repos:Z
|
||||
command: [
|
||||
"-check-on-start",
|
||||
"-debug",
|
||||
"-gitea-url", "http://gitea-test:3000",
|
||||
"-url", "amqps://rabbitmq-test:5671",
|
||||
"-config", "/etc/workflow-pr.json",
|
||||
"-repo-path", "/var/lib/workflow-pr/repos"
|
||||
]
|
||||
restart: unless-stopped
|
||||
|
||||
mock-obs:
|
||||
build: ./mock-obs
|
||||
container_name: mock-obs
|
||||
networks:
|
||||
- gitea-network
|
||||
ports:
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- ./mock-obs/responses:/app/responses:z # Use :z for shared SELinux label
|
||||
restart: unless-stopped
|
||||
|
||||
obs-staging-bot:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: integration/obs-staging-bot/Dockerfile${GIWTF_IMAGE_SUFFIX}
|
||||
container_name: obs-staging-bot
|
||||
networks:
|
||||
- gitea-network
|
||||
depends_on:
|
||||
gitea:
|
||||
condition: service_started
|
||||
mock-obs:
|
||||
condition: service_started
|
||||
environment:
|
||||
- OBS_USER=mock
|
||||
- OBS_PASSWORD=mock-long-password
|
||||
volumes:
|
||||
- ./gitea-data:/gitea-data:ro,z
|
||||
command:
|
||||
- "-debug"
|
||||
- "-gitea-url=http://gitea-test:3000"
|
||||
- "-obs=http://mock-obs:8080"
|
||||
- "-obs-web=http://mock-obs:8080"
|
||||
restart: unless-stopped
|
||||
10
integration/pytest.ini
Normal file
10
integration/pytest.ini
Normal file
@@ -0,0 +1,10 @@
|
||||
[pytest]
|
||||
markers =
|
||||
t001: Test case 001
|
||||
t002: Test case 002
|
||||
t003: Test case 003
|
||||
t004: Test case 004
|
||||
t005: Test case 005
|
||||
t006: Test case 006
|
||||
t007: Test case 007
|
||||
dependency: pytest-dependency marker
|
||||
30
integration/rabbitmq-config/certs/cert.pem
Normal file
30
integration/rabbitmq-config/certs/cert.pem
Normal file
@@ -0,0 +1,30 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFKzCCAxOgAwIBAgIUJsg/r0ZyIVxtAkrlZKOr4LvYEvMwDQYJKoZIhvcNAQEL
|
||||
BQAwGDEWMBQGA1UEAwwNcmFiYml0bXEtdGVzdDAeFw0yNjAxMjQxMjQyMjNaFw0z
|
||||
NjAxMjIxMjQyMjNaMBgxFjAUBgNVBAMMDXJhYmJpdG1xLXRlc3QwggIiMA0GCSqG
|
||||
SIb3DQEBAQUAA4ICDwAwggIKAoICAQC9OjTq4DgqVo0mRpS8DGRR6SFrSpb2bqnl
|
||||
YI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4WyRQH5PwnXOA1Lxz51qp1
|
||||
t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sbKY1+/NVJpMs6dL6NzJXG
|
||||
N6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYUmm43tjjxPZ81rhCXFx/F
|
||||
v1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKXOPDxrM1erBN5eCwTpJMS
|
||||
4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVAsqhiUa2eeAVkFwZl9wRw
|
||||
lddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hAY/V+VbF1iHGAVIz4XlUj
|
||||
gC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP+1hZJbm7ePsS5Na+qYG1
|
||||
LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDPXCxFXDrIcFvldmJYTyeK
|
||||
7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9bBjNnWaY2z17hijnzVVz
|
||||
jOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrkNt7DOCUqwBnuBqYiRZFt
|
||||
A1QicHxaEQIDAQABo20wazAdBgNVHQ4EFgQU3l25Ghab2k7UhwxftZ2vZ1HO9Sow
|
||||
HwYDVR0jBBgwFoAU3l25Ghab2k7UhwxftZ2vZ1HO9SowDwYDVR0TAQH/BAUwAwEB
|
||||
/zAYBgNVHREEETAPgg1yYWJiaXRtcS10ZXN0MA0GCSqGSIb3DQEBCwUAA4ICAQB9
|
||||
ilcsRqIvnyN25Oh668YC/xxyeNTIaIxjMLyJaMylBRjNwo1WfbdpXToaEXgot5gK
|
||||
5HGlu3OIBBwBryNAlBtf/usxzLzmkEsm1Dsn9sJNY1ZTkD8MO9yyOtLqBlqAsIse
|
||||
oPVjzSdjk1fP3uyoG/ZUVAFZHZD3/9BEsftfS13oUVxo7vYz1DSyUATT/4QTYMQB
|
||||
PytL6EKJ0dLyuy7rIkZVkaUi+P7GuDXj25Mi6Zkxaw2QnssSuoqy1bAMkzEyNFK5
|
||||
0wlNWEY8H3jRZuAz1T4AXb9sjeCgBKZoWXgmGbzleOophdzvlq66UGAWPWYFGp8Q
|
||||
4GJognovhKzSY9+3n+rMPLAXSao48SYDlyTOZeBo1DTluR5QjVd+NWbEdIsA6buQ
|
||||
a6uPTSVKsulm7hyUlEZp+SsYAtVoZx3jzKKjZXjnaxOfUFWx6pTxNXvxR7pQ/8Ls
|
||||
IfduGy4VjKVQdyuwCE7eVEPDK6d53WWs6itziuj7gfq8mHvZivIA65z05lTwqkvb
|
||||
1WS2aht+zacqVSYyNrK+/kJA2CST3ggc1EO73lRvbfO9LJZWMdO+f/tkXH4zkfmL
|
||||
A3JtJcLOWuv+ZrZvHMpKlBFNMySxE3IeGX+Ad9bGyhZvZULut95/QD7Xy4cPRZHF
|
||||
R3SRn0rn/BeTly+5fkEoFk+ttah8IbwzhduPyPIxng==
|
||||
-----END CERTIFICATE-----
|
||||
52
integration/rabbitmq-config/certs/key.pem
Normal file
52
integration/rabbitmq-config/certs/key.pem
Normal file
@@ -0,0 +1,52 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC9OjTq4DgqVo0m
|
||||
RpS8DGRR6SFrSpb2bqnlYI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4W
|
||||
yRQH5PwnXOA1Lxz51qp1t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sb
|
||||
KY1+/NVJpMs6dL6NzJXGN6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYU
|
||||
mm43tjjxPZ81rhCXFx/Fv1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKX
|
||||
OPDxrM1erBN5eCwTpJMS4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVA
|
||||
sqhiUa2eeAVkFwZl9wRwlddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hA
|
||||
Y/V+VbF1iHGAVIz4XlUjgC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP
|
||||
+1hZJbm7ePsS5Na+qYG1LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDP
|
||||
XCxFXDrIcFvldmJYTyeK7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9
|
||||
bBjNnWaY2z17hijnzVVzjOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrk
|
||||
Nt7DOCUqwBnuBqYiRZFtA1QicHxaEQIDAQABAoICAA+AWvDpzNgVDouV6R3NkxNN
|
||||
upXgPqUx9BuNETCtbal6i4AxR1l/zC9gwti82QTKQi2OeM74MHd8zjcqIkiyRsDP
|
||||
wDNDKIfEAONTT+4LLoWEN5WNDGRZ4Nw1LrLqiVX+ULtNPXvynRJtLQa43PVL74oQ
|
||||
pLBle23A1n0uNmcJ9w21B6ktysN9q+JVSCZodZpD6Jk1jus8JXgDXy/9Za2NMTV8
|
||||
A5ShbYz/ETSBJCSnERz7GARW7TN6V0jS6vLTSqMQJyn0KYbHNDr7TPTL7psRuaI5
|
||||
jP/cqxmx1/WKLo5k3cR3IW/cesDGQXZhMRQvNymXJkxvWMPS36lmfyZtbFNflw4Z
|
||||
9OD+2RKt5jFDJjG8fYiYoYBdLiTj2Wdvo4mbRPNkTL75o65riDkDCQuZhDXFBm3s
|
||||
B1aDv5y1AXrzNZ5JSikszKgbLNPYB0rI3unp6i0P1985w6dyel0MGG+ouaeiyrxS
|
||||
9IgJDnE4BJ79mEzHTXtbZ/+3aGAK/Y6mU8Pz2s6/+6ccT0miievsMS+si1KESF31
|
||||
WLnsMdcrJcxqcm7Ypo24G0yBJluSDKtD1cqQUGN1MKp+EEv1SCH+4csaa3ooRB0o
|
||||
YveySjqxtmhVpQuY3egCOaXhPmX7lgYwoe+G4UIkUMwPn20WMg+jFxgPASdh4lqE
|
||||
mzpePP7STvEZAr+rrLu1AoIBAQDmCEiKOsUTtJlX3awOIRtCkIqBxS1E6rpyjfxK
|
||||
A6+zpXnE++8MhIJ07+9bPdOshGjS3JbJ+hu+IocbNg++rjRArYQnJh8/qBZ2GB2v
|
||||
Ryfptsoxtk/xUsmOfchvk4tOjvDHZrJehUtGc+LzX/WUqpgtEk1Gnx7RGRuDNnqS
|
||||
Q1+yU4NubHwOHPswBBXOnVtopcAHFpKhbKRFOHOwMZN99qcWVIkv4J9c6emcPMLI
|
||||
I/QPIvwB6WmbLa0o3JNXlD4kPdqCgNW36KEFiW8m+4tgzF3HWYSAyIeBRFG7ouE6
|
||||
yk5hiptPKhZlTmTAkQSssCXksiTw1rsspFULZSRyaaaPunvVAoIBAQDSlrKu+B2h
|
||||
AJtxWy5MQDOiroqT3KDneIGXPYgH3/tiDmxy0CIEbSb5SqZ6zAmihs3dWWCmc1JH
|
||||
YObRrqIxu+qVi4K+Uz8l7WBrS7DkjZjajq+y/mrZYUNRoL2q9mnNqRNan7zxWDJc
|
||||
U4u2NH9P4LOz6ttE4OG9SC3/gZLoepA+ANZatu93749IT7z8ske0MVPP76jVI1Gl
|
||||
D7cPIlzcBUdJgNV8UOkxeqU3+S6Jn17Tkx5qMWND/2BCN4voQ4pfGWSkbaHlMLh1
|
||||
2SbVuR+HYPY3aPJeSY7MEPoc7d2SSVOcVDr2AQwSDSCCgIFZOZlawehUz9R51hK8
|
||||
LlaccFWXhS9NAoIBAEFZNRJf48DXW4DErq5M5WuhmFeJZnTfohwNDhEQvwdwCQnW
|
||||
8HBD7LO/veXTyKCH9SeCFyxF6z+2m181mn93Cc0d/h8JC3OQEuF1tGko88PHc+Vv
|
||||
f4J1HGFohlp8NeUZYnmjSSTlBR98qIqvRhr348daHa3kYmLQmSpLfcKzdSo542qp
|
||||
UwzHWuynHHLX7THrdIQO+5T0Qi6P/P2e9+GfApSra1W4oE1K/lyuPj+RRzJNo/3/
|
||||
C0tUTI8BKrKEoKq3D65nX0+hvKzQAE24xD25kSKi4aucTDKC8B04BngnJOE8+SYi
|
||||
NL6O6Lxz9joAyKMRoMDyn7Xs8WQNVa9TKEhImAkCggEBAMljmIm/egZIoF7thf8h
|
||||
vr+rD5eL/Myf776E95wgVTVW+dtqs71r7UOmYkM48VXeeO1f1hAYZO0h/Fs2GKJb
|
||||
RWGyQ1xkHBXXRsgVYJuR1kXdAqW4rNIqM8jSYdAnStOFB5849+YOJEsrEocy+TWY
|
||||
fAJpbTwXm4n6hxK8BZQR8fN5tYSXQbd+/5V1vBQlInFuYuqOFPWPizrBJp1wjUFU
|
||||
QvJGJON4NSo+UdaPlDPEl1jabtG7XWTfylxI5qE+RgvgKuEcfyDBUQZSntLw8Pf0
|
||||
gEJJOM92pPr+mVIlICoPucfcvW4ZXkO9DgP/hLOhY8jpe5fwERBa6xvPbMC6pP/8
|
||||
PFkCggEBAOLtvboBThe57QRphsKHmCtRJHmT4oZzhMYsE+5GMGYzPNWod1hSyfXn
|
||||
EB8iTmAFP5r7FdC10B8mMpACXuDdi2jbmlYOTU6xNTprSKtv8r8CvorWJdsQwRsy
|
||||
pZ7diSCeyi0z/sIx//ov0b3WD0E8BG/HWsFbX0p5xXpaljYEv5dK7xUiWgBW+15a
|
||||
N1AeVcPiXRDwhQMVcvVOvzgwKsw+Rpls/9W4hihcBHaiMcBUDFWxJtnf4ZAGAZS3
|
||||
/694MOYlmfgT/cDqF9oOsCdxM0w24kL0dcUM7zPk314ixAAfUwXaxisBhS2roJ88
|
||||
HsuK9JPSK/AS0IqUtKiq4LZ9ErixYF0=
|
||||
-----END PRIVATE KEY-----
|
||||
35
integration/rabbitmq-config/definitions.json
Executable file
35
integration/rabbitmq-config/definitions.json
Executable file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"users": [
|
||||
{
|
||||
"name": "gitea",
|
||||
"password_hash": "5IdZmMJhNb4otX/nz9Xtmkpj9khl6+5eAmXNs/oHYwQNO3jg",
|
||||
"hashing_algorithm": "rabbit_password_hashing_sha256",
|
||||
"tags": "administrator"
|
||||
}
|
||||
],
|
||||
"vhosts": [
|
||||
{
|
||||
"name": "/"
|
||||
}
|
||||
],
|
||||
"permissions": [
|
||||
{
|
||||
"user": "gitea",
|
||||
"vhost": "/",
|
||||
"configure": ".*",
|
||||
"write": ".*",
|
||||
"read": ".*"
|
||||
}
|
||||
],
|
||||
"exchanges": [
|
||||
{
|
||||
"name": "pubsub",
|
||||
"vhost": "/",
|
||||
"type": "topic",
|
||||
"durable": true,
|
||||
"auto_delete": false,
|
||||
"internal": false,
|
||||
"arguments": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
83
integration/test-plan.md
Normal file
83
integration/test-plan.md
Normal file
@@ -0,0 +1,83 @@
|
||||
# Test Plan: workflow-pr Bot
|
||||
|
||||
## 1. Introduction
|
||||
|
||||
This document outlines the test plan for the `workflow-pr` bot. The bot is responsible for synchronizing pull requests between ProjectGit and PackageGit repositories, managing reviews, and handling merges. This test plan aims to ensure the bot's functionality, reliability, and performance.
|
||||
|
||||
## 2. Scope
|
||||
|
||||
### In Scope
|
||||
|
||||
* Pull Request synchronization (creation, update, closing).
|
||||
* Reviewer management (adding, re-adding, mandatory vs. advisory).
|
||||
* Merge management, including `ManualMergeOnly` and `ManualMergeProject` flags.
|
||||
* Configuration parsing (`workflow.config`).
|
||||
* Label management (`staging/Auto`, `review/Pending`, `review/Done`).
|
||||
* Maintainership and permissions handling.
|
||||
|
||||
### Out of Scope
|
||||
|
||||
* Package deletion requests (planned feature).
|
||||
* Underlying infrastructure (Gitea, RabbitMQ, OBS).
|
||||
* Performance and load testing.
|
||||
* Closing a PackageGit PR (currently disabled).
|
||||
|
||||
## 3. Test Objectives
|
||||
|
||||
* Verify that pull requests are correctly synchronized between ProjectGit and PackageGit.
|
||||
* Ensure that reviewers are correctly added to pull requests based on the configuration.
|
||||
* Validate that pull requests are merged only when all conditions are met.
|
||||
* Confirm that the bot correctly handles various configurations in `workflow.config`.
|
||||
* Verify that labels are correctly applied to pull requests.
|
||||
* Ensure that maintainership and permissions are correctly enforced.
|
||||
|
||||
## 4. Test Strategy
|
||||
|
||||
The testing will be conducted in a dedicated test environment that mimics the production environment. The strategy will involve a combination of:
|
||||
|
||||
* **Component Testing:** Testing individual components of the bot in isolation using unit tests written in Go.
|
||||
* **Integration Testing:** Testing the bot's interaction with Gitea, RabbitMQ, and a mock OBS server using `pytest`.
|
||||
* **End-to-End Testing:** Testing the complete workflow from creating a pull request to merging it using `pytest`.
|
||||
|
||||
### Test Automation
|
||||
|
||||
* **Unit Tests:** Go's built-in testing framework will be used to write unit tests for individual functions and methods.
|
||||
* **Integration and End-to-End Tests:** `pytest` will be used to write integration and end-to-end tests that use the Gitea API to create pull requests and verify the bot's behavior.
|
||||
|
||||
### Success Metrics
|
||||
|
||||
* **Test Coverage:** The goal is to achieve at least 80% test coverage for the bot's codebase.
|
||||
* **Bug Detection Rate:** The number of bugs found during the testing phase.
|
||||
* **Test Pass Rate:** The percentage of test cases that pass without any issues.
|
||||
|
||||
|
||||
## 5. Test Cases
|
||||
|
||||
| Test Case ID | Description | Steps to Reproduce | Expected Results | Priority |
|
||||
| :--- | :--- | :--- | :--- | :--- |
|
||||
| **TC-SYNC-001** | **Create ProjectGit PR from PackageGit PR** | 1. Create a new PR in a PackageGit repository. | 1. A new PR is created in the corresponding ProjectGit repository with the title "Forwarded PRs: <package_name>".<br>2. The ProjectGit PR description contains a link to the PackageGit PR (e.g., `PR: org/package_repo!pr_number`).<br>3. The package submodule in the ProjectGit PR points to the PackageGit PR's commit. | High |
|
||||
| **TC-SYNC-002** | **Update ProjectGit PR from PackageGit PR** | 1. Push a new commit to an existing PackageGit PR. | 1. The corresponding ProjectGit PR's head branch is updated with the new commit. | High |
|
||||
| **TC-SYNC-003** | **WIP Flag Synchronization** | 1. Mark a PackageGit PR as "Work In Progress".<br>2. Remove the WIP flag from the PackageGit PR. | 1. The corresponding ProjectGit PR is also marked as "Work In Progress".<br>2. The WIP flag on the ProjectGit PR is removed. | Medium |
|
||||
| **TC-SYNC-004** | **WIP Flag (multiple referenced package PRs)** | 1. Create a ProjectGit PR that references multiple PackageGit PRs.<br>2. Mark one of the PackageGit PRs as "Work In Progress".<br>3. Remove the "Work In Progress" flag from all PackageGit PRs. | 1. The ProjectGit PR is marked as "Work In Progress".<br>2. The "Work In Progress" flag is removed from the ProjectGit PR only after it has been removed from all associated PackageGit PRs. | Medium |
|
||||
| **TC-SYNC-005** | **NoProjectGitPR = true, edits disabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR without "Allow edits from maintainers" enabled. <br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The bot adds a warning comment to the PackageGit PR explaining that it cannot update the PR. | High |
|
||||
| **TC-SYNC-006** | **NoProjectGitPR = true, edits enabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR with "Allow edits from maintainers" enabled.<br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The submodule commit on the project PR is updated with the new commit from the PackageGit PR. | High |
|
||||
| **TC-COMMENT-001** | **Detect duplicate comments** | 1. Create a PackageGit PR.<br>2. Wait for the `workflow-pr` bot to act on the PR.<br>3. Edit the body of the PR to trigger the bot a second time. | 1. The bot should not post a duplicate comment. | High |
|
||||
| **TC-REVIEW-001** | **Add mandatory reviewers** | 1. Create a new PackageGit PR. | 1. All mandatory reviewers are added to both the PackageGit and ProjectGit PRs. | High |
|
||||
| **TC-REVIEW-002** | **Add advisory reviewers** | 1. Create a new PackageGit PR with advisory reviewers defined in the configuration. | 1. Advisory reviewers are added to the PR, but their approval is not required for merging. | Medium |
|
||||
| **TC-REVIEW-003** | **Re-add reviewers** | 1. Push a new commit to a PackageGit PR after it has been approved. | 1. The original reviewers are re-added to the PR. | Medium |
|
||||
| **TC-REVIEW-004** | **Package PR created by a maintainer** | 1. Create a PackageGit PR from the account of a package maintainer. | 1. No review is requested from other package maintainers. | High |
|
||||
| **TC-REVIEW-005** | **Package PR created by an external user (approve)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers approves the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer approves the PR, the other maintainers are removed as reviewers. | High |
|
||||
| **TC-REVIEW-006** | **Package PR created by an external user (reject)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers rejects the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer rejects the PR, the other maintainers are removed as reviewers. | High |
|
||||
| **TC-REVIEW-007** | **Package PR created by a maintainer with ReviewRequired=true** | 1. Set `ReviewRequired = true` in `workflow.config`.<br>2. Create a PackageGit PR from the account of a package maintainer. | 1. A review is requested from other package maintainers if available. | High |
|
||||
| **TC-MERGE-001** | **Automatic Merge** | 1. Create a PackageGit PR.<br>2. Ensure all mandatory reviews are completed on both project and package PRs. | 1. The PR is automatically merged. | High |
|
||||
| **TC-MERGE-002** | **ManualMergeOnly with Package Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a package maintainer for that package. | 1. The PR is merged. | High |
|
||||
| **TC-MERGE-003** | **ManualMergeOnly with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a user who is not a maintainer for that package. | 1. The PR is not merged. | High |
|
||||
| **TC-MERGE-004** | **ManualMergeOnly with multiple packages** | 1. Create a ProjectGit PR that references multiple PackageGit PRs with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on each package PR from the account of a package maintainer. | 1. The PR is merged only after "merge ok" is commented on all associated PackageGit PRs. | High |
|
||||
| **TC-MERGE-005** | **ManualMergeOnly with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a project maintainer. | 1. The PR is merged. | High |
|
||||
| **TC-MERGE-006** | **ManualMergeProject with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a project maintainer. | 1. The PR is merged. | High |
|
||||
| **TC-MERGE-007** | **ManualMergeProject with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a package maintainer. | 1. The PR is not merged. | High |
|
||||
| **TC-CONFIG-001** | **Invalid Configuration** | 1. Provide an invalid `workflow.config` file. | 1. The bot reports an error and does not process any PRs. | High |
|
||||
| **TC-LABEL-001** | **Apply `staging/Auto` label** | 1. Create a new PackageGit PR. | 1. The `staging/Auto` label is applied to the ProjectGit PR. | High |
|
||||
| **TC-LABEL-002** | **Apply `review/Pending` label** | 1. Create a new PackageGit PR. | 1. The `review/Pending` label is applied to the ProjectGit PR when there are pending reviews. | Medium |
|
||||
| **TC-LABEL-003** | **Apply `review/Done` label** | 1. Ensure all mandatory reviews for a PR are completed. | 1. The `review/Done` label is applied to the ProjectGit PR when all mandatory reviews are completed. | Medium |
|
||||
|
||||
0
integration/tests/__init__.py
Normal file
0
integration/tests/__init__.py
Normal file
720
integration/tests/conftest.py
Normal file
720
integration/tests/conftest.py
Normal file
@@ -0,0 +1,720 @@
|
||||
"""
|
||||
This module contains pytest fixtures for setting up the test environment.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import time
|
||||
import os
|
||||
|
||||
# Assuming GiteaAPIClient is in tests/lib/common_test_utils.py
|
||||
from tests.lib.common_test_utils import GiteaAPIClient
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def gitea_env():
|
||||
"""
|
||||
Sets up the Gitea environment with dummy data and provides a GiteaAPIClient instance.
|
||||
"""
|
||||
gitea_url = "http://127.0.0.1:3000"
|
||||
|
||||
# Read admin token
|
||||
admin_token_path = "./gitea-data/admin.token" # Corrected path
|
||||
admin_token = None
|
||||
try:
|
||||
with open(admin_token_path, "r") as f:
|
||||
admin_token = f.read().strip()
|
||||
except FileNotFoundError:
|
||||
raise Exception(f"Admin token file not found at {admin_token_path}. Ensure it's generated and accessible.")
|
||||
|
||||
# Headers for authenticated requests
|
||||
auth_headers = {"Authorization": f"token {admin_token}", "Content-Type": "application/json"}
|
||||
|
||||
# Wait for Gitea to be available
|
||||
print(f"Waiting for Gitea at {gitea_url}...")
|
||||
max_retries = 5
|
||||
for i in range(max_retries):
|
||||
try:
|
||||
# Check a specific API endpoint that indicates readiness
|
||||
response = requests.get(f"{gitea_url}/api/v1/version", headers=auth_headers, timeout=5)
|
||||
if response.status_code == 200:
|
||||
print("Gitea API is available.")
|
||||
break
|
||||
except requests.exceptions.ConnectionError:
|
||||
pass
|
||||
print(f"Gitea not ready ({response.status_code if 'response' in locals() else 'ConnectionError'}), retrying in 1 seconds... ({i+1}/{max_retries})")
|
||||
time.sleep(1)
|
||||
else:
|
||||
raise Exception("Gitea did not become available within the expected time.")
|
||||
|
||||
client = GiteaAPIClient(base_url=gitea_url, token=admin_token)
|
||||
|
||||
# Setup dummy data
|
||||
print("--- Starting Gitea Dummy Data Setup from Pytest Fixture ---")
|
||||
client.create_org("products")
|
||||
client.create_org("pool")
|
||||
|
||||
client.create_repo("products", "SLFO")
|
||||
client.create_repo("pool", "pkgA")
|
||||
client.create_repo("pool", "pkgB")
|
||||
|
||||
# The add_submodules method also creates workflow.config and staging.config
|
||||
client.add_submodules("products", "SLFO")
|
||||
time.sleep(1)
|
||||
|
||||
workflow_config_content = """{
|
||||
"Workflows": ["pr"],
|
||||
"GitProjectName": "products/SLFO#main",
|
||||
"Organization": "pool",
|
||||
"Branch": "main",
|
||||
"ManualMergeProject": true,
|
||||
"Reviewers": [ "-autogits_obs_staging_bot" ]
|
||||
}"""
|
||||
client.create_file("products", "SLFO", "workflow.config", workflow_config_content)
|
||||
|
||||
staging_config_content = """{
|
||||
"ObsProject": "openSUSE:Leap:16.0",
|
||||
"StagingProject": "openSUSE:Leap:16.0:PullRequest"
|
||||
}"""
|
||||
client.create_file("products", "SLFO", "staging.config", staging_config_content)
|
||||
|
||||
client.add_collaborator("products", "SLFO", "autogits_obs_staging_bot", "write")
|
||||
client.add_collaborator("products", "SLFO", "workflow-pr", "write")
|
||||
client.add_collaborator("pool", "pkgA", "workflow-pr", "write")
|
||||
client.add_collaborator("pool", "pkgB", "workflow-pr", "write")
|
||||
|
||||
client.update_repo_settings("products", "SLFO")
|
||||
client.update_repo_settings("pool", "pkgA")
|
||||
client.update_repo_settings("pool", "pkgB")
|
||||
print("--- Gitea Dummy Data Setup Complete ---")
|
||||
time.sleep(1) # Give workflow-pr bot time to become fully active
|
||||
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def configured_dev_branch_env(gitea_env: GiteaAPIClient, request):
|
||||
"""
|
||||
Fixture to set up a 'dev' branch in products/SLFO and pool/pkgA,
|
||||
and configure workflow.config in products/SLFO#dev with specific content.
|
||||
Yields (gitea_env, test_full_repo_name, dev_branch_name).
|
||||
"""
|
||||
test_org_name = "products"
|
||||
test_repo_name = "SLFO"
|
||||
test_full_repo_name = f"{test_org_name}/{test_repo_name}"
|
||||
dev_branch_name = "dev"
|
||||
|
||||
workflow_config_content = request.param # Get config content from parametrization
|
||||
|
||||
print(f"--- Setting up 'dev' branch and workflow.config in {test_full_repo_name}#{dev_branch_name} ---")
|
||||
|
||||
# Get the latest commit SHA of the main branch
|
||||
main_branch_sha = gitea_env._request("GET", f"repos/{test_org_name}/{test_repo_name}/branches/main").json()["commit"]["id"]
|
||||
|
||||
# Create 'dev' branch from 'main' in products/SLFO
|
||||
gitea_env.create_branch(test_org_name, test_repo_name, dev_branch_name, main_branch_sha)
|
||||
|
||||
# Create 'dev' branch in pool/pkgA as well
|
||||
pool_pkga_main_sha = gitea_env._request("GET", "repos/pool/pkgA/branches/main").json()["commit"]["id"]
|
||||
gitea_env.create_branch("pool", "pkgA", dev_branch_name, pool_pkga_main_sha)
|
||||
|
||||
# Create 'dev' branch in pool/pkgB as well
|
||||
pool_pkgb_main_sha = gitea_env._request("GET", "repos/pool/pkgB/branches/main").json()["commit"]["id"]
|
||||
gitea_env.create_branch("pool", "pkgB", dev_branch_name, pool_pkgb_main_sha)
|
||||
|
||||
# Create/update workflow.config with the provided content
|
||||
gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", workflow_config_content, branch=dev_branch_name)
|
||||
print(f"Created workflow.config with specific content in {test_full_repo_name}#{dev_branch_name}")
|
||||
|
||||
# Restart workflow-pr service to pick up new project config
|
||||
gitea_env.restart_service("workflow-pr")
|
||||
time.sleep(1) # Give the service time to restart and re-initialize
|
||||
|
||||
yield gitea_env, test_full_repo_name, dev_branch_name
|
||||
|
||||
|
||||
# Teardown (optional, depending on test strategy)
|
||||
# For now, we'll leave resources for inspection. If a clean slate is needed for each test,
|
||||
# this fixture's scope would be 'function' and teardown logic would be added here.
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def no_project_git_pr_env(gitea_env: GiteaAPIClient):
|
||||
"""
|
||||
Sets up 'dev' branch in products/SLFO and pool/pkgA,
|
||||
and configures workflow.config in products/SLFO#dev with NoProjectGitPR: true.
|
||||
"""
|
||||
test_org_name = "products"
|
||||
test_repo_name = "SLFO"
|
||||
test_full_repo_name = f"{test_org_name}/{test_repo_name}"
|
||||
dev_branch_name = "dev"
|
||||
|
||||
print(f"--- Setting up workflow.config in {test_full_repo_name}#{dev_branch_name} for No Project PR ---")
|
||||
|
||||
# Get the latest commit SHA of the main branch
|
||||
main_branch_sha = gitea_env._request("GET", f"repos/{test_org_name}/{test_repo_name}/branches/main").json()["commit"]["id"]
|
||||
|
||||
# Create 'dev' branch from 'main' in products/SLFO
|
||||
try:
|
||||
gitea_env.create_branch(test_org_name, test_repo_name, dev_branch_name, main_branch_sha)
|
||||
except Exception as e:
|
||||
if "already exists" not in str(e).lower():
|
||||
raise
|
||||
|
||||
# Create 'dev' branch in pool/pkgA as well
|
||||
pool_pkga_main_sha = gitea_env._request("GET", "repos/pool/pkgA/branches/main").json()["commit"]["id"]
|
||||
try:
|
||||
gitea_env.create_branch("pool", "pkgA", dev_branch_name, pool_pkga_main_sha)
|
||||
except Exception as e:
|
||||
if "already exists" not in str(e).lower():
|
||||
raise
|
||||
|
||||
# Create 'dev' branch in pool/pkgB as well
|
||||
pool_pkgb_main_sha = gitea_env._request("GET", "repos/pool/pkgB/branches/main").json()["commit"]["id"]
|
||||
try:
|
||||
gitea_env.create_branch("pool", "pkgB", dev_branch_name, pool_pkgb_main_sha)
|
||||
except Exception as e:
|
||||
if "already exists" not in str(e).lower():
|
||||
raise
|
||||
|
||||
# Setup workflow.config to have "NoProjectGitPR": true
|
||||
workflow_config_content_no_project_pr = f"""{{
|
||||
"Workflows": ["pr"],
|
||||
"GitProjectName": "{test_full_repo_name}#{dev_branch_name}",
|
||||
"Organization": "pool",
|
||||
"Branch": "dev",
|
||||
"ManualMergeProject": true,
|
||||
"Reviewers": [ "-autogits_obs_staging_bot" ],
|
||||
"NoProjectGitPR": true
|
||||
}}"""
|
||||
gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", workflow_config_content_no_project_pr, branch=dev_branch_name)
|
||||
print(f"Created workflow.config with NoProjectGitPR: true in {test_full_repo_name}#{dev_branch_name}")
|
||||
|
||||
# Restart workflow-pr service
|
||||
gitea_env.restart_service("workflow-pr")
|
||||
time.sleep(1) # Give the service time to restart and re-initialize
|
||||
|
||||
return gitea_env, test_full_repo_name, dev_branch_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def test_user_client(gitea_env: GiteaAPIClient):
|
||||
"""
|
||||
Creates a new unique user and returns a GiteaAPIClient instance for them using sudo.
|
||||
This user should not have write permissions to the test repositories by default.
|
||||
"""
|
||||
username = f"user-{int(time.time())}"
|
||||
password = "password123"
|
||||
email = f"{username}@example.com"
|
||||
|
||||
gitea_env.create_user(username, password, email)
|
||||
|
||||
# Grant write access to pool/pkgA
|
||||
gitea_env.add_collaborator("pool", "pkgA", username, "write")
|
||||
|
||||
# Use admin token with Sudo header
|
||||
admin_token = gitea_env.headers["Authorization"].split(" ")[1]
|
||||
return GiteaAPIClient(base_url=gitea_env.base_url, token=admin_token, sudo=username)
|
||||
|
||||
|
||||
def setup_users_from_config(client: "GiteaAPIClient", workflow_config: str, maintainership_config: str):
    """
    Parses workflow.config and _maintainership.json, creates users, and adds them as collaborators.

    client: API client with create_user/add_collaborator methods.
    workflow_config: JSON text of workflow.config (reviewers are read from "Reviewers").
    maintainership_config: JSON text mapping package name -> list of maintainers;
        the empty-string key denotes global maintainers.
    """
    import json

    wf = json.loads(workflow_config)
    mt = json.loads(maintainership_config)

    # Reviewers carry '+'/'-' prefixes (required/blocked); strip them and
    # skip the bot accounts that are provisioned separately.
    all_users = {
        name
        for name in (r.lstrip("+-") for r in wf.get("Reviewers", []))
        if name and name not in ("autogits_obs_staging_bot", "workflow-pr")
    }

    # Every maintainer listed for any package (or globally) is a user too.
    for users in mt.values():
        all_users.update(users)

    # Create all users and, for test simplicity, give each write access to
    # the products/SLFO project repository.
    for username in all_users:
        client.create_user(username, "password123", f"{username}@example.com")
        client.add_collaborator("products", "SLFO", username, "write")

    # Package-level permissions: global maintainers (empty key) get write on
    # both pool packages; package maintainers only on their own package.
    for pkg, users in mt.items():
        for username in users:
            if pkg:
                client.add_collaborator("pool", pkg, username, "write")
            else:
                client.add_collaborator("pool", "pkgA", username, "write")
                client.add_collaborator("pool", "pkgB", username, "write")
|
||||
|
||||
@pytest.fixture(scope="session")
def gitea_env():
    """
    Sets up the Gitea environment with dummy data and provides a GiteaAPIClient instance.

    Reads the admin token from ./gitea-data/admin.token, waits for the Gitea
    API to come up, then creates the organizations, repositories, config files
    and users the integration tests depend on. Session-scoped: runs once.
    """
    gitea_url = "http://127.0.0.1:3000"

    # Read admin token written by the Gitea container setup.
    admin_token_path = "./gitea-data/admin.token"  # Corrected path
    try:
        with open(admin_token_path, "r") as f:
            admin_token = f.read().strip()
    except FileNotFoundError:
        raise Exception(f"Admin token file not found at {admin_token_path}. Ensure it's generated and accessible.")

    # Headers for authenticated requests
    auth_headers = {"Authorization": f"token {admin_token}", "Content-Type": "application/json"}

    # Wait for Gitea to be available. Track the last observed status in a
    # local variable instead of probing locals() for a possibly-unbound
    # `response`, which the previous version did.
    print(f"Waiting for Gitea at {gitea_url}...")
    max_retries = 5
    for i in range(max_retries):
        last_status = "ConnectionError"
        try:
            # Check a specific API endpoint that indicates readiness
            response = requests.get(f"{gitea_url}/api/v1/version", headers=auth_headers, timeout=5)
            last_status = response.status_code
            if response.status_code == 200:
                print("Gitea API is available.")
                break
        except requests.exceptions.ConnectionError:
            pass
        print(f"Gitea not ready ({last_status}), retrying in 1 seconds... ({i+1}/{max_retries})")
        time.sleep(1)
    else:
        raise Exception("Gitea did not become available within the expected time.")

    client = GiteaAPIClient(base_url=gitea_url, token=admin_token)

    # Setup dummy data: organizations, repositories, submodule layout.
    print("--- Starting Gitea Dummy Data Setup from Pytest Fixture ---")
    client.create_org("products")
    client.create_org("pool")

    client.create_repo("products", "SLFO")
    client.create_repo("pool", "pkgA")
    client.create_repo("pool", "pkgB")

    # The add_submodules method also creates workflow.config and staging.config
    client.add_submodules("products", "SLFO")
    time.sleep(1)

    workflow_config_content = """{
    "Workflows": ["pr"],
    "GitProjectName": "products/SLFO#main",
    "Organization": "pool",
    "Branch": "main",
    "ManualMergeProject": true,
    "Reviewers": [ "-autogits_obs_staging_bot" ]
}"""
    client.create_file("products", "SLFO", "workflow.config", workflow_config_content)

    staging_config_content = """{
    "ObsProject": "openSUSE:Leap:16.0",
    "StagingProject": "openSUSE:Leap:16.0:PullRequest"
}"""
    client.create_file("products", "SLFO", "staging.config", staging_config_content)

    maintainership_content = """{
    "": ["ownerX","ownerY"],
    "pkgA": ["ownerA"],
    "pkgB": ["ownerB","ownerBB"]
}"""
    # Create users from default main config
    setup_users_from_config(client, workflow_config_content, maintainership_content)

    # The bots need write access to review and merge.
    client.add_collaborator("products", "SLFO", "autogits_obs_staging_bot", "write")
    client.add_collaborator("products", "SLFO", "workflow-pr", "write")
    client.add_collaborator("pool", "pkgA", "workflow-pr", "write")
    client.add_collaborator("pool", "pkgB", "workflow-pr", "write")

    client.update_repo_settings("products", "SLFO")
    client.update_repo_settings("pool", "pkgA")
    client.update_repo_settings("pool", "pkgB")
    print("--- Gitea Dummy Data Setup Complete ---")
    time.sleep(1)  # Give workflow-pr bot time to become fully active

    yield client
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def configured_dev_branch_env(gitea_env: "GiteaAPIClient", request):
    """
    Fixture to set up a 'dev' branch in products/SLFO and pool/pkgA,
    and configure workflow.config in products/SLFO#dev with specific content.
    Yields (gitea_env, test_full_repo_name, dev_branch_name).
    """
    test_org_name = "products"
    test_repo_name = "SLFO"
    test_full_repo_name = f"{test_org_name}/{test_repo_name}"
    dev_branch_name = "dev"

    workflow_config_content = request.param  # Get config content from parametrization

    print(f"--- Setting up 'dev' branch and workflow.config in {test_full_repo_name}#{dev_branch_name} ---")

    def _branch_off_main(org, repo):
        # Create the dev branch from the repo's current main HEAD. Tolerate
        # "already exists" so repeated parametrized invocations don't fail
        # (the sibling fixtures already do this; this one previously didn't).
        gitea_env.ensure_branch_exists(org, repo, "main")
        sha = gitea_env._request("GET", f"repos/{org}/{repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(org, repo, dev_branch_name, sha)
        except Exception as e:
            if "already exists" not in str(e).lower():
                raise

    _branch_off_main(test_org_name, test_repo_name)
    _branch_off_main("pool", "pkgA")
    _branch_off_main("pool", "pkgB")

    # Create/update workflow.config with the provided content
    gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", workflow_config_content, branch=dev_branch_name)

    # For this fixture, we use default maintainership as we don't receive it in request.param
    maintainership_content = """{
    "": ["ownerX","ownerY"],
    "pkgA": ["ownerA"],
    "pkgB": ["ownerB","ownerBB"]
}"""
    setup_users_from_config(gitea_env, workflow_config_content, maintainership_content)

    print(f"Created workflow.config with specific content in {test_full_repo_name}#{dev_branch_name}")

    # Restart workflow-pr service to pick up new project config
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)  # Give the service time to restart and re-initialize

    yield gitea_env, test_full_repo_name, dev_branch_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def no_project_git_pr_env(gitea_env: "GiteaAPIClient"):
    """
    Sets up 'dev' branch in products/SLFO and pool/pkgA,
    and configures workflow.config in products/SLFO#dev with NoProjectGitPR: true.
    Returns (gitea_env, test_full_repo_name, dev_branch_name).
    """
    test_org_name = "products"
    test_repo_name = "SLFO"
    test_full_repo_name = f"{test_org_name}/{test_repo_name}"
    dev_branch_name = "dev"

    print(f"--- Setting up workflow.config in {test_full_repo_name}#{dev_branch_name} for No Project PR ---")

    def _branch_off_main(org, repo):
        # Create the dev branch from the repo's current main HEAD; tolerate
        # reruns where the branch already exists.
        gitea_env.ensure_branch_exists(org, repo, "main")
        sha = gitea_env._request("GET", f"repos/{org}/{repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(org, repo, dev_branch_name, sha)
        except Exception as e:
            if "already exists" not in str(e).lower():
                raise

    _branch_off_main(test_org_name, test_repo_name)
    _branch_off_main("pool", "pkgA")
    _branch_off_main("pool", "pkgB")

    # Setup workflow.config to have "NoProjectGitPR": true
    workflow_config_content = f"""{{
    "Workflows": ["pr"],
    "GitProjectName": "{test_full_repo_name}#{dev_branch_name}",
    "Organization": "pool",
    "Branch": "dev",
    "ManualMergeProject": true,
    "Reviewers": [ "-autogits_obs_staging_bot" ],
    "NoProjectGitPR": true
}}"""
    gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", workflow_config_content, branch=dev_branch_name)

    maintainership_content = """{
    "": ["ownerX","ownerY"],
    "pkgA": ["ownerA"],
    "pkgB": ["ownerB","ownerBB"]
}"""
    setup_users_from_config(gitea_env, workflow_config_content, maintainership_content)

    print(f"Created workflow.config with NoProjectGitPR: true in {test_full_repo_name}#{dev_branch_name}")

    # Restart workflow-pr service so it re-reads the project config.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)  # Give the service time to restart and re-initialize

    return gitea_env, test_full_repo_name, dev_branch_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def test_user_client(gitea_env: "GiteaAPIClient"):
    """
    Creates a new unique user and returns a GiteaAPIClient instance for them using sudo.
    This user should not have write permissions to the test repositories by default.
    """
    # A timestamp suffix keeps the username unique across test sessions.
    username = f"user-{int(time.time())}"
    password = "password123"
    gitea_env.create_user(username, password, f"{username}@example.com")

    # Grant write access to pool/pkgA
    gitea_env.add_collaborator("pool", "pkgA", username, "write")

    # Reuse the admin token but act as the new user via the Sudo header.
    admin_token = gitea_env.headers["Authorization"].split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=admin_token, sudo=username)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def automerge_env(gitea_env: "GiteaAPIClient"):
    """
    Sets up 'merge' branch and custom workflow.config for automerge tests.
    The config requires reviews from usera and userb.
    Returns (gitea_env, test_full_repo_name, merge_branch_name).
    """
    test_org_name = "products"
    test_repo_name = "SLFO"
    test_full_repo_name = f"{test_org_name}/{test_repo_name}"
    merge_branch_name = "merge"

    print(f"--- Setting up '{merge_branch_name}' branch and workflow.config in {test_full_repo_name} ---")

    def _branch_off_main(org, repo):
        # Create the merge branch from the repo's current main HEAD;
        # tolerate reruns where the branch already exists.
        gitea_env.ensure_branch_exists(org, repo, "main")
        sha = gitea_env._request("GET", f"repos/{org}/{repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(org, repo, merge_branch_name, sha)
        except Exception as e:
            if "already exists" not in str(e).lower():
                raise

    _branch_off_main(test_org_name, test_repo_name)
    _branch_off_main("pool", "pkgA")
    _branch_off_main("pool", "pkgB")

    custom_workflow_config = f"""{{
    "Workflows": ["pr"],
    "GitProjectName": "{test_full_repo_name}#{merge_branch_name}",
    "Organization": "pool",
    "Branch": "{merge_branch_name}",
    "Reviewers": [ "+usera", "+userb", "-autogits_obs_staging_bot" ]
}}"""
    gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", custom_workflow_config, branch=merge_branch_name)

    maintainership_content = """{
    "": ["ownerX","ownerY"],
    "pkgA": ["ownerA"],
    "pkgB": ["ownerB","ownerBB"]
}"""
    gitea_env.create_file(test_org_name, test_repo_name, "_maintainership.json", maintainership_content, branch=merge_branch_name)

    setup_users_from_config(gitea_env, custom_workflow_config, maintainership_content)

    # Restart workflow-pr service so it re-reads the project config.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)

    return gitea_env, test_full_repo_name, merge_branch_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def maintainer_env(gitea_env: "GiteaAPIClient"):
    """
    Sets up 'maintainer-merge' branch and workflow.config without mandatory reviewers.
    Returns (gitea_env, test_full_repo_name, branch_name).
    """
    test_org_name = "products"
    test_repo_name = "SLFO"
    test_full_repo_name = f"{test_org_name}/{test_repo_name}"
    branch_name = "maintainer-merge"

    print(f"--- Setting up '{branch_name}' branch and workflow.config in {test_full_repo_name} ---")

    def _branch_off_main(org, repo):
        # Create the branch from the repo's current main HEAD; tolerate
        # reruns where the branch already exists.
        gitea_env.ensure_branch_exists(org, repo, "main")
        sha = gitea_env._request("GET", f"repos/{org}/{repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(org, repo, branch_name, sha)
        except Exception as e:
            if "already exists" not in str(e).lower():
                raise

    _branch_off_main(test_org_name, test_repo_name)
    _branch_off_main("pool", "pkgA")
    _branch_off_main("pool", "pkgB")

    custom_workflow_config = f"""{{
    "Workflows": ["pr"],
    "GitProjectName": "{test_full_repo_name}#{branch_name}",
    "Organization": "pool",
    "Branch": "{branch_name}",
    "Reviewers": [ "-autogits_obs_staging_bot" ]
}}"""
    gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", custom_workflow_config, branch=branch_name)

    maintainership_content = """{
    "": ["ownerX","ownerY"],
    "pkgA": ["ownerA"],
    "pkgB": ["ownerB","ownerBB"]
}"""
    gitea_env.create_file(test_org_name, test_repo_name, "_maintainership.json", maintainership_content, branch=branch_name)

    setup_users_from_config(gitea_env, custom_workflow_config, maintainership_content)

    gitea_env.add_collaborator(test_org_name, test_repo_name, "autogits_obs_staging_bot", "write")

    # Restart workflow-pr service so it re-reads the project config.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)

    return gitea_env, test_full_repo_name, branch_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def review_required_env(gitea_env: "GiteaAPIClient"):
    """
    Sets up 'review-required' branch and workflow.config with ReviewRequired: true.
    Returns (gitea_env, test_full_repo_name, branch_name).
    """
    test_org_name = "products"
    test_repo_name = "SLFO"
    test_full_repo_name = f"{test_org_name}/{test_repo_name}"
    branch_name = "review-required"

    print(f"--- Setting up '{branch_name}' branch and workflow.config in {test_full_repo_name} ---")

    def _branch_off_main(org, repo):
        # Create the branch from the repo's current main HEAD; tolerate
        # reruns where the branch already exists.
        gitea_env.ensure_branch_exists(org, repo, "main")
        sha = gitea_env._request("GET", f"repos/{org}/{repo}/branches/main").json()["commit"]["id"]
        try:
            gitea_env.create_branch(org, repo, branch_name, sha)
        except Exception as e:
            if "already exists" not in str(e).lower():
                raise

    _branch_off_main(test_org_name, test_repo_name)
    _branch_off_main("pool", "pkgA")
    _branch_off_main("pool", "pkgB")

    custom_workflow_config = f"""{{
    "Workflows": ["pr"],
    "GitProjectName": "{test_full_repo_name}#{branch_name}",
    "Organization": "pool",
    "Branch": "{branch_name}",
    "Reviewers": [ "-autogits_obs_staging_bot" ],
    "ReviewRequired": true
}}"""
    gitea_env.create_file(test_org_name, test_repo_name, "workflow.config", custom_workflow_config, branch=branch_name)

    maintainership_content = """{
    "": ["ownerX","ownerY"],
    "pkgA": ["ownerA"],
    "pkgB": ["ownerB","ownerBB"]
}"""
    gitea_env.create_file(test_org_name, test_repo_name, "_maintainership.json", maintainership_content, branch=branch_name)

    setup_users_from_config(gitea_env, custom_workflow_config, maintainership_content)

    gitea_env.add_collaborator(test_org_name, test_repo_name, "autogits_obs_staging_bot", "write")

    # Restart workflow-pr service so it re-reads the project config.
    gitea_env.restart_service("workflow-pr")
    time.sleep(1)

    return gitea_env, test_full_repo_name, branch_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def ownerA_client(gitea_env: "GiteaAPIClient"):
    """
    Returns a GiteaAPIClient instance for ownerA.
    """
    # Impersonate ownerA by pairing the admin token with a Sudo header.
    token = gitea_env.headers["Authorization"].split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token, sudo="ownerA")
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def ownerB_client(gitea_env: "GiteaAPIClient"):
    """
    Returns a GiteaAPIClient instance for ownerB.
    """
    # Impersonate ownerB by pairing the admin token with a Sudo header.
    token = gitea_env.headers["Authorization"].split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token, sudo="ownerB")
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def ownerBB_client(gitea_env: "GiteaAPIClient"):
    """
    Returns a GiteaAPIClient instance for ownerBB.
    """
    # Impersonate ownerBB by pairing the admin token with a Sudo header.
    token = gitea_env.headers["Authorization"].split(" ")[1]
    return GiteaAPIClient(base_url=gitea_env.base_url, token=token, sudo="ownerBB")
|
||||
23
integration/tests/data/build_result.xml.template
Normal file
23
integration/tests/data/build_result.xml.template
Normal file
@@ -0,0 +1,23 @@
|
||||
<resultlist state="0fef640bfb56c3e76fcfb698b19b59c0">
|
||||
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="aarch64" code="unpublished" state="unpublished">
|
||||
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
|
||||
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
|
||||
<status package="openjpeg2" code="succeeded"/>
|
||||
</result>
|
||||
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="ppc64le" code="unpublished" state="unpublished">
|
||||
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
|
||||
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
|
||||
<status package="openjpeg2" code="succeeded"/>
|
||||
</result>
|
||||
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="x86_64" code="unpublished" state="unpublished">
|
||||
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
|
||||
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
|
||||
<status package="openjpeg2" code="succeeded"/>
|
||||
</result>
|
||||
<result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="s390x" code="unpublished" state="unpublished">
|
||||
<scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
|
||||
<scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
|
||||
<status package="openjpeg2" code="succeeded"/>
|
||||
</result>
|
||||
</resultlist>
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
<project name="openSUSE:Leap:16.0:PullRequest">
|
||||
<title>Leap 16.0 PullRequest area</title>
|
||||
<description>Base project to define the pull request builds</description>
|
||||
<person userid="autogits_obs_staging_bot" role="maintainer"/>
|
||||
<person userid="maxlin_factory" role="maintainer"/>
|
||||
<group groupid="maintenance-opensuse.org" role="maintainer"/>
|
||||
<debuginfo>
|
||||
<enable/>
|
||||
</debuginfo>
|
||||
<repository name="standard">
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<arch>x86_64</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
</project>
|
||||
59
integration/tests/data/source_openSUSE_Leap_16.0__meta
Normal file
59
integration/tests/data/source_openSUSE_Leap_16.0__meta
Normal file
@@ -0,0 +1,59 @@
|
||||
<project name="openSUSE:Leap:16.0">
|
||||
<title>openSUSE Leap 16.0 based on SLFO</title>
|
||||
<description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
|
||||
<link project="openSUSE:Backports:SLE-16.0"/>
|
||||
<scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
|
||||
<person userid="dimstar_suse" role="maintainer"/>
|
||||
<person userid="lkocman-factory" role="maintainer"/>
|
||||
<person userid="maxlin_factory" role="maintainer"/>
|
||||
<person userid="factory-auto" role="reviewer"/>
|
||||
<person userid="licensedigger" role="reviewer"/>
|
||||
<group groupid="autobuild-team" role="maintainer"/>
|
||||
<group groupid="factory-maintainers" role="maintainer"/>
|
||||
<group groupid="maintenance-opensuse.org" role="maintainer"/>
|
||||
<group groupid="factory-staging" role="reviewer"/>
|
||||
<build>
|
||||
<disable repository="ports"/>
|
||||
</build>
|
||||
<debuginfo>
|
||||
<enable/>
|
||||
</debuginfo>
|
||||
<repository name="standard" rebuild="local">
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>local</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
<repository name="product">
|
||||
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
|
||||
<path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="images"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>local</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
<repository name="ports">
|
||||
<arch>armv7l</arch>
|
||||
</repository>
|
||||
<repository name="images">
|
||||
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
</project>
|
||||
509
integration/tests/lib/common_test_utils.py
Normal file
509
integration/tests/lib/common_test_utils.py
Normal file
@@ -0,0 +1,509 @@
|
||||
import os
|
||||
import time
|
||||
import pytest
|
||||
import requests
|
||||
import json
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
import base64
|
||||
import subprocess
|
||||
|
||||
# Paths to the build-result template and the mock-OBS response files the
# workflow bots read during the integration tests.
TEST_DATA_DIR = Path(__file__).parent.parent / "data"
BUILD_RESULT_TEMPLATE = TEST_DATA_DIR / "build_result.xml.template"
MOCK_RESPONSES_DIR = Path(__file__).parent.parent.parent / "mock-obs" / "responses"
MOCK_BUILD_RESULT_FILE = (
    MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0:PullRequest:*__result"
)
MOCK_BUILD_RESULT_FILE1 = MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0__result"


@pytest.fixture
def mock_build_result():
    """
    Fixture to create a mock build result file from the template.
    Returns a factory function that the test can call with parameters.
    """

    def _create_result_file(package_name: str, code: str):
        # Rewrite every <status> element in the template with the requested
        # package name and build result code, then write both mock responses.
        tree = ET.parse(BUILD_RESULT_TEMPLATE)
        root = tree.getroot()
        for status_tag in root.findall(".//status"):
            status_tag.set("package", package_name)
            status_tag.set("code", code)

        MOCK_RESPONSES_DIR.mkdir(exist_ok=True)
        tree.write(MOCK_BUILD_RESULT_FILE)
        tree.write(MOCK_BUILD_RESULT_FILE1)
        return str(MOCK_BUILD_RESULT_FILE)

    yield _create_result_file

    # Teardown: remove each mock file independently. The previous version
    # only unlinked FILE1 when FILE existed, and crashed if FILE1 was absent.
    for mock_file in (MOCK_BUILD_RESULT_FILE, MOCK_BUILD_RESULT_FILE1):
        if mock_file.exists():
            mock_file.unlink()
|
||||
|
||||
|
||||
class GiteaAPIClient:
|
||||
def __init__(self, base_url, token, sudo=None):
|
||||
self.base_url = base_url
|
||||
self.headers = {"Authorization": f"token {token}", "Content-Type": "application/json"}
|
||||
if sudo:
|
||||
self.headers["Sudo"] = sudo
|
||||
|
||||
def _request(self, method, path, **kwargs):
|
||||
url = f"{self.base_url}/api/v1/{path}"
|
||||
response = requests.request(method, url, headers=self.headers, **kwargs)
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
print(f"HTTPError in _request: {e}")
|
||||
print(f"Response Content: {e.response.text}")
|
||||
raise
|
||||
return response
|
||||
|
||||
def get_file_info(self, owner: str, repo: str, file_path: str, branch: str = "main"):
|
||||
url = f"repos/{owner}/{repo}/contents/{file_path}"
|
||||
if branch and branch != "main":
|
||||
url += f"?ref={branch}"
|
||||
try:
|
||||
response = self._request("GET", url)
|
||||
return response.json()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.response.status_code == 404:
|
||||
return None
|
||||
raise
|
||||
|
||||
def create_user(self, username, password, email):
|
||||
print(f"--- Creating user: {username} ---")
|
||||
data = {
|
||||
"username": username,
|
||||
"password": password,
|
||||
"email": email,
|
||||
"must_change_password": False,
|
||||
"send_notify": False
|
||||
}
|
||||
try:
|
||||
self._request("POST", "admin/users", json=data)
|
||||
print(f"User '{username}' created.")
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.response.status_code == 422: # Already exists
|
||||
print(f"User '{username}' already exists. Updating password...")
|
||||
# Update password to be sure it matches our expectation
|
||||
self._request("PATCH", f"admin/users/{username}", json={"password": password, "login_name": username})
|
||||
else:
|
||||
raise
|
||||
|
||||
def get_user_token(self, username, password, token_name="test-token"):
|
||||
print(f"--- Getting token for user: {username} ---")
|
||||
url = f"{self.base_url}/api/v1/users/{username}/tokens"
|
||||
|
||||
# Create new token using Basic Auth
|
||||
response = requests.post(url, auth=(username, password), json={"name": token_name})
|
||||
if response.status_code == 201:
|
||||
return response.json()["sha1"]
|
||||
response.raise_for_status()
|
||||
|
||||
def create_org(self, org_name):
|
||||
print(f"--- Checking organization: {org_name} ---")
|
||||
try:
|
||||
self._request("GET", f"orgs/{org_name}")
|
||||
print(f"Organization '{org_name}' already exists.")
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.response.status_code == 404:
|
||||
print(f"Creating organization '{org_name}'...")
|
||||
data = {"username": org_name, "full_name": org_name}
|
||||
self._request("POST", "orgs", json=data)
|
||||
print(f"Organization '{org_name}' created.")
|
||||
else:
|
||||
raise
|
||||
print(f"--- Checking organization: {org_name} ---")
|
||||
try:
|
||||
self._request("GET", f"orgs/{org_name}")
|
||||
print(f"Organization '{org_name}' already exists.")
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.response.status_code == 404:
|
||||
print(f"Creating organization '{org_name}'...")
|
||||
data = {"username": org_name, "full_name": org_name}
|
||||
self._request("POST", "orgs", json=data)
|
||||
print(f"Organization '{org_name}' created.")
|
||||
else:
|
||||
raise
|
||||
|
||||
def create_repo(self, org_name, repo_name):
    """Ensure repository ``org_name/repo_name`` exists, creating it on 404."""
    print(f"--- Checking repository: {org_name}/{repo_name} ---")
    try:
        self._request("GET", f"repos/{org_name}/{repo_name}")
    except requests.exceptions.HTTPError as err:
        if err.response.status_code != 404:
            raise
        print(f"Creating repository '{org_name}/{repo_name}'...")
        payload = {
            "name": repo_name,
            "auto_init": True,
            "default_branch": "main",
            "gitignores": "Go",
            "license": "MIT",
            "private": False,
            "readme": "Default",
        }
        self._request("POST", f"orgs/{org_name}/repos", json=payload)
        print(f"Repository '{org_name}/{repo_name}' created with a README.")
        time.sleep(0.1)  # Added delay to allow Git operations to become available
    else:
        print(f"Repository '{org_name}/{repo_name}' already exists.")
|
||||
|
||||
def add_collaborator(self, org_name, repo_name, collaborator_name, permission="write"):
    """Grant ``collaborator_name`` access to a repository (idempotent)."""
    print(f"--- Adding {collaborator_name} as a collaborator to {org_name}/{repo_name} with '{permission}' permission ---")
    # Gitea API returns 204 No Content on success and doesn't fail if already present.
    self._request(
        "PUT",
        f"repos/{org_name}/{repo_name}/collaborators/{collaborator_name}",
        json={"permission": permission},
    )
    print(f"Attempted to add {collaborator_name} to {org_name}/{repo_name}.")
|
||||
|
||||
def add_submodules(self, org_name, repo_name):
    """Add pool/pkgA and pool/pkgB as git submodules of the given repo.

    Uses the Gitea ``diffpatch`` endpoint with a hand-crafted unified diff
    that creates ``.gitmodules`` plus the two gitlink (mode 160000) entries
    in a single commit. Idempotent: skipped if ``.gitmodules`` already exists.
    """
    print(f"--- Adding submodules to {org_name}/{repo_name} using diffpatch ---")
    parent_repo_path = f"repos/{org_name}/{repo_name}"

    # Idempotency check: presence of .gitmodules means this already ran.
    try:
        self._request("GET", f"{parent_repo_path}/contents/.gitmodules")
        print("Submodules appear to be already added. Skipping.")
        return
    except requests.exceptions.HTTPError as e:
        # Only 404 (file absent) is expected here; anything else is a real error.
        if e.response.status_code != 404:
            raise

    # Get latest commit SHAs for the submodules
    pkg_a_sha = self._request("GET", "repos/pool/pkgA/branches/main").json()["commit"]["id"]
    pkg_b_sha = self._request("GET", "repos/pool/pkgB/branches/main").json()["commit"]["id"]

    if not pkg_a_sha or not pkg_b_sha:
        raise Exception("Error: Could not get submodule commit SHAs. Cannot apply patch.")

    # The diff body below is interpreted by git apply on the server side, so
    # its exact layout (including leading '+' columns) is significant.
    diff_content = f"""diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..f1838bd
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "pkgA"]
+ path = pkgA
+ url = ../../pool/pkgA.git
+[submodule "pkgB"]
+ path = pkgB
+ url = ../../pool/pkgB.git
diff --git a/pkgA b/pkgA
new file mode 160000
index 0000000..{pkg_a_sha}
--- /dev/null
+++ b/pkgA
@@ -0,0 +1 @@
+Subproject commit {pkg_a_sha}
diff --git a/pkgB b/pkgB
new file mode 160000
index 0000000..{pkg_b_sha}
--- /dev/null
+++ b/pkgB
@@ -0,0 +1 @@
+Subproject commit {pkg_b_sha}
"""
    message = "Add pkgA and pkgB as submodules"
    data = {
        "branch": "main",
        "content": diff_content,
        "message": message
    }
    print(f"Applying submodule patch to {org_name}/{repo_name}...")
    self._request("POST", f"{parent_repo_path}/diffpatch", json=data)
    print("Submodule patch applied.")
|
||||
|
||||
def update_repo_settings(self, org_name, repo_name):
    """Enable manual-merge options on the repository via a full PATCH."""
    print(f"--- Updating repository settings for: {org_name}/{repo_name} ---")
    settings = self._request("GET", f"repos/{org_name}/{repo_name}").json()

    # Ensure these are boolean values, not string
    settings["allow_manual_merge"] = True
    settings["autodetect_manual_merge"] = True

    self._request("PATCH", f"repos/{org_name}/{repo_name}", json=settings)
    print(f"Repository settings for '{org_name}/{repo_name}' updated.")
|
||||
|
||||
|
||||
def create_file(self, owner: str, repo: str, file_path: str, content: str, branch: str = "main", message: str = "Add file"):
    """Create ``file_path`` in ``owner/repo``, or update it if it exists.

    Content is base64-encoded as required by the Gitea contents API. On
    update, the file's current SHA is supplied and the commit message is
    replaced by "Update <path>".

    Fix: the original fetched the file info twice back-to-back (the
    "re-fetch for the latest SHA" happened immediately after the first
    lookup with nothing in between, so it could not observe anything
    newer); a single lookup is sufficient and its SHA is used directly.
    """
    file_info = self.get_file_info(owner, repo, file_path, branch=branch)

    data = {
        "content": base64.b64encode(content.encode('utf-8')).decode('ascii'),
        "branch": branch,
        "message": message
    }

    if file_info:
        print(f"--- Updating file {file_path} in {owner}/{repo} ---")
        # Updating requires the current blob SHA.
        data["sha"] = file_info["sha"]
        data["message"] = f"Update {file_path}"
        method = "PUT"
    else:
        print(f"--- Creating file {file_path} in {owner}/{repo} ---")
        method = "POST"

    url = f"repos/{owner}/{repo}/contents/{file_path}"
    self._request(method, url, json=data)
    print(f"File {file_path} {'updated' if file_info else 'created'} in {owner}/{repo}.")
|
||||
|
||||
def create_gitea_pr(self, repo_full_name: str, diff_content: str, title: str, use_fork: bool, base_branch: str = "main", body: str = ""):
    """Open a pull request against ``repo_full_name`` carrying ``diff_content``.

    When ``use_fork`` is true, the change is pushed to a fork owned by the
    current Sudo user; otherwise a throwaway branch is created directly in
    the target repository. Returns the created PR object as a dict.
    """
    owner, repo = repo_full_name.split("/")
    head_owner, head_repo = owner, repo

    if use_fork:
        sudo_user = self.headers.get("Sudo")
        head_owner = sudo_user
        head_repo = repo
        new_branch_name = f"pr-branch-{int(time.time()*1000)}"

        print(f"--- Forking {repo_full_name} ---")
        try:
            self._request("POST", f"repos/{owner}/{repo}/forks", json={})
            print(f"--- Forked to {head_owner}/{head_repo} ---")
            time.sleep(0.5)  # Give more time for fork to be ready
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 409:  # 409 means: already forked
                raise
            print(f"--- Already forked to {head_owner}/{head_repo} ---")

        # Create a unique branch in the FORK
        base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]
        print(f"--- Creating branch {new_branch_name} in {head_owner}/{head_repo} from {base_branch} ({base_commit_sha}) ---")
        self._request("POST", f"repos/{head_owner}/{head_repo}/branches", json={
            "new_branch_name": new_branch_name,
            "old_ref": base_commit_sha,
        })
    else:
        new_branch_name = f"pr-branch-{int(time.time()*1000)}"
        # Get the latest commit SHA of the base branch from the ORIGINAL repo
        base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]

        # Try to create the branch in the ORIGINAL repo
        print(f"--- Creating branch {new_branch_name} in {repo_full_name} ---")
        self._request("POST", f"repos/{owner}/{repo}/branches", json={
            "new_branch_name": new_branch_name,
            "old_ref": base_commit_sha,
        })

    # Apply the diff using diffpatch in the branch (wherever it is)
    print(f"--- Applying diff to {head_owner}/{head_repo} branch {new_branch_name} ---")
    self._request("POST", f"repos/{head_owner}/{head_repo}/diffpatch", json={
        "branch": new_branch_name,
        "content": diff_content,
        "message": title,
    })

    # Now create the PR in the ORIGINAL repo
    head_ref = new_branch_name if head_owner == owner else f"{head_owner}:{new_branch_name}"
    pr_payload = {
        "head": head_ref,
        "base": base_branch,
        "title": title,
        "body": body,
        "allow_maintainer_edit": True,
    }
    print(f"--- Creating PR in {repo_full_name} from {head_ref} ---")
    return self._request("POST", f"repos/{owner}/{repo}/pulls", json=pr_payload).json()
|
||||
|
||||
def create_branch(self, owner: str, repo: str, new_branch_name: str, old_ref: str):
    """Create branch ``new_branch_name`` from ``old_ref`` unless it already exists."""
    print(f"--- Checking branch '{new_branch_name}' in {owner}/{repo} ---")
    try:
        self._request("GET", f"repos/{owner}/{repo}/branches/{new_branch_name}")
    except requests.exceptions.HTTPError as e:
        if e.response.status_code != 404:
            raise  # Re-raise other HTTP errors
    else:
        print(f"Branch '{new_branch_name}' already exists.")
        return

    print(f"--- Creating branch '{new_branch_name}' in {owner}/{repo} from {old_ref} ---")
    self._request("POST", f"repos/{owner}/{repo}/branches", json={
        "new_branch_name": new_branch_name,
        "old_ref": old_ref,
    })
    print(f"Branch '{new_branch_name}' created in {owner}/{repo}.")
|
||||
|
||||
def ensure_branch_exists(self, owner: str, repo: str, branch: str = "main", timeout: int = 10):
    """Poll until ``branch`` is visible via the API, failing after ``timeout`` seconds."""
    print(f"--- Ensuring branch '{branch}' exists in {owner}/{repo} ---")
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            self._request("GET", f"repos/{owner}/{repo}/branches/{branch}")
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 404:
                raise
            # Branch not replicated yet -- wait and poll again.
            print(f"Branch '{branch}' not found yet in {owner}/{repo}. Retrying...")
            time.sleep(1)
        else:
            print(f"Branch '{branch}' confirmed in {owner}/{repo}.")
            return
    raise Exception(f"Timeout waiting for branch {branch} in {owner}/{repo}")
|
||||
|
||||
|
||||
|
||||
def modify_gitea_pr(self, repo_full_name: str, pr_number: int, diff_content: str, message: str):
    """Push an additional change (``diff_content``) onto an open PR's head branch."""
    owner, repo = repo_full_name.split("/")

    # The head branch may live in a fork, so resolve both the branch and its repo.
    pr_details = self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}").json()
    head = pr_details["head"]
    branch = head["ref"]
    fork_owner = head["repo"]["owner"]["login"]
    fork_name = head["repo"]["name"]

    print(f"--- Modifying PR #{pr_number} in {fork_owner}/{fork_name} branch {branch} ---")
    self._request("POST", f"repos/{fork_owner}/{fork_name}/diffpatch", json={
        "branch": branch,
        "content": diff_content,
        "message": message,
    })
|
||||
|
||||
def update_gitea_pr_properties(self, repo_full_name: str, pr_number: int, **kwargs):
    """PATCH arbitrary PR properties (title, body, state, ...) and return the updated PR."""
    owner, repo = repo_full_name.split("/")
    resp = self._request("PATCH", f"repos/{owner}/{repo}/pulls/{pr_number}", json=kwargs)
    return resp.json()
|
||||
|
||||
def get_timeline_events(self, repo_full_name: str, pr_number: int):
    """Fetch the issue timeline of a PR, retrying while it is empty or 404."""
    owner, repo = repo_full_name.split("/")
    url = f"repos/{owner}/{repo}/issues/{pr_number}/timeline"

    # Retry logic for timeline events
    for attempt in range(1, 11):  # Try up to 10 times
        try:
            events = self._request("GET", url).json()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 404:
                raise  # Re-raise other HTTP errors
            print(f"Attempt {attempt}: Timeline for PR {pr_number} not found yet. Retrying in 1 seconds...")
            time.sleep(1)
            continue
        if events:
            return events
        print(f"Attempt {attempt}: Timeline for PR {pr_number} is empty. Retrying in 1 seconds...")
        time.sleep(1)
    raise Exception(f"Failed to retrieve timeline for PR {pr_number} after multiple retries.")
|
||||
|
||||
def get_comments(self, repo_full_name: str, pr_number: int):
    """Fetch the PR's issue comments, retrying while the list is empty or 404."""
    owner, repo = repo_full_name.split("/")
    url = f"repos/{owner}/{repo}/issues/{pr_number}/comments"

    # Retry logic for comments
    for attempt in range(1, 11):  # Try up to 10 times
        try:
            comments = self._request("GET", url).json()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 404:
                raise  # Re-raise other HTTP errors
            print(f"Attempt {attempt}: Comments for PR {pr_number} not found yet. Retrying in 1 seconds...")
            time.sleep(1)
            continue
        print(f"Attempt {attempt}: Comments for PR {pr_number} received: {comments}")  # Added debug print
        if comments:
            return comments
        print(f"Attempt {attempt}: Comments for PR {pr_number} are empty. Retrying in 1 seconds...")
        time.sleep(1)
    raise Exception(f"Failed to retrieve comments for PR {pr_number} after multiple retries.")
|
||||
|
||||
def get_pr_details(self, repo_full_name: str, pr_number: int):
    """Return the full PR object for ``repo_full_name`` #``pr_number``."""
    owner, repo = repo_full_name.split("/")
    return self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}").json()
|
||||
|
||||
def create_review(self, repo_full_name: str, pr_number: int, event: str = "APPROVED", body: str = "LGTM"):
    """Create and submit a PR review, tolerating already-approved/pending states.

    Flow: skip if this user already approved; otherwise POST a new review,
    falling back to an existing PENDING review on failure; finally submit
    the review if it is still in PENDING state.

    NOTE(review): the acting user is derived from the Sudo header, falling
    back to "admin" -- confirm this matches the token owner in all callers.
    """
    owner, repo = repo_full_name.split("/")

    # Check if this user already has an APPROVED review to avoid 422
    current_user = self.headers.get("Sudo") or "admin" # simplified
    existing_reviews = self.list_reviews(repo_full_name, pr_number)
    for r in existing_reviews:
        if r["user"]["login"] == current_user and r["state"] == "APPROVED" and event == "APPROVED":
            print(f"User {current_user} already has an APPROVED review for {repo_full_name} PR #{pr_number}")
            return r

    url = f"repos/{owner}/{repo}/pulls/{pr_number}/reviews"
    data = {
        "event": event,
        "body": body
    }
    print(f"--- Creating and submitting review ({event}) for {repo_full_name} PR #{pr_number} as {current_user} ---")
    try:
        response = self._request("POST", url, json=data)
        review = response.json()
    except requests.exceptions.HTTPError as e:
        # If it fails with 422, it might be because a review is already pending or something else
        print(f"Failed to create review: {e.response.text}")
        # Try to find a pending review to submit
        existing_reviews = self.list_reviews(repo_full_name, pr_number)
        pending_review = next((r for r in existing_reviews if r["user"]["login"] == current_user and r["state"] == "PENDING"), None)
        if pending_review:
            review = pending_review
        else:
            raise

    # If the state is PENDING, we submit it.
    if review.get("state") == "PENDING":
        review_id = review["id"]
        submit_url = f"repos/{owner}/{repo}/pulls/{pr_number}/reviews/{review_id}"
        submit_data = {
            "event": event,
            "body": body
        }
        try:
            self._request("POST", submit_url, json=submit_data)
            print(f"--- Review {review_id} submitted ---")
        except requests.exceptions.HTTPError as e:
            # Some server responses refuse re-submission; those are treated as benign.
            if "already" in e.response.text.lower() or "stay pending" in e.response.text.lower():
                print(f"Review {review_id} could not be submitted further: {e.response.text}")
            else:
                raise

    return review
|
||||
|
||||
def list_reviews(self, repo_full_name: str, pr_number: int):
    """Return all reviews on the given pull request."""
    owner, repo = repo_full_name.split("/")
    return self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}/reviews").json()
|
||||
|
||||
def approve_requested_reviews(self, repo_full_name: str, pr_number: int):
    """Approve, acting as each requested reviewer, every review in REQUEST_REVIEW state."""
    print(f"--- Checking for REQUEST_REVIEW state in {repo_full_name} PR #{pr_number} ---")
    all_reviews = self.list_reviews(repo_full_name, pr_number)

    pending = [review for review in all_reviews if review["state"] == "REQUEST_REVIEW"]
    if not pending:
        print(f"No reviews in REQUEST_REVIEW state found for {repo_full_name} PR #{pr_number}")
        return

    # Reuse our admin token but act as the requested reviewer via the Sudo header.
    admin_token = self.headers["Authorization"].split(" ")[1]
    for review in pending:
        reviewer_username = review["user"]["login"]
        print(f"Reacting on REQUEST_REVIEW for user {reviewer_username} by approving...")

        reviewer_client = GiteaAPIClient(base_url=self.base_url, token=admin_token, sudo=reviewer_username)
        time.sleep(1)  # give a chance to avoid possible concurrency issues with reviews request/approval
        reviewer_client.create_review(repo_full_name, pr_number, event="APPROVED", body="Approving requested review")
|
||||
|
||||
def restart_service(self, service_name: str):
    """Restart a podman-compose service from the project root directory."""
    print(f"--- Restarting service: {service_name} ---")
    # Assumes podman-compose.yml is in the parent directory of tests/lib
    project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
    try:
        subprocess.run(["podman-compose", "restart", service_name], check=True, cwd=project_root)
    except subprocess.CalledProcessError as e:
        print(f"Error restarting service {service_name}: {e}")
        raise
    print(f"Service {service_name} restarted successfully.")
|
||||
|
||||
153
integration/tests/test_pr_workflow.py
Executable file
153
integration/tests/test_pr_workflow.py
Executable file
@@ -0,0 +1,153 @@
|
||||
import pytest
|
||||
import re
|
||||
import time
|
||||
import subprocess
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from tests.lib.common_test_utils import (
|
||||
GiteaAPIClient,
|
||||
mock_build_result,
|
||||
)
|
||||
|
||||
# =============================================================================
|
||||
# TEST CASES
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def test_pr_workflow_succeeded(gitea_env, mock_build_result):
    """End-to-end test for a successful PR workflow."""
    # Phase 1: open a package PR in pool/pkgA with a trivial new-file diff.
    diff = "diff --git a/test.txt b/test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should succeed", False)
    initial_pr_number = pr["number"]

    # Directory containing the compose file (two levels above this test file).
    compose_dir = Path(__file__).parent.parent

    # Phase 2: wait for the workflow bot to forward the PR to products/SLFO,
    # detected via a "pull_ref" event on the package PR's timeline.
    forwarded_pr_number = None
    print(
        f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event..."
    )
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    forwarded_pr_number = match.group(1)
                    break
        if forwarded_pr_number:
            break
    assert (
        forwarded_pr_number is not None
    ), "Workflow bot did not create a pull_ref event on the timeline."
    print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")

    # Phase 3: wait for the staging bot to be requested as reviewer on the project PR.
    print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
    reviewer_added = False
    for _ in range(15):
        time.sleep(1)
        pr_details = gitea_env.get_pr_details("products/SLFO", forwarded_pr_number)
        if any(
            r.get("login") == "autogits_obs_staging_bot"
            for r in pr_details.get("requested_reviewers", [])
        ):
            reviewer_added = True
            break
    assert reviewer_added, "Staging bot was not added as a reviewer."
    print("Staging bot has been added as a reviewer.")

    # Phase 4: make the mocked OBS report a successful build for pkgA.
    mock_build_result(package_name="pkgA", code="succeeded")

    # Restart the staging bot so it re-reads the (mocked) build result.
    print("Restarting obs-staging-bot...")
    subprocess.run(
        ["podman-compose", "restart", "obs-staging-bot"],
        cwd=compose_dir,
        check=True,
        capture_output=True,
    )

    # Phase 5: wait for the staging bot's "successful" status comment.
    print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
    status_comment_found = False
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("products/SLFO", forwarded_pr_number)
        for event in timeline_events:
            print(event.get("body", "not a body"))
            if event.get("body") and "successful" in event["body"]:
                status_comment_found = True
                break
        if status_comment_found:
            break
    assert status_comment_found, "Staging bot did not post a 'successful' comment."
|
||||
|
||||
|
||||
def test_pr_workflow_failed(gitea_env, mock_build_result):
    """End-to-end test for a failed PR workflow."""
    # Phase 1: open a package PR in pool/pkgA with a trivial new-file diff.
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should fail", False)
    initial_pr_number = pr["number"]

    # Directory containing the compose file (two levels above this test file).
    compose_dir = Path(__file__).parent.parent

    # Phase 2: wait for the workflow bot to forward the PR to products/SLFO,
    # detected via a "pull_ref" event on the package PR's timeline.
    forwarded_pr_number = None
    print(
        f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event..."
    )
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    forwarded_pr_number = match.group(1)
                    break
        if forwarded_pr_number:
            break
    assert (
        forwarded_pr_number is not None
    ), "Workflow bot did not create a pull_ref event on the timeline."
    print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")

    # Phase 3: wait for the staging bot to be requested as reviewer on the project PR.
    print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
    reviewer_added = False
    for _ in range(15):
        time.sleep(1)
        pr_details = gitea_env.get_pr_details("products/SLFO", forwarded_pr_number)
        if any(
            r.get("login") == "autogits_obs_staging_bot"
            for r in pr_details.get("requested_reviewers", [])
        ):
            reviewer_added = True
            break
    assert reviewer_added, "Staging bot was not added as a reviewer."
    print("Staging bot has been added as a reviewer.")

    # Phase 4: make the mocked OBS report a FAILED build for pkgA.
    mock_build_result(package_name="pkgA", code="failed")

    # Restart the staging bot so it re-reads the (mocked) build result.
    print("Restarting obs-staging-bot...")
    subprocess.run(
        ["podman-compose", "restart", "obs-staging-bot"],
        cwd=compose_dir,
        check=True,
        capture_output=True,
    )

    # Phase 5: wait for the staging bot's "failed" status comment.
    print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
    status_comment_found = False
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("products/SLFO", forwarded_pr_number)
        for event in timeline_events:
            if event.get("body") and "failed" in event["body"]:
                status_comment_found = True
                break
        if status_comment_found:
            break
    assert status_comment_found, "Staging bot did not post a 'failed' comment."
|
||||
82
integration/tests/workflow_pr_merge_test.py
Normal file
82
integration/tests/workflow_pr_merge_test.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import pytest
|
||||
import re
|
||||
import time
|
||||
from pathlib import Path
|
||||
from tests.lib.common_test_utils import GiteaAPIClient
|
||||
|
||||
@pytest.mark.t001
@pytest.mark.xfail(reason="The bot sometimes re-request reviews despite having all the approvals")
def test_001_automerge(automerge_env, test_user_client):
    """
    Test scenario:
    1. Setup custom workflow.config with mandatory reviewers (+usera, +userb).
    2. Create a package PR in 'merge' branch.
    3. Make sure the workflow-pr service created related project PR in 'merge' branch.
    4. React on 'requested' reviews by approving them.
    5. Make sure both PRs are merged automatically by the workflow-pr service.
    """
    gitea_env, test_full_repo_name, merge_branch_name = automerge_env

    # 1. Create a package PR
    # (the diff body is a unified diff creating an empty fixture file)
    diff = """diff --git a/merge_test_fixture.txt b/merge_test_fixture.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgA on branch {merge_branch_name} ---")
    package_pr = test_user_client.create_gitea_pr("pool/pkgA", diff, "Test Automerge Fixture", False, base_branch=merge_branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgA#{package_pr_number}")

    # 2. Make sure the workflow-pr service created related project PR
    # (detected by a "pull_ref" timeline event referencing products/SLFO)
    project_pr_number = None
    print(f"Polling pool/pkgA PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break

    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")

    # 4. Make sure both PRs are merged automatically by the workflow-pr service
    # While polling, approve any review requests the bot raises so the merge
    # preconditions are satisfied.
    print("Polling for PR merge status and reacting on REQUEST_REVIEW...")
    package_merged = False
    project_merged = False

    for i in range(15):  # Poll for up to 15 seconds
        # Package PR
        if not package_merged:
            pkg_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
            if pkg_details.get("merged"):
                package_merged = True
                print(f"Package PR pool/pkgA#{package_pr_number} merged.")
            else:
                gitea_env.approve_requested_reviews("pool/pkgA", package_pr_number)

        # Project PR
        if not project_merged:
            prj_details = gitea_env.get_pr_details("products/SLFO", project_pr_number)
            if prj_details.get("merged"):
                project_merged = True
                print(f"Project PR products/SLFO#{project_pr_number} merged.")
            else:
                gitea_env.approve_requested_reviews("products/SLFO", project_pr_number)

        if package_merged and project_merged:
            break

        time.sleep(1)

    assert package_merged, f"Package PR pool/pkgA#{package_pr_number} was not merged automatically."
    assert project_merged, f"Project PR products/SLFO#{project_pr_number} was not merged automatically."
    print("Both PRs merged successfully.")
|
||||
346
integration/tests/workflow_pr_review_test.py
Normal file
346
integration/tests/workflow_pr_review_test.py
Normal file
@@ -0,0 +1,346 @@
|
||||
import pytest
|
||||
import re
|
||||
import time
|
||||
import base64
|
||||
from pathlib import Path
|
||||
from tests.lib.common_test_utils import GiteaAPIClient
|
||||
|
||||
@pytest.mark.t004
@pytest.mark.xfail(reason="the bot sometimes re-requests review from autogits_obs_staging_bot despite having the approval")
def test_004_maintainer(maintainer_env, ownerA_client):
    """
    Test scenario:
    1. workflow.config will not have users with '+' sign.
    2. The package PR is opened by the package maintainer (ownerA for pkgA).
    3. Do not submit any review approval.
    4. Check that both PRs are automatically merged anyway.
    """
    gitea_env, test_full_repo_name, branch_name = maintainer_env

    # 0. Smoke test ownerA_client
    print(f"--- Smoke testing ownerA_client ---")
    ownerA_client._request("GET", "users/admin")
    print(f"ownerA_client smoke test passed")

    # 0.1 Verify all users from config exist
    # Reviewer names may carry a leading '+'/'-' marker, stripped below.
    print("--- Verifying all users from config exist ---")
    import json
    wf_file = gitea_env.get_file_info("products", "SLFO", "workflow.config", branch=branch_name)
    wf = json.loads(base64.b64decode(wf_file["content"]).decode("utf-8"))
    mt_file = gitea_env.get_file_info("products", "SLFO", "_maintainership.json", branch=branch_name)
    mt = json.loads(base64.b64decode(mt_file["content"]).decode("utf-8"))

    expected_users = set()
    for r in wf.get("Reviewers", []):
        username = r.lstrip("+-")
        if username and username not in ["autogits_obs_staging_bot", "workflow-pr"]:
            expected_users.add(username)
    for pkg_users in mt.values():
        for username in pkg_users:
            expected_users.add(username)

    for username in expected_users:
        gitea_env._request("GET", f"users/{username}")
        print(f"Verified user exists: {username}")

    # 1. Create a package PR as ownerA
    # (use_fork=True: the change goes through ownerA's fork)
    diff = """diff --git a/maintainer_test_fixture.txt b/maintainer_test_fixture.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgA on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgA", diff, "Test Maintainer Merge", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgA#{package_pr_number}")

    # 2. Make sure the workflow-pr service created related project PR
    project_pr_number = None
    print(f"Polling pool/pkgA PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break

    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")

    # 3. Make sure both PRs are merged automatically WITHOUT manual approvals
    # (only the staging bot's own requested review may be approved)
    print("Polling for PR merge status (only bot approval allowed)...")
    package_merged = False
    project_merged = False

    for i in range(15):  # Poll for up to 15 seconds
        # Package PR
        if not package_merged:
            pkg_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
            if pkg_details.get("merged"):
                package_merged = True
                print(f"Package PR pool/pkgA#{package_pr_number} merged.")
            else:
                # Approve ONLY bot if requested
                reviews = gitea_env.list_reviews("pool/pkgA", package_pr_number)
                if any(r["state"] == "REQUEST_REVIEW" and r["user"]["login"] == "autogits_obs_staging_bot" for r in reviews):
                    gitea_env.approve_requested_reviews("pool/pkgA", package_pr_number)

        # Project PR
        if not project_merged:
            prj_details = gitea_env.get_pr_details("products/SLFO", project_pr_number)
            if prj_details.get("merged"):
                project_merged = True
                print(f"Project PR products/SLFO#{project_pr_number} merged.")
            else:
                # Approve ONLY bot if requested
                reviews = gitea_env.list_reviews("products/SLFO", project_pr_number)
                if any(r["state"] == "REQUEST_REVIEW" and r["user"]["login"] == "autogits_obs_staging_bot" for r in reviews):
                    gitea_env.approve_requested_reviews("products/SLFO", project_pr_number)

        if package_merged and project_merged:
            break

        time.sleep(1)

    assert package_merged, f"Package PR pool/pkgA#{package_pr_number} was not merged automatically."
    assert project_merged, f"Project PR products/SLFO#{project_pr_number} was not merged automatically."
    print("Both PRs merged successfully by maintainer rule.")
|
||||
|
||||
|
||||
@pytest.mark.t005
# @pytest.mark.xfail(reason="TBD troubleshoot")
def test_005_any_maintainer_approval_sufficient(maintainer_env, ownerA_client, ownerBB_client):
    """
    Test scenario:
    1. The package PR for pkgB is opened by ownerA (who is not a maintainer of pkgB).
    2. Check that review request comes to both ownerB and ownerBB.
    3. ownerB doesn't leave review.
    4. check that review from ownerBB was enough to get both PRs merged.
    """
    gitea_env, test_full_repo_name, branch_name = maintainer_env

    # 1. Create a package PR for pool/pkgB as ownerA
    diff = """diff --git a/pkgB_test_fixture.txt b/pkgB_test_fixture.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgB on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgB", diff, "Test Single Maintainer Merge", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgB#{package_pr_number}")

    # 2. Make sure the workflow-pr service created the related project PR by
    # watching the package PR timeline for a cross-reference ("pull_ref") event.
    project_pr_number = None
    print(f"Polling pool/pkgB PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgB", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break

    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")

    # 3. Check that review requests came to ownerB and ownerBB
    print("Checking for review requests from ownerB and ownerBB...")
    reviewers_requested = set()
    for _ in range(20):
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        if "ownerB" in reviewers_requested and "ownerBB" in reviewers_requested:
            break
        time.sleep(1)

    assert "ownerB" in reviewers_requested, f"ownerB was not requested for review. Requested: {reviewers_requested}"
    assert "ownerBB" in reviewers_requested, f"ownerBB was not requested for review. Requested: {reviewers_requested}"
    # Plain string: the original used an f-string with no placeholders (flake8 F541).
    print("Confirmed: ownerB and ownerBB were requested for review.")

    # 4. ownerBB leaves review, ownerB does not.
    print("ownerBB approving the PR...")
    ownerBB_client.create_review("pool/pkgB", package_pr_number, event="APPROVED", body="Approval from ownerBB")

    # 5. Check that both PRs are merged automatically
    print("Polling for PR merge status (only bot approval allowed for project PR)...")
    package_merged = False
    project_merged = False

    for _ in range(15):  # Poll for up to 15 seconds
        # Package PR: the single ownerBB approval should be sufficient.
        if not package_merged:
            pkg_details = gitea_env.get_pr_details("pool/pkgB", package_pr_number)
            if pkg_details.get("merged"):
                package_merged = True
                print(f"Package PR pool/pkgB#{package_pr_number} merged.")

        # Project PR: only the staging-bot review may be approved on our side.
        if not project_merged:
            prj_details = gitea_env.get_pr_details("products/SLFO", project_pr_number)
            if prj_details.get("merged"):
                project_merged = True
                print(f"Project PR products/SLFO#{project_pr_number} merged.")
            else:
                # Approve ONLY bot if requested
                reviews = gitea_env.list_reviews("products/SLFO", project_pr_number)
                if any(r["state"] == "REQUEST_REVIEW" and r["user"]["login"] == "autogits_obs_staging_bot" for r in reviews):
                    gitea_env.approve_requested_reviews("products/SLFO", project_pr_number)

        if package_merged and project_merged:
            break

        time.sleep(1)

    assert package_merged, f"Package PR pool/pkgB#{package_pr_number} was not merged automatically."
    assert project_merged, f"Project PR products/SLFO#{project_pr_number} was not merged automatically."
    print("Both PRs merged successfully with only one maintainer approval.")
|
||||
|
||||
|
||||
@pytest.mark.t006
def test_006_maintainer_rejection_removes_other_requests(maintainer_env, ownerA_client, ownerBB_client):
    """
    Test scenario:
    1. The package PR for pkgB is opened by ownerA (who is not a maintainer of pkgB).
    2. Check that review request comes to both ownerB and ownerBB.
    3. ownerBB rejects the PR (REQUEST_CHANGES).
    4. Check that review request for ownerB is removed.
    """
    gitea_env, test_full_repo_name, branch_name = maintainer_env

    # 1. Create a package PR for pool/pkgB as ownerA
    diff = """diff --git a/pkgB_rejection_test.txt b/pkgB_rejection_test.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgB on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgB", diff, "Test Maintainer Rejection", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgB#{package_pr_number}")

    # 2. Check that review requests came to ownerB and ownerBB
    print("Checking for review requests from ownerB and ownerBB...")
    for _ in range(20):
        pending = {
            review["user"]["login"]
            for review in gitea_env.list_reviews("pool/pkgB", package_pr_number)
            if review["state"] == "REQUEST_REVIEW"
        }
        if {"ownerB", "ownerBB"} <= pending:
            break
        time.sleep(1)
    else:
        # Refresh once more so the failure message shows the latest state.
        pending = {
            review["user"]["login"]
            for review in gitea_env.list_reviews("pool/pkgB", package_pr_number)
            if review["state"] == "REQUEST_REVIEW"
        }
        pytest.fail(f"ownerB and ownerBB were not both requested. Got: {pending}")

    # 3. ownerBB rejects the PR
    print("ownerBB rejecting the PR...")
    ownerBB_client.create_review("pool/pkgB", package_pr_number, event="REQUEST_CHANGES", body="Rejecting from ownerBB")

    # 4. Check that review request for ownerB is removed
    print("Checking if ownerB's review request is removed...")
    for _ in range(20):
        pending = {
            review["user"]["login"]
            for review in gitea_env.list_reviews("pool/pkgB", package_pr_number)
            if review["state"] == "REQUEST_REVIEW"
        }
        if "ownerB" not in pending:
            print("Confirmed: ownerB's review request was removed.")
            break
        time.sleep(1)
    else:
        pytest.fail("ownerB's review request was not removed after ownerBB rejection.")
|
||||
|
||||
|
||||
@pytest.mark.t007
@pytest.mark.xfail(reason="TBD troubleshoot")
def test_007_review_required_needs_all_approvals(review_required_env, ownerA_client, ownerBB_client):
    """
    Test scenario:
    1. it uses new fixture with "ReviewRequired = true" in the workflow.config.
    2. Package PR for pkgB opened by ownerA.
    3. Check review request comes to both ownerB and ownerBB.
    4. ownerBB approves.
    5. make sure that review is not merged automatically and the request for ownerB is not removed.
    """
    gitea_env, test_full_repo_name, branch_name = review_required_env

    # 0. Smoke test ownerA_client
    # (plain strings: the originals were f-strings without placeholders, flake8 F541)
    print("--- Smoke testing ownerA_client ---")
    ownerA_client._request("GET", "users/admin")
    print("ownerA_client smoke test passed")

    # 1. Create a package PR for pool/pkgB as ownerA
    diff = """diff --git a/pkgB_review_required_test.txt b/pkgB_review_required_test.txt
new file mode 100644
index 0000000..e69de29
"""
    print(f"--- Creating package PR in pool/pkgB on branch {branch_name} as ownerA ---")
    package_pr = ownerA_client.create_gitea_pr("pool/pkgB", diff, "Test Review Required", True, base_branch=branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created package PR pool/pkgB#{package_pr_number}")

    # 2. Make sure the workflow-pr service created the related project PR by
    # watching the package PR timeline for a cross-reference ("pull_ref") event.
    project_pr_number = None
    print(f"Polling pool/pkgB PR #{package_pr_number} timeline for forwarded PR event...")
    for _ in range(40):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgB", package_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    project_pr_number = int(match.group(1))
                    break
        if project_pr_number:
            break

    assert project_pr_number is not None, "Workflow bot did not create a project PR."
    print(f"Found project PR: products/SLFO#{project_pr_number}")

    # 3. Check that review requests came to ownerB and ownerBB
    print("Checking for review requests from ownerB and ownerBB...")
    for _ in range(20):
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        if "ownerB" in reviewers_requested and "ownerBB" in reviewers_requested:
            break
        time.sleep(1)
    else:
        # Loop exhausted without seeing both requests; report the final state.
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
        pytest.fail(f"ownerB and ownerBB were not both requested. Got: {reviewers_requested}")

    # 4. ownerBB leaves review, ownerB does not.
    print("ownerBB approving the PR...")
    ownerBB_client.create_review("pool/pkgB", package_pr_number, event="APPROVED", body="Approval from ownerBB")

    # 5. Check that the PR is NOT merged automatically and ownerB request remains
    print("Waiting to ensure PR is NOT merged and ownerB request remains...")
    for i in range(10):
        # Observe (and log) the state for a while; with ReviewRequired=true a
        # single approval must not be enough to trigger the auto-merge.
        pkg_details = gitea_env.get_pr_details("pool/pkgB", package_pr_number)
        reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
        review_states = [(r["user"]["login"], r["state"]) for r in reviews]
        print(f"Attempt {i+1}: Merged={pkg_details.get('merged')}, Reviews={review_states}")
        time.sleep(2)

    pkg_details = gitea_env.get_pr_details("pool/pkgB", package_pr_number)
    assert not pkg_details.get("merged"), "Package PR was merged automatically but it should NOT have been (ReviewRequired=true)."

    reviews = gitea_env.list_reviews("pool/pkgB", package_pr_number)
    reviewers_requested = {r["user"]["login"] for r in reviews if r["state"] == "REQUEST_REVIEW"}
    assert "ownerB" in reviewers_requested, f"ownerB's review request was removed, but it should have remained. All reviews: {[(r['user']['login'], r['state']) for r in reviews]}"

    print("Confirmed: PR not merged and ownerB review request remains as expected.")
|
||||
326
integration/tests/workflow_pr_sync_test.py
Executable file
326
integration/tests/workflow_pr_sync_test.py
Executable file
@@ -0,0 +1,326 @@
|
||||
import pytest
|
||||
import re
|
||||
import time
|
||||
import subprocess
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from tests.lib.common_test_utils import (
|
||||
GiteaAPIClient,
|
||||
)
|
||||
|
||||
# =============================================================================
|
||||
# TEST CASES
|
||||
# =============================================================================
|
||||
|
||||
# Cross-test shared state for the dependency-ordered tests below, stashed on
# the pytest module namespace so later tests (t002, t003) can reuse the PR
# created in t001.
# NOTE(review): attaching ad-hoc attributes to the pytest module is an
# anti-pattern; a session-scoped fixture or a module-level dict would be the
# conventional mechanism — confirm before refactoring, since the names are
# read by several tests.
pytest.pr = None
pytest.pr_details = None
pytest.initial_pr_number = None
pytest.forwarded_pr_number = None
|
||||
|
||||
|
||||
@pytest.mark.t001
@pytest.mark.dependency()
def test_001_project_pr(gitea_env):
    """Forwarded PR correct title"""
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pytest.pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR", False)
    pytest.initial_pr_number = pytest.pr["number"]
    time.sleep(5)  # Give Gitea some time to process the PR and make the timeline available

    pytest.forwarded_pr_number = None
    print(
        f"Polling pool/pkgA PR #{pytest.initial_pr_number} timeline for forwarded PR event..."
    )
    # Poll the source PR's timeline until the bot cross-references ("pull_ref")
    # the forwarded PR in products/SLFO.
    # (Removed an unused `compose_dir` local and a stale comment that claimed
    # the code does not poll the timeline.)
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", pytest.initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    # NOTE(review): stored as a string here, while the sibling
                    # test file stores it as int — both work as a URL path
                    # segment; unify if it ever gets compared numerically.
                    pytest.forwarded_pr_number = match.group(1)
                    break
        if pytest.forwarded_pr_number:
            break
    assert (
        pytest.forwarded_pr_number is not None
    ), "Workflow bot did not create a forwarded PR."
    pytest.pr_details = gitea_env.get_pr_details("products/SLFO", pytest.forwarded_pr_number)
    assert (
        pytest.pr_details["title"] == "Forwarded PRs: pkgA"
    ), "Forwarded PR correct title"
|
||||
|
||||
|
||||
@pytest.mark.t002
@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_002_updated_project_pr(gitea_env):
    """Forwarded PR head is updated"""
    # Push a follow-up change to the source PR created by test_001.
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100444\nindex 0000000..e69de21\n"
    gitea_env.modify_gitea_pr("pool/pkgA", pytest.initial_pr_number, diff, "Tweaks")
    previous_sha = pytest.pr_details["head"]["sha"]

    # Poll until the forwarded PR's head commit moves away from the old SHA.
    head_moved = False
    for _ in range(20):
        time.sleep(1)
        details = gitea_env.get_pr_details("products/SLFO", pytest.forwarded_pr_number)
        current_sha = details["head"]["sha"]
        if current_sha != previous_sha:
            print(f"Sha changed from {previous_sha} to {current_sha}")
            head_moved = True
            break

    assert head_moved, "Forwarded PR has sha updated"
|
||||
|
||||
|
||||
@pytest.mark.t003
@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_003_wip(gitea_env):
    """WIP flag set for PR"""
    # 1. Prefix the source PR (pool/pkgA) title with the WIP marker.
    source_details = gitea_env.get_pr_details("pool/pkgA", pytest.initial_pr_number)
    wip_title = "WIP: " + source_details["title"]
    gitea_env.update_gitea_pr_properties("pool/pkgA", pytest.initial_pr_number, title=wip_title)

    # 2. Poll until the forwarded PR in products/SLFO picks up the marker.
    marker_present = False
    for _ in range(20):
        time.sleep(1)
        forwarded = gitea_env.get_pr_details(
            "products/SLFO", pytest.forwarded_pr_number
        )
        if "WIP: " in forwarded["title"]:
            marker_present = True
            break

    assert marker_present, "WIP flag was not set in the forwarded PR."

    # Strip the WIP marker from the source PR again.
    source_details = gitea_env.get_pr_details("pool/pkgA", pytest.initial_pr_number)
    restored_title = source_details["title"].replace("WIP: ", "")
    gitea_env.update_gitea_pr_properties(
        "pool/pkgA", pytest.initial_pr_number, title=restored_title
    )

    # Poll until the forwarded PR drops the marker as well.
    marker_gone = False
    for _ in range(20):
        time.sleep(1)
        forwarded = gitea_env.get_pr_details(
            "products/SLFO", pytest.forwarded_pr_number
        )
        if "WIP: " not in forwarded["title"]:
            marker_gone = True
            break
    assert marker_gone, "WIP flag was not removed from the forwarded PR."
|
||||
|
||||
|
||||
@pytest.mark.t005
@pytest.mark.xfail(reason="works only in ibs_state branch?")
@pytest.mark.dependency()
def test_005_NoProjectGitPR_edits_disabled(no_project_git_pr_env, test_user_client):
    """
    Reworked test: Sets workflow.config with NoProjectGitPR: true and creates a Package PR.
    Verifies that no Project PR is created, then manually creates one and checks for bot warning.
    """
    gitea_env, test_full_repo_name, dev_branch_name = no_project_git_pr_env

    # 1. Create a Package PR (without "Allow edits from maintainers" enabled)
    initial_diff = """diff --git a/first_file.txt b/first_file.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/first_file.txt
@@ -0,0 +1 @@
+Initial content
"""
    package_pr = test_user_client.create_gitea_pr("pool/pkgA", initial_diff, "Test PR for No Project PR, No Edits", False, base_branch=dev_branch_name)
    package_pr_number = package_pr["number"]
    print(f"Created Package PR #{package_pr_number}")

    # 2. Verify that the workflow-pr bot did not create a Project PR
    project_pr_created = False
    for _ in range(10):  # Poll for some time
        time.sleep(2)
        for entry in gitea_env.get_timeline_events("pool/pkgA", package_pr_number):
            if entry.get("type") != "pull_ref":
                continue
            ref_issue = entry.get("ref_issue")
            if not ref_issue:
                continue
            if re.search(r"products/SLFO/pulls/(\d+)", ref_issue.get("html_url", "")):
                project_pr_created = True
                break
        if project_pr_created:
            break

    assert not project_pr_created, "Workflow bot unexpectedly created a Project PR in products/SLFO."
    print("Verification complete: No Project PR was created by the bot.")

    # 3. Manually create the Project PR
    pkgA_main_sha = gitea_env._request("GET", f"repos/pool/pkgA/branches/{dev_branch_name}").json()["commit"]["id"]
    package_pr_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
    pkgA_pr_head_sha = package_pr_details["head"]["sha"]

    project_pr_title = "Forwarded PRs: pkgA (Manual)"
    project_pr_body = f"Manual Project PR for NoProjectGitPR. \nPR: pool/pkgA!{package_pr_number}"
    project_pr_diff = f"""diff --git a/pkgA b/pkgA
index {pkgA_main_sha[:7]}..{pkgA_pr_head_sha[:7]} 160000
--- a/pkgA
+++ b/pkgA
@@ -1 +1 @@
-Subproject commit {pkgA_main_sha}
+Subproject commit {pkgA_pr_head_sha}
"""
    manual_project_pr = test_user_client.create_gitea_pr(test_full_repo_name, project_pr_diff, project_pr_title, True, base_branch=dev_branch_name, body=project_pr_body)
    manual_project_pr_number = manual_project_pr["number"]

    # Explicitly disable "Allow edits from maintainers" on the manual PR...
    test_user_client.update_gitea_pr_properties(test_full_repo_name, manual_project_pr_number, allow_maintainer_edit=False)

    # ...and verify the flag really flipped.
    updated_pr = gitea_env.get_pr_details(test_full_repo_name, manual_project_pr_number)
    assert updated_pr.get("allow_maintainer_edit") is False, "Expected allow_maintainer_edit to be False after update"

    print(f"Manually created Project PR #{manual_project_pr_number} in {test_full_repo_name}")

    # 4. Trigger an update on the Package PR to prompt the bot to react to the manual Project PR
    new_diff_content = """diff --git a/trigger_bot.txt b/trigger_bot.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/trigger_bot.txt
@@ -0,0 +1 @@
+Trigger content
"""
    test_user_client.modify_gitea_pr("pool/pkgA", package_pr_number, new_diff_content, "Trigger bot update")

    # 5. Verify that the bot adds a warning comment because it cannot update the manual PR (edits disabled)
    warning_found = False
    print(f"Polling Package PR #{package_pr_number} for warning comment...")
    for _ in range(20):
        time.sleep(3)
        for comment in gitea_env.get_comments("pool/pkgA", package_pr_number):
            # According to test-plan.md, the warning explains that it cannot update the PR.
            if "cannot update" in comment.get("body", "").lower():
                warning_found = True
                print(f"Warning comment found: {comment.get('body')}")
                break
        if warning_found:
            break

    # assert warning_found, "Bot did not post the expected warning comment on the Package PR."
    # print("Verification complete: Bot posted a warning comment as expected.")
|
||||
|
||||
|
||||
@pytest.mark.t006
@pytest.mark.xfail(reason="works only in ibs_state branch?")
@pytest.mark.dependency()
def test_006_NoProjectGitPR_edits_enabled(no_project_git_pr_env, test_user_client):
    """
    Verify that no project PR is created when "NoProjectGitPR" is true
    and "Allow edits from maintainers" is enabled, using a dev branch.
    """
    gitea_env, test_full_repo_name, dev_branch_name = no_project_git_pr_env

    # 2. Create a Package PR with "Allow edits from maintainers" enabled
    diff = """diff --git a/new_feature.txt b/new_feature.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/new_feature.txt
@@ -0,0 +1 @@
+New feature content
"""
    package_pr = test_user_client.create_gitea_pr("pool/pkgA", diff, "Test PR for NoProjectGitPR", False, base_branch=dev_branch_name)
    package_pr_number = package_pr["number"]

    # Enable "Allow edits from maintainers"
    test_user_client.update_gitea_pr_properties("pool/pkgA", package_pr_number, allow_maintainer_edit=True)
    print(f"Created Package PR #{package_pr_number} and enabled 'Allow edits from maintainers'.")

    # Get SHAs needed for the manual Project PR diff
    pkgA_main_sha = gitea_env._request("GET", f"repos/pool/pkgA/branches/{dev_branch_name}").json()["commit"]["id"]
    package_pr_details = gitea_env.get_pr_details("pool/pkgA", package_pr_number)
    pkgA_pr_head_sha = package_pr_details["head"]["sha"]

    # 3. Assert that the workflow-pr bot did not create a Project PR in the products/SLFO repository
    project_pr_created = False
    for _ in range(20):  # Poll for a reasonable time
        time.sleep(2)  # Wait a bit longer to be sure
        for entry in gitea_env.get_timeline_events("pool/pkgA", package_pr_number):
            if entry.get("type") != "pull_ref":
                continue
            ref_issue = entry.get("ref_issue")
            if not ref_issue:
                continue
            if re.search(r"products/SLFO/pulls/(\d+)", ref_issue.get("html_url", "")):
                project_pr_created = True
                break
        if project_pr_created:
            break

    assert not project_pr_created, "Workflow bot unexpectedly created a Project PR in products/SLFO."
    print("Verification complete: No Project PR was created in products/SLFO as expected.")

    # 1. Create that Project PR from the test code.
    project_pr_title = "Forwarded PRs: pkgA"
    project_pr_body = f"Test Project PR for NoProjectGitPR. \nPR: pool/pkgA!{package_pr_number}"
    project_pr_diff = f"""diff --git a/pkgA b/pkgA
index {pkgA_main_sha[:7]}..{pkgA_pr_head_sha[:7]} 160000
--- a/pkgA
+++ b/pkgA
@@ -1 +1 @@
-Subproject commit {pkgA_main_sha}
+Subproject commit {pkgA_pr_head_sha}
"""
    manual_project_pr = test_user_client.create_gitea_pr(test_full_repo_name, project_pr_diff, project_pr_title, True, base_branch=dev_branch_name, body=project_pr_body)
    manual_project_pr_number = manual_project_pr["number"]
    # Explicitly ensure allow_maintainer_edit is True (it should be by default now, but just in case)
    test_user_client.update_gitea_pr_properties(test_full_repo_name, manual_project_pr_number, allow_maintainer_edit=True)
    print(f"Manually created Project PR #{manual_project_pr_number} in {test_full_repo_name}")
    time.sleep(5)  # Give the bot time to potentially react or for the PR to settle

    # Record the head SHA of the manually created Project PR before triggering the bot.
    initial_project_pr_details = gitea_env.get_pr_details(test_full_repo_name, manual_project_pr_number)
    initial_head_sha = initial_project_pr_details["head"]["sha"]
    print(f"Manually created Project PR initial head SHA: {initial_head_sha}")

    # 2. Add new commit to the package PR.
    new_diff_content = """diff --git a/another_file.txt b/another_file.txt
new file mode 100644
index 0000000..f587a12
--- /dev/null
+++ b/another_file.txt
@@ -0,0 +1 @@
+Another file content
"""
    test_user_client.modify_gitea_pr("pool/pkgA", package_pr_number, new_diff_content, "Add another file to Package PR")
    print(f"Added new commit to Package PR #{package_pr_number}.")
    time.sleep(5)  # Give the bot time to react

    # 3. Make sure the project PR is properly updated by the bot
    project_pr_updated = False
    print(f"Polling manually created Project PR #{manual_project_pr_number} for update...")
    for _ in range(20):  # Poll for a reasonable time
        time.sleep(2)  # Wait a bit longer to be sure
        current_details = gitea_env.get_pr_details(test_full_repo_name, manual_project_pr_number)
        current_head_sha = current_details["head"]["sha"]
        if current_head_sha != initial_head_sha:
            project_pr_updated = True
            print(f"Manually created Project PR updated. New head SHA: {current_head_sha}")
            break

    assert project_pr_updated, "Manually created Project PR was not updated by the bot."
    print("Verification complete: Manually created Project PR was updated by the bot as expected.")
|
||||
|
||||
|
||||
1
integration/workflow-pr/Dockerfile
Symbolic link
1
integration/workflow-pr/Dockerfile
Symbolic link
@@ -0,0 +1 @@
|
||||
Dockerfile.package
|
||||
17
integration/workflow-pr/Dockerfile.local
Normal file
17
integration/workflow-pr/Dockerfile.local
Normal file
@@ -0,0 +1,17 @@
|
||||
# Image for running the workflow-pr bot from a locally built binary
# (the .package variant installs it from the distribution repo instead).
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

# Install git and ssh
RUN zypper -n in git-core openssh-clients binutils git-lfs

# Copy the pre-built binary into the container
COPY workflow-pr/workflow-pr /usr/local/bin/workflow-pr
COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
# NOTE(review): "+4755" is a nonstandard chmod mode spec, and the setuid bit is
# ignored for interpreted scripts on Linux anyway — "chmod 0755" is presumably
# what is intended; confirm before changing.
RUN chmod +4755 /usr/local/bin/entrypoint.sh

# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
18
integration/workflow-pr/Dockerfile.package
Normal file
18
integration/workflow-pr/Dockerfile.package
Normal file
@@ -0,0 +1,18 @@
|
||||
# Image for running the workflow-pr bot installed from the packaged RPM
# (the .local variant copies a locally built binary instead).
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

# Add the devel repo that ships the autogits packages and refresh metadata.
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref

# Install git and ssh
RUN zypper -n in git-core openssh-clients autogits-workflow-pr binutils git-lfs

COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
# NOTE(review): "+4755" is a nonstandard chmod mode spec, and the setuid bit is
# ignored for interpreted scripts on Linux anyway — "chmod 0755" is presumably
# what is intended; confirm before changing.
RUN chmod +4755 /usr/local/bin/entrypoint.sh

# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
66
integration/workflow-pr/entrypoint.sh
Normal file
66
integration/workflow-pr/entrypoint.sh
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/bin/bash
# Entrypoint for the workflow-pr integration container.
# Waits for the Gitea test instance to be fully provisioned (API token,
# seeded workflow.config, shared SSH key), prepares the SSH environment,
# then execs the workflow-pr binary with all container arguments.

TOKEN_FILE="/var/lib/gitea/workflow-pr.token"

# Wait for the token file to be created by the gitea setup script
echo "Waiting for $TOKEN_FILE..."
while [ ! -s "$TOKEN_FILE" ]; do
    sleep 2
done

# Read token and trim whitespace/newlines (redirect instead of a "cat |" pipeline)
GITEA_TOKEN=$(tr -d '\n\r ' < "$TOKEN_FILE")

if [ -z "$GITEA_TOKEN" ]; then
    echo "Error: Token file $TOKEN_FILE is empty after trimming."
    exit 1
fi

export GITEA_TOKEN
echo "GITEA_TOKEN exported (length: ${#GITEA_TOKEN})"

# Wait for the dummy data to be created by the gitea setup script
echo "Waiting for workflow.config in products/SLFO..."
API_URL="http://gitea-test:3000/api/v1/repos/products/SLFO/contents/workflow.config"
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")

while [ "$HTTP_STATUS" != "200" ]; do
    echo "workflow.config not found yet (HTTP Status: $HTTP_STATUS). Retrying in 5s..."
    sleep 5
    HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
done

# Wait for the shared SSH key to be generated by the gitea setup script
echo "Waiting for /var/lib/gitea/ssh-keys/id_ed25519..."
while [ ! -f /var/lib/gitea/ssh-keys/id_ed25519 ]; do
    sleep 2
done

export AUTOGITS_IDENTITY_FILE="/root/.ssh/id_ed25519"

# Pre-populate known_hosts with Gitea's SSH host key
echo "Preparing SSH environment in /root/.ssh..."
mkdir -p /root/.ssh
chmod 700 /root/.ssh

# Copy the private key to the standard location and set permissions
cp /var/lib/gitea/ssh-keys/id_ed25519 /root/.ssh/id_ed25519
chmod 600 /root/.ssh/id_ed25519

echo "Scanning Gitea SSH host key..."
# We try multiple times because Gitea might still be starting its SSH server
for i in {1..10}; do
    ssh-keyscan -p 3022 gitea-test >> /root/.ssh/known_hosts 2>/dev/null && break
    echo "Retrying ssh-keyscan in 2s..."
    sleep 2
done
chmod 644 /root/.ssh/known_hosts

# Prefer a workflow-pr from PATH (packaged install); fall back to the locally
# copied binary. "command -v" replaces the non-portable "which".
exe=$(command -v workflow-pr)
exe=${exe:-/usr/local/bin/workflow-pr}

package=$(rpm -qa | grep autogits-workflow-pr) || :

echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
command -v strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :

exec "$exe" "$@"
|
||||
7
integration/workflow-pr/workflow-pr.json
Normal file
7
integration/workflow-pr/workflow-pr.json
Normal file
@@ -0,0 +1,7 @@
|
||||
[
|
||||
"products/SLFO#main",
|
||||
"products/SLFO#dev",
|
||||
"products/SLFO#merge",
|
||||
"products/SLFO#maintainer-merge",
|
||||
"products/SLFO#review-required"
|
||||
]
|
||||
@@ -50,6 +50,10 @@ const (
|
||||
|
||||
var runId uint
|
||||
|
||||
var GitWorkTreeAllocate func(string, string, string) (common.GitHandlerGenerator, error) = func(basePath, gitAuthor, email string) (common.GitHandlerGenerator, error) {
|
||||
return common.AllocateGitWorkTree(basePath, gitAuthor, email)
|
||||
}
|
||||
|
||||
func FetchPrGit(git common.Git, pr *models.PullRequest) error {
|
||||
// clone PR head via base (target) repo
|
||||
cloneURL := pr.Base.Repo.CloneURL
|
||||
@@ -144,9 +148,9 @@ func ProcessBuildStatus(project *common.BuildResultList) BuildStatusSummary {
|
||||
|
||||
func ProcessRepoBuildStatus(results []*common.PackageBuildStatus) (status BuildStatusSummary) {
|
||||
|
||||
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
|
||||
return strings.Compare(a.Package, b.Package)
|
||||
}
|
||||
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
|
||||
return strings.Compare(a.Package, b.Package)
|
||||
}
|
||||
|
||||
common.LogDebug("******* RESULTS: ")
|
||||
data, _ := xml.MarshalIndent(results, "", " ")
|
||||
@@ -191,24 +195,23 @@ func GetPackageBuildStatus(project *common.BuildResultList, packageName string)
|
||||
return true, BuildStatusSummaryUnknown // true for 'missing'
|
||||
}
|
||||
|
||||
// Check for any failures
|
||||
// Check for any unfinished builds
|
||||
for _, pkgStatus := range packageStatuses {
|
||||
res, ok := common.ObsBuildStatusDetails[pkgStatus.Code]
|
||||
if !ok {
|
||||
common.LogInfo("unknown package result code:", pkgStatus.Code, "for package:", pkgStatus.Package)
|
||||
return false, BuildStatusSummaryUnknown
|
||||
}
|
||||
if !res.Success {
|
||||
return false, BuildStatusSummaryFailed
|
||||
if !res.Finished {
|
||||
return false, BuildStatusSummaryBuilding
|
||||
}
|
||||
}
|
||||
|
||||
// Check for any unfinished builds
|
||||
// Check for any failures
|
||||
for _, pkgStatus := range packageStatuses {
|
||||
res, _ := common.ObsBuildStatusDetails[pkgStatus.Code]
|
||||
// 'ok' is already checked in the loop above
|
||||
if !res.Finished {
|
||||
return false, BuildStatusSummaryBuilding
|
||||
if !res.Success {
|
||||
return false, BuildStatusSummaryFailed
|
||||
}
|
||||
}
|
||||
|
||||
@@ -216,7 +219,7 @@ func GetPackageBuildStatus(project *common.BuildResultList, packageName string)
|
||||
return false, BuildStatusSummarySuccess
|
||||
}
|
||||
|
||||
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
|
||||
func GenerateObsPrjMeta(obs common.ObsClientInterface, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
|
||||
common.LogDebug("repo content fetching ...")
|
||||
err := FetchPrGit(git, pr)
|
||||
if err != nil {
|
||||
@@ -260,13 +263,13 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
|
||||
}
|
||||
|
||||
common.LogDebug("Trying first staging master project: ", stagingMasterPrj)
|
||||
meta, err := ObsClient.GetProjectMeta(stagingMasterPrj)
|
||||
meta, err := obs.GetProjectMeta(stagingMasterPrj)
|
||||
if err == nil {
|
||||
// success, so we use that staging master project as our build project
|
||||
buildPrj = stagingMasterPrj
|
||||
} else {
|
||||
common.LogInfo("error fetching project meta for ", stagingMasterPrj, ". Fall Back to ", buildPrj)
|
||||
meta, err = ObsClient.GetProjectMeta(buildPrj)
|
||||
meta, err = obs.GetProjectMeta(buildPrj)
|
||||
}
|
||||
if err != nil {
|
||||
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
|
||||
@@ -330,10 +333,10 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
|
||||
// stagingProject:$buildProject
|
||||
// ^- stagingProject:$buildProject:$subProjectName (based on templateProject)
|
||||
|
||||
func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject, templateProject, subProjectName string, buildDisableRepos []string) error {
|
||||
func CreateQASubProject(obs common.ObsClientInterface, stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject, templateProject, subProjectName string, buildDisableRepos []string) error {
|
||||
common.LogDebug("Setup QA sub projects")
|
||||
common.LogDebug("reading templateProject ", templateProject)
|
||||
templateMeta, err := ObsClient.GetProjectMeta(templateProject)
|
||||
templateMeta, err := obs.GetProjectMeta(templateProject)
|
||||
if err != nil {
|
||||
common.LogError("error fetching template project meta for", templateProject, ":", err)
|
||||
return err
|
||||
@@ -343,10 +346,10 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
templateMeta.Name = stagingProject + ":" + subProjectName
|
||||
// freeze tag for now
|
||||
if len(templateMeta.ScmSync) > 0 {
|
||||
repository, err := url.Parse(templateMeta.ScmSync)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
repository, err := url.Parse(templateMeta.ScmSync)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
common.LogDebug("getting data for ", repository.EscapedPath())
|
||||
split := strings.Split(repository.EscapedPath(), "/")
|
||||
@@ -354,12 +357,12 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
|
||||
common.LogDebug("getting commit for ", org, " repo ", repo, " fragment ", repository.Fragment)
|
||||
branch, err := gitea.GetCommit(org, repo, repository.Fragment)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// set expanded commit url
|
||||
repository.Fragment = branch.SHA
|
||||
repository.Fragment = branch.SHA
|
||||
templateMeta.ScmSync = repository.String()
|
||||
common.LogDebug("Setting scmsync url to ", templateMeta.ScmSync)
|
||||
}
|
||||
@@ -406,11 +409,11 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
templateMeta.Repositories[idx].Paths[pidx].Project = templateMeta.Name
|
||||
} else
|
||||
// Check for path prefixes against a template project inside of template project area
|
||||
if strings.HasPrefix(path.Project, stagingConfig.StagingProject + ":") {
|
||||
if strings.HasPrefix(path.Project, stagingConfig.StagingProject+":") {
|
||||
newProjectName := stagingProject
|
||||
// find project name
|
||||
for _, setup := range stagingConfig.QA {
|
||||
if setup.Origin == path.Project {
|
||||
if setup.Origin == path.Project {
|
||||
common.LogDebug(" Match:", setup.Origin)
|
||||
newProjectName = newProjectName + ":" + setup.Name
|
||||
common.LogDebug(" New:", newProjectName)
|
||||
@@ -418,14 +421,14 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
}
|
||||
}
|
||||
templateMeta.Repositories[idx].Paths[pidx].Project = newProjectName
|
||||
common.LogDebug(" Matched prefix")
|
||||
common.LogDebug(" Matched prefix")
|
||||
}
|
||||
common.LogDebug(" Path using project ", templateMeta.Repositories[idx].Paths[pidx].Project)
|
||||
}
|
||||
}
|
||||
|
||||
if !IsDryRun {
|
||||
err = ObsClient.SetProjectMeta(templateMeta)
|
||||
err = obs.SetProjectMeta(templateMeta)
|
||||
if err != nil {
|
||||
common.LogError("cannot create project:", templateMeta.Name, err)
|
||||
x, _ := xml.MarshalIndent(templateMeta, "", " ")
|
||||
@@ -439,10 +442,10 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
return nil
|
||||
}
|
||||
|
||||
func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest) (RequestModification, error) {
|
||||
func StartOrUpdateBuild(obs common.ObsClientInterface, config *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest) (RequestModification, error) {
|
||||
common.LogDebug("fetching OBS project Meta")
|
||||
obsPrProject := GetObsProjectAssociatedWithPr(config, ObsClient.HomeProject, pr)
|
||||
meta, err := ObsClient.GetProjectMeta(obsPrProject)
|
||||
obsPrProject := GetObsProjectAssociatedWithPr(config, obs.GetHomeProject(), pr)
|
||||
meta, err := obs.GetProjectMeta(obsPrProject)
|
||||
if err != nil {
|
||||
common.LogError("error fetching project meta for", obsPrProject, ":", err)
|
||||
return RequestModificationNoChange, err
|
||||
@@ -467,7 +470,7 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
|
||||
if meta == nil {
|
||||
// new build
|
||||
common.LogDebug(" Staging master:", config.StagingProject)
|
||||
meta, err = GenerateObsPrjMeta(git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
|
||||
meta, err = GenerateObsPrjMeta(obs, git, gitea, pr, obsPrProject, config.ObsProject, config.StagingProject)
|
||||
if err != nil {
|
||||
return RequestModificationNoChange, err
|
||||
}
|
||||
@@ -479,7 +482,7 @@ func StartOrUpdateBuild(config *common.StagingConfig, git common.Git, gitea comm
|
||||
common.LogDebug("Creating build project:")
|
||||
common.LogDebug(" meta:", string(x))
|
||||
} else {
|
||||
err = ObsClient.SetProjectMeta(meta)
|
||||
err = obs.SetProjectMeta(meta)
|
||||
if err != nil {
|
||||
x, _ := xml.MarshalIndent(meta, "", " ")
|
||||
common.LogDebug(" meta:", string(x))
|
||||
@@ -550,7 +553,7 @@ func ParseNotificationToPR(thread *models.NotificationThread) (org string, repo
|
||||
return
|
||||
}
|
||||
|
||||
func ProcessPullNotification(gitea common.Gitea, thread *models.NotificationThread) {
|
||||
func ProcessPullNotification(obs common.ObsClientInterface, gitea common.Gitea, thread *models.NotificationThread) {
|
||||
defer func() {
|
||||
err := recover()
|
||||
if err != nil {
|
||||
@@ -566,7 +569,7 @@ func ProcessPullNotification(gitea common.Gitea, thread *models.NotificationThre
|
||||
}
|
||||
common.LogInfo("processing PR:", org, "/", repo, "#", num)
|
||||
|
||||
done, err := ProcessPullRequest(gitea, org, repo, num)
|
||||
done, err := ProcessPullRequest(obs, gitea, org, repo, num)
|
||||
if !IsDryRun && err == nil && done {
|
||||
gitea.SetNotificationRead(thread.ID)
|
||||
} else if err != nil {
|
||||
@@ -576,7 +579,7 @@ func ProcessPullNotification(gitea common.Gitea, thread *models.NotificationThre
|
||||
|
||||
var CleanedUpIssues []int64 = []int64{}
|
||||
|
||||
func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThread) (CleanupComplete bool) {
|
||||
func CleanupPullNotification(obs common.ObsClientInterface, gitea common.Gitea, thread *models.NotificationThread) (CleanupComplete bool) {
|
||||
defer func() {
|
||||
err := recover()
|
||||
if err != nil {
|
||||
@@ -643,8 +646,8 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
|
||||
return false
|
||||
}
|
||||
|
||||
stagingProject := GetObsProjectAssociatedWithPr(config, ObsClient.HomeProject, pr)
|
||||
if prj, err := ObsClient.GetProjectMeta(stagingProject); err != nil {
|
||||
stagingProject := GetObsProjectAssociatedWithPr(config, obs.GetHomeProject(), pr)
|
||||
if prj, err := obs.GetProjectMeta(stagingProject); err != nil {
|
||||
common.LogError("Failed fetching meta for project:", stagingProject, ". Not cleaning up")
|
||||
return false
|
||||
} else if prj == nil && err == nil {
|
||||
@@ -658,13 +661,13 @@ func CleanupPullNotification(gitea common.Gitea, thread *models.NotificationThre
|
||||
project := stagingProject + ":" + qa.Name
|
||||
common.LogDebug("Cleaning up QA staging", project)
|
||||
if !IsDryRun {
|
||||
if err := ObsClient.DeleteProject(project); err != nil {
|
||||
if err := obs.DeleteProject(project); err != nil {
|
||||
common.LogError("Failed to cleanup QA staging", project, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
if !IsDryRun {
|
||||
if err := ObsClient.DeleteProject(stagingProject); err != nil {
|
||||
if err := obs.DeleteProject(stagingProject); err != nil {
|
||||
common.LogError("Failed to cleanup staging", stagingProject, err)
|
||||
}
|
||||
}
|
||||
@@ -685,7 +688,7 @@ func SetStatus(gitea common.Gitea, org, repo, hash string, status *models.Commit
|
||||
return err
|
||||
}
|
||||
|
||||
func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64, msg string) {
|
||||
func CommentPROnce(gitea common.Gitea, org string, repo string, prNum int64, msg string) {
|
||||
if IsDryRun {
|
||||
common.LogInfo("Would comment on package PR %s/%s#%d: %s", org, repo, prNum, msg)
|
||||
return
|
||||
@@ -697,6 +700,18 @@ func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64
|
||||
return
|
||||
}
|
||||
|
||||
timeline, err := gitea.GetTimeline(org, repo, prNum)
|
||||
if err != nil {
|
||||
common.LogError("Failed to get timeline for PR %s/%s#%d: %v", org, repo, prNum, err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, t := range timeline {
|
||||
if t.User != nil && t.User.UserName == BotUser && t.Type == common.TimelineCommentType_Comment && t.Body == msg {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
err = gitea.AddComment(pr, msg)
|
||||
if err != nil {
|
||||
common.LogError("Failed to comment on package PR %s/%s#%d: %v", org, repo, prNum, err)
|
||||
@@ -704,7 +719,7 @@ func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64
|
||||
}
|
||||
|
||||
// Create and remove QA projects
|
||||
func ProcessQaProjects(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject string) []string {
|
||||
func ProcessQaProjects(obs common.ObsClientInterface, stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject string) ([]string, string) {
|
||||
usedQAprojects := make([]string, 0)
|
||||
prLabelNames := make(map[string]int)
|
||||
for _, label := range pr.Labels {
|
||||
@@ -717,7 +732,7 @@ func ProcessQaProjects(stagingConfig *common.StagingConfig, git common.Git, gite
|
||||
if _, ok := prLabelNames[setup.Label]; !ok {
|
||||
if !IsDryRun {
|
||||
// blindly remove, will fail when not existing
|
||||
ObsClient.DeleteProject(QAproject)
|
||||
obs.DeleteProject(QAproject)
|
||||
}
|
||||
common.LogInfo("QA project ", setup.Name, "has no matching Label")
|
||||
continue
|
||||
@@ -726,24 +741,22 @@ func ProcessQaProjects(stagingConfig *common.StagingConfig, git common.Git, gite
|
||||
|
||||
usedQAprojects = append(usedQAprojects, QAproject)
|
||||
// check for existens first, no error, but no meta is a 404
|
||||
if meta, err := ObsClient.GetProjectMeta(QAproject); meta == nil && err == nil {
|
||||
if meta, err := obs.GetProjectMeta(QAproject); meta == nil && err == nil {
|
||||
common.LogInfo("Create QA project ", QAproject)
|
||||
CreateQASubProject(stagingConfig, git, gitea, pr,
|
||||
CreateQASubProject(obs, stagingConfig, git, gitea, pr,
|
||||
stagingProject,
|
||||
setup.Origin,
|
||||
setup.Name,
|
||||
setup.BuildDisableRepos)
|
||||
msg = msg + "QA Project added: " + ObsWebHost + "/project/show/" +
|
||||
QAproject + "\n"
|
||||
msg = msg + "QA Project added: " + ObsWebHost + "/project/show/" +
|
||||
QAproject + "\n"
|
||||
}
|
||||
}
|
||||
if len(msg) > 1 {
|
||||
gitea.AddComment(pr, msg)
|
||||
}
|
||||
return usedQAprojects
|
||||
|
||||
return usedQAprojects, msg
|
||||
}
|
||||
|
||||
func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, error) {
|
||||
func ProcessPullRequest(obs common.ObsClientInterface, gitea common.Gitea, org, repo string, id int64) (bool, error) {
|
||||
dir, err := os.MkdirTemp(os.TempDir(), BotName)
|
||||
common.PanicOnError(err)
|
||||
if IsDryRun {
|
||||
@@ -752,7 +765,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
defer os.RemoveAll(dir)
|
||||
}
|
||||
|
||||
gh, err := common.AllocateGitWorkTree(dir, GitAuthor, "noaddress@suse.de")
|
||||
gh, err := GitWorkTreeAllocate(dir, GitAuthor, "noaddress@suse.de")
|
||||
common.PanicOnError(err)
|
||||
|
||||
git, err := gh.CreateGitHandler(org)
|
||||
@@ -797,7 +810,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
if err != nil {
|
||||
common.LogError("Staging config", common.StagingConfigFile, "not found in PR to the project. Aborting.")
|
||||
if !IsDryRun {
|
||||
_, err = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
|
||||
_, _ = gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot find project config in PR: "+common.ProjectConfigFile)
|
||||
}
|
||||
return true, err
|
||||
}
|
||||
@@ -817,7 +830,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
return true, nil
|
||||
}
|
||||
|
||||
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
|
||||
meta, err := obs.GetProjectMeta(stagingConfig.ObsProject)
|
||||
if err != nil || meta == nil {
|
||||
common.LogError("Cannot find reference project meta:", stagingConfig.ObsProject, err)
|
||||
if !IsDryRun && err == nil {
|
||||
@@ -857,6 +870,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
l := len(stagingConfig.ObsProject)
|
||||
if l >= len(stagingConfig.StagingProject) || stagingConfig.ObsProject != stagingConfig.StagingProject[0:l] {
|
||||
common.LogError("StagingProject (", stagingConfig.StagingProject, ") is not child of target project", stagingConfig.ObsProject)
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
@@ -945,8 +959,8 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
}
|
||||
|
||||
common.LogDebug("ObsProject:", stagingConfig.ObsProject)
|
||||
stagingProject := GetObsProjectAssociatedWithPr(stagingConfig, ObsClient.HomeProject, pr)
|
||||
change, err := StartOrUpdateBuild(stagingConfig, git, gitea, pr)
|
||||
stagingProject := GetObsProjectAssociatedWithPr(stagingConfig, obs.GetHomeProject(), pr)
|
||||
change, err := StartOrUpdateBuild(obs, stagingConfig, git, gitea, pr)
|
||||
status := &models.CommitStatus{
|
||||
Context: BotName,
|
||||
Description: "OBS Staging build",
|
||||
@@ -977,11 +991,8 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
|
||||
SetStatus(gitea, org, repo, pr.Head.Sha, status)
|
||||
}
|
||||
if change != RequestModificationNoChange && !IsDryRun {
|
||||
gitea.AddComment(pr, msg)
|
||||
}
|
||||
|
||||
stagingResult, err := ObsClient.BuildStatus(stagingProject)
|
||||
stagingResult, err := obs.BuildStatus(stagingProject)
|
||||
if err != nil {
|
||||
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
|
||||
}
|
||||
@@ -989,7 +1000,12 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
_, packagePRs := common.ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
|
||||
|
||||
// always update QA projects because Labels can change
|
||||
qaProjects := ProcessQaProjects(stagingConfig, git, gitea, pr, stagingProject)
|
||||
qaProjects, qaProjectMsg := ProcessQaProjects(obs, stagingConfig, git, gitea, pr, stagingProject)
|
||||
|
||||
if change != RequestModificationNoChange && !IsDryRun {
|
||||
msg += qaProjectMsg
|
||||
CommentPROnce(gitea, org, repo, id, msg)
|
||||
}
|
||||
|
||||
done := false
|
||||
overallBuildStatus := ProcessBuildStatus(stagingResult)
|
||||
@@ -997,7 +1013,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
if len(qaProjects) > 0 && overallBuildStatus == BuildStatusSummarySuccess {
|
||||
seperator := " in "
|
||||
for _, qaProject := range qaProjects {
|
||||
qaResult, err := ObsClient.BuildStatus(qaProject)
|
||||
qaResult, err := obs.BuildStatus(qaProject)
|
||||
if err != nil {
|
||||
common.LogError("failed fetching stage project status for", qaProject, ":", err)
|
||||
}
|
||||
@@ -1057,7 +1073,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
default:
|
||||
continue
|
||||
}
|
||||
commentOnPackagePR(gitea, packagePR.Org, packagePR.Repo, packagePR.Num, msg)
|
||||
CommentPROnce(gitea, packagePR.Org, packagePR.Repo, packagePR.Num, msg)
|
||||
}
|
||||
|
||||
if len(missingPkgs) > 0 {
|
||||
@@ -1067,10 +1083,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
msg = msg + " - " + pkg + "\n"
|
||||
}
|
||||
common.LogInfo(msg)
|
||||
err := gitea.AddComment(pr, msg)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
}
|
||||
CommentPROnce(gitea, org, repo, id, msg)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1089,8 +1102,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func PollWorkNotifications(giteaUrl string) {
|
||||
gitea := common.AllocateGiteaTransport(giteaUrl)
|
||||
func PollWorkNotifications(obs common.ObsClientInterface, gitea common.Gitea) {
|
||||
data, err := gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
|
||||
|
||||
if err != nil {
|
||||
@@ -1106,7 +1118,7 @@ func PollWorkNotifications(giteaUrl string) {
|
||||
if !ListPullNotificationsOnly {
|
||||
switch notification.Subject.Type {
|
||||
case "Pull":
|
||||
ProcessPullNotification(gitea, notification)
|
||||
ProcessPullNotification(obs, gitea, notification)
|
||||
default:
|
||||
if !IsDryRun {
|
||||
gitea.SetNotificationRead(notification.ID)
|
||||
@@ -1129,7 +1141,7 @@ func PollWorkNotifications(giteaUrl string) {
|
||||
continue
|
||||
}
|
||||
|
||||
cleanupFinished = CleanupPullNotification(gitea, n) && cleanupFinished
|
||||
cleanupFinished = CleanupPullNotification(obs, gitea, n) && cleanupFinished
|
||||
}
|
||||
} else if err != nil {
|
||||
common.LogError(err)
|
||||
@@ -1143,7 +1155,8 @@ var ObsApiHost string
|
||||
var ObsWebHost string
|
||||
var IsDryRun bool
|
||||
var ProcessPROnly string
|
||||
var ObsClient *common.ObsClient
|
||||
var ObsClient common.ObsClientInterface
|
||||
var BotUser string
|
||||
|
||||
func ObsWebHostFromApiHost(apihost string) string {
|
||||
u, err := url.Parse(apihost)
|
||||
@@ -1208,9 +1221,18 @@ func main() {
|
||||
}
|
||||
|
||||
if len(*buildRoot) > 0 {
|
||||
ObsClient.HomeProject = *buildRoot
|
||||
ObsClient.SetHomeProject(*buildRoot)
|
||||
}
|
||||
|
||||
gitea := common.AllocateGiteaTransport(GiteaUrl)
|
||||
|
||||
user, err := gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
common.LogError("Cannot fetch current user:", err)
|
||||
return
|
||||
}
|
||||
BotUser = user.UserName
|
||||
|
||||
if len(*ProcessPROnly) > 0 {
|
||||
rx := regexp.MustCompile("^([^/#]+)/([^/#]+)#([0-9]+)$")
|
||||
m := rx.FindStringSubmatch(*ProcessPROnly)
|
||||
@@ -1219,15 +1241,14 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
gitea := common.AllocateGiteaTransport(GiteaUrl)
|
||||
id, _ := strconv.ParseInt(m[3], 10, 64)
|
||||
|
||||
ProcessPullRequest(gitea, m[1], m[2], id)
|
||||
ProcessPullRequest(ObsClient, gitea, m[1], m[2], id)
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
PollWorkNotifications(GiteaUrl)
|
||||
PollWorkNotifications(ObsClient, gitea)
|
||||
common.LogInfo("Poll cycle finished")
|
||||
time.Sleep(5 * time.Minute)
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
4
systemd/workflow-direct.target
Normal file
4
systemd/workflow-direct.target
Normal file
@@ -0,0 +1,4 @@
|
||||
[Unit]
|
||||
Description=Autogits Workflow Direct instances
|
||||
Documentation=https://src.opensuse.org/git-workflow/autogits
|
||||
|
||||
@@ -5,15 +5,20 @@ After=network-online.target
|
||||
[Service]
|
||||
Type=exec
|
||||
ExecStart=/usr/bin/workflow-direct
|
||||
EnvironmentFile=-/etc/default/%i/workflow-direct.env
|
||||
DynamicUser=yes
|
||||
EnvironmentFile=/etc/default/%i/workflow-direct.env
|
||||
#DynamicUser=yes
|
||||
NoNewPrivileges=yes
|
||||
ProtectSystem=strict
|
||||
RuntimeDirectory=%i
|
||||
# DynamicUser does not work as we cannot seem to be able to put SSH keyfiles into the temp home that are readable by SSH
|
||||
# Also, systemd override is needed away to assign User to run this. This should be dependent per instance.
|
||||
ProtectHome=no
|
||||
PrivateTmp=yes
|
||||
# RuntimeDirectory=%i
|
||||
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
|
||||
BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
|
||||
WorkingDirectory=%t/%i
|
||||
# BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
|
||||
# WorkingDirectory=%t/%i
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
WantedBy=workflow-direct.target
|
||||
|
||||
|
||||
5
systemd/workflow-pr.target
Normal file
5
systemd/workflow-pr.target
Normal file
@@ -0,0 +1,5 @@
|
||||
[Unit]
|
||||
Description=Autogits Workflow PR instances
|
||||
Documentation=https://src.opensuse.org/git-workflow/autogits
|
||||
|
||||
|
||||
24
systemd/workflow-pr@.service
Normal file
24
systemd/workflow-pr@.service
Normal file
@@ -0,0 +1,24 @@
|
||||
[Unit]
|
||||
Description=WorkflowPR git bot for %i
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=exec
|
||||
ExecStart=/usr/bin/workflow-pr
|
||||
EnvironmentFile=/etc/default/%i/workflow-pr.env
|
||||
#DynamicUser=yes
|
||||
NoNewPrivileges=yes
|
||||
ProtectSystem=strict
|
||||
# DynamicUser does not work as we cannot seem to be able to put SSH keyfiles into the temp home that are readable by SSH
|
||||
# Also, systemd override is needed away to assign User to run this. This should be dependent per instance.
|
||||
ProtectHome=no
|
||||
PrivateTmp=yes
|
||||
# RuntimeDirectory=%i
|
||||
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
|
||||
# BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
|
||||
# WorkingDirectory=%t/%i
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
WantedBy=workflow-pr.target
|
||||
|
||||
@@ -37,8 +37,10 @@ Main Tasks
|
||||
| ManualMergeOnly | true | Both PackageGit PR and ProjectGit PR are merged upon an allowed package maintainer or project maintainer commenting “merge ok” in the PackageGit PR. |
|
||||
| ManualMergeOnly and ManualMergeProject | false | Both ProjectGit and PackageGit PRs are merged as soon as all reviews are completed in both PrjGit and PkgGit PRs. |
|
||||
|
||||
Config file
|
||||
-----------
|
||||
Project specific config file
|
||||
----------------------------
|
||||
|
||||
This is the ProjectGit config file. For runtime config file, see bottom.
|
||||
|
||||
* Filename: `workflow.config`
|
||||
* Location: ProjectGit
|
||||
@@ -156,8 +158,44 @@ NOTE: Project Maintainers have these permissions automatically.
|
||||
Server configuration
|
||||
--------------------------
|
||||
|
||||
**Configuration file:**
|
||||
The configuration file is a JSON file that consists of a list of project git locations
|
||||
that are then consulted for their `workflow.config` config files.
|
||||
|
||||
```
|
||||
[]ProjectGit = {
|
||||
"org" | "org/repo" | "org/repo#branch"
|
||||
}
|
||||
|
||||
default repo = _ObsPrj
|
||||
default branch = as specified in Gitea
|
||||
```
|
||||
|
||||
For example,
|
||||
|
||||
```
|
||||
[ "org", "openSUSE/Leap", "openSUSE/Leap#16.0" ]
|
||||
```
|
||||
|
||||
Are all valid entries. These are then resolved to,
|
||||
|
||||
* For `org`, it's assumed that default repository of `_ObsPrj` in `org` organization and using Gitea's default branch
|
||||
* For `openSUSE/Leap`, the repository "Leap" using Gitea's default branch in `openSUSE` organization.
|
||||
* For `openSUSE/Leap#16.0`, the repository "Leap" with branch "16.0" in `openSUSE` organization.
|
||||
|
||||
For each of these project gits, `workflow.config` is read.
|
||||
|
||||
|
||||
**Runtime Options**
|
||||
|
||||
| Option | Default | Environmental Default | Notes |
|
||||
|---------------|----------------------------|-----------------------|------------------------------------|
|
||||
| git-author | AutoGits PR Review Bot | AUTOGITS_GIT_AUTHOR | Name of author for bot created commits |
|
||||
| git-email | noone@suse.de | AUTOGITS_GIT_EMAIL | Email for the bot created commits |
|
||||
| config | | AUTOGITS_CONFIG | Path to above config file |
|
||||
| gitea-url | https://src.opensuse.org | AUTOGITS_GITEA_URL | Gitea's URL instance |
|
||||
| rabbit-url | amqps://rabbit.opensuse.org| AUTOGITS_RABBIT_URL | RabbitMQ's URL instance |
|
||||
| debug | false | AUTOGITS_DEBUG | Extra logging |
|
||||
| check-on-start| false | AUTOGITS_CHECK_ON_START| Whether to check all projects for consistency on start. Can take a while |
|
||||
| check-interval| 5 | | Consistency check interval |
|
||||
| repo-path | Uses temp directory | AUTOGITS_REPO_PATH | Path where to store repositories. |
|
||||
|
||||
| Field | Type | Notes |
|
||||
| ----- | ----- | ----- |
|
||||
| root | Array of string | Format **org/repo\#branch** |
|
||||
|
||||
@@ -45,22 +45,29 @@ var GitHandler common.GitHandlerGenerator
|
||||
var Gitea common.Gitea
|
||||
|
||||
func main() {
|
||||
flag.StringVar(&GitAuthor, "git-author", "AutoGits PR Review Bot", "Git commit author")
|
||||
flag.StringVar(&GitEmail, "git-email", "amajer+devel-git@suse.de", "Git commit email")
|
||||
flag.StringVar(&GitAuthor, "git-author", common.GetEnvOverrideString(os.Getenv("AUTOGITS_GIT_AUTHOR"), "AutoGits PR Review Bot"), "Git commit author")
|
||||
flag.StringVar(&GitEmail, "git-email", common.GetEnvOverrideString(os.Getenv("AUTOGITS_GIT_EMAIL"), "noone@suse.de"), "Git commit email")
|
||||
|
||||
workflowConfig := flag.String("config", "", "Repository and workflow definition file")
|
||||
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
debugMode := flag.Bool("debug", false, "Extra debugging information")
|
||||
checkOnStart := flag.Bool("check-on-start", false, "Check all repositories for consistency on start, without delays")
|
||||
workflowConfig := flag.String("config", common.GetEnvOverrideString(os.Getenv("AUTOGITS_CONFIG"), ""), "Repository and workflow definition file")
|
||||
giteaUrl := flag.String("gitea-url", common.GetEnvOverrideString(os.Getenv("AUTOGITS_GITEA_URL"), "https://src.opensuse.org"), "Gitea instance")
|
||||
|
||||
legacyRabbitUrl := flag.String("url", "", "Legacy. Use rabbit-url") /* TO BE REMOVED */
|
||||
rabbitUrl := flag.String("rabbit-url", common.GetEnvOverrideString(os.Getenv("AUTOGITS_RABBIT_URL"), "amqps://rabbit.opensuse.org"), "URL for RabbitMQ instance")
|
||||
|
||||
debugMode := flag.Bool("debug", common.GetEnvOverrideBool(os.Getenv("AUTOGITS_DEBUG"), false), "Extra debugging information")
|
||||
checkOnStart := flag.Bool("check-on-start", common.GetEnvOverrideBool(os.Getenv("AUTOGITS_CHECK_ON_START"), false), "Check all repositories for consistency on start, without delays")
|
||||
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consitency, in hours")
|
||||
flag.BoolVar(&ListPROnly, "list-prs-only", false, "Only lists PRs without acting on them")
|
||||
flag.Int64Var(&PRID, "id", -1, "Process only the specific ID and ignore the rest. Use for debugging")
|
||||
basePath := flag.String("repo-path", "", "Repository path. Default is temporary directory")
|
||||
basePath := flag.String("repo-path", common.GetEnvOverrideString(os.Getenv("AUTOGITS_REPO_PATH"), ""), "Repository path. Default is temporary directory")
|
||||
pr := flag.String("only-pr", "", "Only specific PR to process. For debugging")
|
||||
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry mode. Do not push changes to remote repo.")
|
||||
flag.Parse()
|
||||
|
||||
if len(*legacyRabbitUrl) > 0 {
|
||||
*rabbitUrl = *legacyRabbitUrl
|
||||
}
|
||||
|
||||
common.SetLoggingLevel(common.LogLevelInfo)
|
||||
if *debugMode {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
|
||||
Reference in New Issue
Block a user