Compare commits

3 Commits

| Author | SHA256 | Date |
|---|---|---|
| | 66a2c0565e | |
| | d5d6910906 | |
| | 444959540a | |
@@ -23,6 +23,7 @@ jobs:
- run: git checkout FETCH_HEAD
- run: go generate -C common
- run: go generate -C workflow-pr
- run: go generate -C workflow-pr/interfaces
- run: git add -N .; git diff
- run: |
status=$(git status --short)
@@ -12,6 +12,7 @@ jobs:
- run: git checkout FETCH_HEAD
- run: go generate -C common
- run: go generate -C workflow-pr
- run: go generate -C workflow-pr/interfaces
- run: |
host=${{ gitea.server_url }}
host=${host#https://}
.gitignore (vendored, 7 changed lines)
@@ -1,7 +1,4 @@
*.osc
*.conf
/integration/gitea-data
/integration/gitea-logs
/integration/rabbitmq-data
/integration/workflow-pr-repos
__pycache__/
utils/gitmodules-automerge/gitmodules-automerge
utils/hujson/hujson
Makefile (4 changed lines)
@@ -1,4 +0,0 @@
MODULES := devel-importer utils/hujson utils/maintainer-update gitea-events-rabbitmq-publisher gitea_status_proxy group-review obs-forward-bot obs-staging-bot obs-status-service workflow-direct workflow-pr

build:
for m in $(MODULES); do go build -C $$m -buildmode=pie || exit 1 ; done
@@ -17,7 +17,7 @@
|
||||
|
||||
|
||||
Name: autogits
|
||||
Version: 1
|
||||
Version: 0
|
||||
Release: 0
|
||||
Summary: GitWorkflow utilities
|
||||
License: GPL-2.0-or-later
|
||||
@@ -41,7 +41,6 @@ Command-line tool to import devel projects from obs to git
|
||||
|
||||
%package doc
|
||||
Summary: Common documentation files
|
||||
BuildArch: noarch
|
||||
|
||||
%description -n autogits-doc
|
||||
Common documentation files
|
||||
@@ -57,11 +56,10 @@ with a topic
|
||||
|
||||
|
||||
%package gitea-status-proxy
|
||||
Summary: Proxy for setting commit status in Gitea
|
||||
Summary: gitea-status-proxy
|
||||
|
||||
%description gitea-status-proxy
|
||||
Setting commit status requires code write access token. This proxy
|
||||
is middleware that delegates status setting without access to other APIs
|
||||
|
||||
|
||||
%package group-review
|
||||
Summary: Reviews of groups defined in ProjectGit
|
||||
@@ -98,6 +96,7 @@ Provides: /usr/bin/hujson
|
||||
|
||||
%description utils
|
||||
HuJSON to JSON parser, using stdin -> stdout pipe
|
||||
gitmodules-automerge fixes conflicts in .gitmodules conflicts during merge
|
||||
|
||||
|
||||
%package workflow-direct
|
||||
@@ -130,7 +129,7 @@ go build \
|
||||
-C utils/hujson \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C utils/maintainer-update \
|
||||
-C utils/gitmodules-automerge \
|
||||
-buildmode=pie
|
||||
go build \
|
||||
-C gitea-events-rabbitmq-publisher \
|
||||
@@ -163,7 +162,6 @@ go test -C group-review -v
|
||||
go test -C obs-staging-bot -v
|
||||
go test -C obs-status-service -v
|
||||
go test -C workflow-direct -v
|
||||
go test -C utils/maintainer-update
|
||||
# TODO build fails
|
||||
#go test -C workflow-pr -v
|
||||
|
||||
@@ -173,18 +171,15 @@ install -D -m0755 gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publishe
|
||||
install -D -m0644 systemd/gitea-events-rabbitmq-publisher.service %{buildroot}%{_unitdir}/gitea-events-rabbitmq-publisher.service
|
||||
install -D -m0755 gitea_status_proxy/gitea_status_proxy %{buildroot}%{_bindir}/gitea_status_proxy
|
||||
install -D -m0755 group-review/group-review %{buildroot}%{_bindir}/group-review
|
||||
install -D -m0644 systemd/group-review@.service %{buildroot}%{_unitdir}/group-review@.service
|
||||
install -D -m0755 obs-forward-bot/obs-forward-bot %{buildroot}%{_bindir}/obs-forward-bot
|
||||
install -D -m0755 obs-staging-bot/obs-staging-bot %{buildroot}%{_bindir}/obs-staging-bot
|
||||
install -D -m0644 systemd/obs-staging-bot.service %{buildroot}%{_unitdir}/obs-staging-bot.service
|
||||
install -D -m0755 obs-status-service/obs-status-service %{buildroot}%{_bindir}/obs-status-service
|
||||
install -D -m0644 systemd/obs-status-service.service %{buildroot}%{_unitdir}/obs-status-service.service
|
||||
install -D -m0755 workflow-direct/workflow-direct %{buildroot}%{_bindir}/workflow-direct
|
||||
install -D -m0644 systemd/workflow-direct@.service %{buildroot}%{_unitdir}/workflow-direct@.service
|
||||
install -D -m0755 workflow-pr/workflow-pr %{buildroot}%{_bindir}/workflow-pr
|
||||
install -D -m0644 systemd/workflow-pr@.service %{buildroot}%{_unitdir}/workflow-pr@.service
|
||||
install -D -m0755 utils/hujson/hujson %{buildroot}%{_bindir}/hujson
|
||||
install -D -m0755 utils/maintainer-update/maintainer-update %{buildroot}%{_bindir}/maintainer-update
|
||||
install -D -m0755 utils/gitmodules-automerge/gitmodules-automerge %{buildroot}%{_bindir}/gitmodules-automerge
|
||||
|
||||
%pre gitea-events-rabbitmq-publisher
|
||||
%service_add_pre gitea-events-rabbitmq-publisher.service
|
||||
@@ -198,18 +193,6 @@ install -D -m0755 utils/maintainer-update/maintainer-update
|
||||
%postun gitea-events-rabbitmq-publisher
|
||||
%service_del_postun gitea-events-rabbitmq-publisher.service
|
||||
|
||||
%pre group-review
|
||||
%service_add_pre group-review@.service
|
||||
|
||||
%post group-review
|
||||
%service_add_post group-review@.service
|
||||
|
||||
%preun group-review
|
||||
%service_del_preun group-review@.service
|
||||
|
||||
%postun group-review
|
||||
%service_del_postun group-review@.service
|
||||
|
||||
%pre obs-staging-bot
|
||||
%service_add_pre obs-staging-bot.service
|
||||
|
||||
@@ -234,30 +217,6 @@ install -D -m0755 utils/maintainer-update/maintainer-update
|
||||
%postun obs-status-service
|
||||
%service_del_postun obs-status-service.service
|
||||
|
||||
%pre workflow-direct
|
||||
%service_add_pre workflow-direct.service
|
||||
|
||||
%post workflow-direct
|
||||
%service_add_post workflow-direct.service
|
||||
|
||||
%preun workflow-direct
|
||||
%service_del_preun workflow-direct.service
|
||||
|
||||
%postun workflow-direct
|
||||
%service_del_postun workflow-direct.service
|
||||
|
||||
%pre workflow-pr
|
||||
%service_add_pre workflow-pr.service
|
||||
|
||||
%post workflow-pr
|
||||
%service_add_post workflow-pr.service
|
||||
|
||||
%preun workflow-pr
|
||||
%service_del_preun workflow-pr.service
|
||||
|
||||
%postun workflow-pr
|
||||
%service_del_postun workflow-pr.service
|
||||
|
||||
%files devel-importer
|
||||
%license COPYING
|
||||
%doc devel-importer/README.md
|
||||
@@ -282,7 +241,6 @@ install -D -m0755 utils/maintainer-update/maintainer-update
|
||||
%license COPYING
|
||||
%doc group-review/README.md
|
||||
%{_bindir}/group-review
|
||||
%{_unitdir}/group-review@.service
|
||||
|
||||
%files obs-forward-bot
|
||||
%license COPYING
|
||||
@@ -303,17 +261,15 @@ install -D -m0755 utils/maintainer-update/maintainer-update
|
||||
%files utils
|
||||
%license COPYING
|
||||
%{_bindir}/hujson
|
||||
%{_bindir}/maintainer-update
|
||||
%{_bindir}/gitmodules-automerge
|
||||
|
||||
%files workflow-direct
|
||||
%license COPYING
|
||||
%doc workflow-direct/README.md
|
||||
%{_bindir}/workflow-direct
|
||||
%{_unitdir}/workflow-direct@.service
|
||||
|
||||
%files workflow-pr
|
||||
%license COPYING
|
||||
%doc workflow-pr/README.md
|
||||
%{_bindir}/workflow-pr
|
||||
%{_unitdir}/workflow-pr@.service
|
||||
|
||||
|
||||
@@ -54,8 +54,6 @@ type ReviewGroup struct {
|
||||
type QAConfig struct {
|
||||
Name string
|
||||
Origin string
|
||||
Label string // requires this gitea label to be set or skipped
|
||||
BuildDisableRepos []string // which repos to build disable in the new project
|
||||
}
|
||||
|
||||
type Permissions struct {
|
||||
@@ -63,20 +61,6 @@ type Permissions struct {
|
||||
Members []string
|
||||
}
|
||||
|
||||
const (
|
||||
Label_StagingAuto = "staging/Auto"
|
||||
Label_ReviewPending = "review/Pending"
|
||||
Label_ReviewDone = "review/Done"
|
||||
)
|
||||
|
||||
func LabelKey(tag_value string) string {
|
||||
// capitalize first letter and remove /
|
||||
if len(tag_value) == 0 {
|
||||
return ""
|
||||
}
|
||||
return strings.ToUpper(tag_value[0:1]) + strings.ReplaceAll(tag_value[1:], "/", "")
|
||||
}
|
||||
|
||||
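For reference, the LabelKey helper in this hunk turns a label such as "staging/Auto" into the map key "StagingAuto" by upper-casing the first byte and stripping slashes (matching TestLabelKey further down in this diff). A standalone sketch of the same transformation:

```go
package main

import (
	"fmt"
	"strings"
)

// labelKey mirrors common.LabelKey: capitalize the first byte, drop every "/".
func labelKey(v string) string {
	if len(v) == 0 {
		return ""
	}
	return strings.ToUpper(v[0:1]) + strings.ReplaceAll(v[1:], "/", "")
}

func main() {
	// Prints: StagingAuto ReviewPending FooBar
	fmt.Println(labelKey("staging/Auto"), labelKey("review/Pending"), labelKey("foo/Bar"))
}
```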
type AutogitConfig struct {
|
||||
Workflows []string // [pr, direct, test]
|
||||
Organization string
|
||||
@@ -88,12 +72,9 @@ type AutogitConfig struct {
|
||||
Committers []string // group in addition to Reviewers and Maintainers that can order the bot around, mostly as helper for factory-maintainers
|
||||
Subdirs []string // list of directories to sort submodules into. Needed b/c _manifest cannot list non-existent directories
|
||||
|
||||
Labels map[string]string // list of tags, if not default, to apply
|
||||
|
||||
NoProjectGitPR bool // do not automatically create project git PRs, just assign reviewers and assume something else creates the ProjectGit PR
|
||||
ManualMergeOnly bool // only merge with "Merge OK" comment by Project Maintainers and/or Package Maintainers and/or reviewers
|
||||
ManualMergeProject bool // require merge of ProjectGit PRs with "Merge OK" by ProjectMaintainers and/or reviewers
|
||||
ReviewRequired bool // always require a maintainer review, even if maintainer submits it. Only ignored if no other package or project reviewers
|
||||
}
|
||||
|
||||
type AutogitConfigs []*AutogitConfig
|
||||
@@ -207,8 +188,6 @@ func (configs AutogitConfigs) GetPrjGitConfig(org, repo, branch string) *Autogit
|
||||
if c.GitProjectName == prjgit {
|
||||
return c
|
||||
}
|
||||
}
|
||||
for _, c := range configs {
|
||||
if c.Organization == org && c.Branch == branch {
|
||||
return c
|
||||
}
|
||||
@@ -294,14 +273,6 @@ func (config *AutogitConfig) GetRemoteBranch() string {
|
||||
return "origin_" + config.Branch
|
||||
}
|
||||
|
||||
func (config *AutogitConfig) Label(label string) string {
|
||||
if t, found := config.Labels[LabelKey(label)]; found {
|
||||
return t
|
||||
}
|
||||
|
||||
return label
|
||||
}
|
||||
|
||||
type StagingConfig struct {
|
||||
ObsProject string
|
||||
RebuildAll bool
|
||||
|
||||
@@ -10,67 +10,6 @@ import (
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestLabelKey(t *testing.T) {
|
||||
tests := map[string]string{
|
||||
"": "",
|
||||
"foo": "Foo",
|
||||
"foo/bar": "Foobar",
|
||||
"foo/Bar": "FooBar",
|
||||
}
|
||||
|
||||
for k, v := range tests {
|
||||
if c := common.LabelKey(k); c != v {
|
||||
t.Error("expected", v, "got", c, "input", k)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigLabelParser(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
json string
|
||||
label_value string
|
||||
}{
|
||||
{
|
||||
name: "empty",
|
||||
json: "{}",
|
||||
label_value: "path/String",
|
||||
},
|
||||
{
|
||||
name: "defined",
|
||||
json: `{"Labels": {"foo": "bar", "PathString": "moo/Label"}}`,
|
||||
label_value: "moo/Label",
|
||||
},
|
||||
{
|
||||
name: "undefined",
|
||||
json: `{"Labels": {"foo": "bar", "NotPathString": "moo/Label"}}`,
|
||||
label_value: "path/String",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
repo := models.Repository{
|
||||
DefaultBranch: "master",
|
||||
}
|
||||
|
||||
ctl := gomock.NewController(t)
|
||||
gitea := mock_common.NewMockGiteaFileContentAndRepoFetcher(ctl)
|
||||
gitea.EXPECT().GetRepositoryFileContent("foo", "bar", "", "workflow.config").Return([]byte(test.json), "abc", nil)
|
||||
gitea.EXPECT().GetRepository("foo", "bar").Return(&repo, nil)
|
||||
|
||||
config, err := common.ReadWorkflowConfig(gitea, "foo/bar")
|
||||
if err != nil || config == nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if l := config.Label("path/String"); l != test.label_value {
|
||||
t.Error("Expecting", test.label_value, "got", l)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestProjectConfigMatcher(t *testing.T) {
|
||||
configs := common.AutogitConfigs{
|
||||
{
|
||||
@@ -82,15 +21,6 @@ func TestProjectConfigMatcher(t *testing.T) {
|
||||
Branch: "main",
|
||||
GitProjectName: "test/prjgit#main",
|
||||
},
|
||||
{
|
||||
Organization: "test",
|
||||
Branch: "main",
|
||||
GitProjectName: "test/bar#never_match",
|
||||
},
|
||||
{
|
||||
Organization: "test",
|
||||
GitProjectName: "test/bar#main",
|
||||
},
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
@@ -120,20 +50,6 @@ func TestProjectConfigMatcher(t *testing.T) {
|
||||
branch: "main",
|
||||
config: 1,
|
||||
},
|
||||
{
|
||||
name: "prjgit only match",
|
||||
org: "test",
|
||||
repo: "bar",
|
||||
branch: "main",
|
||||
config: 3,
|
||||
},
|
||||
{
|
||||
name: "non-default branch match",
|
||||
org: "test",
|
||||
repo: "bar",
|
||||
branch: "something_main",
|
||||
config: -1,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
@@ -189,10 +105,6 @@ func TestConfigWorkflowParser(t *testing.T) {
|
||||
if config.ManualMergeOnly != false {
|
||||
t.Fatal("This should be false")
|
||||
}
|
||||
|
||||
if config.Label("foobar") != "foobar" {
|
||||
t.Fatal("undefined label should return default value")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,13 +20,10 @@ package common
|
||||
|
||||
const (
|
||||
GiteaTokenEnv = "GITEA_TOKEN"
|
||||
GiteaHostEnv = "GITEA_HOST"
|
||||
ObsUserEnv = "OBS_USER"
|
||||
ObsPasswordEnv = "OBS_PASSWORD"
|
||||
ObsSshkeyEnv = "OBS_SSHKEY"
|
||||
ObsSshkeyFileEnv = "OBS_SSHKEYFILE"
|
||||
ObsApiEnv = "OBS_API"
|
||||
ObsWebEnv = "OBS_WEB"
|
||||
|
||||
DefaultGitPrj = "_ObsPrj"
|
||||
PrjLinksFile = "links.json"
|
||||
|
||||
common/git_parser.go (new file, 296 lines)
@@ -0,0 +1,296 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
const (
|
||||
GitStatus_Untracked = 0
|
||||
GitStatus_Modified = 1
|
||||
GitStatus_Ignored = 2
|
||||
GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
|
||||
GitStatus_Renamed = 4 // orig name in States[0]
|
||||
)
|
||||
|
||||
type GitStatusData struct {
|
||||
Path string
|
||||
Status int
|
||||
States [3]string
|
||||
|
||||
/*
|
||||
<sub> A 4 character field describing the submodule state.
|
||||
"N..." when the entry is not a submodule.
|
||||
"S<c><m><u>" when the entry is a submodule.
|
||||
<c> is "C" if the commit changed; otherwise ".".
|
||||
<m> is "M" if it has tracked changes; otherwise ".".
|
||||
<u> is "U" if there are untracked changes; otherwise ".".
|
||||
*/
|
||||
SubmoduleChanges string
|
||||
}
|
||||
|
||||
func parseGit_HexString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 32)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
switch {
|
||||
case c == 0 || c == ' ':
|
||||
return string(str), nil
|
||||
case c >= 'a' && c <= 'f':
|
||||
case c >= 'A' && c <= 'F':
|
||||
case c >= '0' && c <= '9':
|
||||
default:
|
||||
return "", errors.New("Invalid character in hex string:" + string(c))
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
func parseGit_String(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 100)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", errors.New("Unexpected EOF. Expected NUL string term")
|
||||
}
|
||||
if c == 0 || c == ' ' {
|
||||
return string(str), nil
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
|
||||
func parseGit_StringWithSpace(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 100)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", errors.New("Unexpected EOF. Expected NUL string term")
|
||||
}
|
||||
if c == 0 {
|
||||
return string(str), nil
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
|
||||
func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
|
||||
for skipSpaceLen > 0 {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c == ' ' {
|
||||
skipSpaceLen--
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
|
||||
ret := GitStatusData{}
|
||||
statusType, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
}
|
||||
switch statusType {
|
||||
case '1':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 8); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Modified
|
||||
ret.Path, err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '2':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 9); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Renamed
|
||||
ret.Path, err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.States[0], err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '?':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Untracked
|
||||
ret.Path, err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '!':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Ignored
|
||||
ret.Path, err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case 'u':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 2); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.SubmoduleChanges, err = parseGit_String(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = skipGitStatusEntry(data, 4); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ret.States[0], err = parseGit_HexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[1], err = parseGit_HexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[2], err = parseGit_HexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Unmerged
|
||||
ret.Path, err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
default:
|
||||
return nil, errors.New("Invalid status type" + string(statusType))
|
||||
}
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func parseGitStatusData(data io.ByteReader) (Data, error) {
|
||||
ret := make([]GitStatusData, 0, 10)
|
||||
for {
|
||||
data, err := parseSingleStatusEntry(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if data == nil {
|
||||
break
|
||||
}
|
||||
|
||||
ret = append(ret, *data)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
type Data interface{}
|
||||
|
||||
type CommitStatus int
|
||||
|
||||
const (
|
||||
Add CommitStatus = iota
|
||||
Rm
|
||||
Copy
|
||||
Modify
|
||||
Rename
|
||||
TypeChange
|
||||
Unmerged
|
||||
Unknown
|
||||
)
|
||||
|
||||
type GitDiffRawData struct {
|
||||
SrcMode, DstMode string
|
||||
SrcCommit, DstCommit string
|
||||
Status CommitStatus
|
||||
Src, Dst string
|
||||
}
|
||||
|
||||
func parseGit_DiffIndexStatus(data io.ByteReader, d *GitDiffRawData) error {
|
||||
b, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch b {
|
||||
case 'A':
|
||||
d.Status = Add
|
||||
case 'C':
|
||||
d.Status = Copy
|
||||
case 'D':
|
||||
d.Status = Rm
|
||||
case 'M':
|
||||
d.Status = Modify
|
||||
case 'R':
|
||||
d.Status = Rename
|
||||
case 'T':
|
||||
d.Status = TypeChange
|
||||
case 'U':
|
||||
d.Status = Unmerged
|
||||
case 'X':
|
||||
return fmt.Errorf("Unexpected unknown change type. This is a git bug")
|
||||
}
|
||||
_, err = parseGit_StringWithSpace(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseSingleGitDiffIndexRawData(data io.ByteReader) (*GitDiffRawData, error) {
|
||||
var ret GitDiffRawData
|
||||
|
||||
b, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if b != ':' {
|
||||
return nil, fmt.Errorf("Expected ':' but got '%s'", string(b))
|
||||
}
|
||||
|
||||
if ret.SrcMode, err = parseGit_String(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.DstMode, err = parseGit_String(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.Src, err = parseGit_String(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.Dst, err = parseGit_String(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = parseGit_DiffIndexStatus(data, &ret); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Dst = ret.Src
|
||||
switch ret.Status {
|
||||
case Copy, Rename:
|
||||
if ret.Src, err = parseGit_StringWithSpace(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func parseGitDiffIndexRawData(data io.ByteReader) (Data, error) {
|
||||
ret := make([]GitDiffRawData, 0, 10)
|
||||
for {
|
||||
data, err := parseSingleGitDiffIndexRawData(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if data == nil {
|
||||
break
|
||||
}
|
||||
|
||||
ret = append(ret, *data)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
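For context on what the new common/git_parser.go consumes: git status --porcelain=2 -z emits NUL-terminated records whose first byte selects the entry kind ('1' ordinary change, '2' rename or copy, 'u' unmerged, '?' untracked, '!' ignored), which is what parseSingleStatusEntry switches on. A minimal standalone sketch of walking such records (the sample bytes are fabricated for illustration, not this package's API):

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strings"
)

func main() {
	// Two fabricated porcelain=2 records: an ordinary change and an untracked file.
	sample := []byte("1 .M N... 100644 100644 100644 aaaa bbbb README.md\x00? newfile.txt\x00")
	r := bufio.NewReader(bytes.NewReader(sample))
	for {
		kind, err := r.ReadByte() // first byte selects the entry kind, as in parseSingleStatusEntry
		if err != nil {
			break // end of input
		}
		rest, _ := r.ReadString(0) // remainder of the record, up to the NUL terminator
		fmt.Printf("entry type %q: %s\n", kind, strings.TrimSuffix(rest, "\x00"))
	}
}
```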
@@ -19,9 +19,7 @@ package common
|
||||
*/
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
@@ -44,6 +42,11 @@ type GitDirectoryLister interface {
|
||||
GitDirectoryList(gitPath, commitId string) (dirlist map[string]string, err error)
|
||||
}
|
||||
|
||||
type GitSubmoduleFileConflictResolver interface {
|
||||
GitResolveConflicts(cwd, MergeBase, Head, MergeHead string) error
|
||||
GitResolveSubmoduleFileConflict(s GitStatusData, cwd, mergeBase, head, mergeHead string) error
|
||||
}
|
||||
|
||||
type GitStatusLister interface {
|
||||
GitStatus(cwd string) ([]GitStatusData, error)
|
||||
}
|
||||
@@ -75,6 +78,7 @@ type Git interface {
|
||||
GitExecQuietOrPanic(cwd string, params ...string)
|
||||
|
||||
GitDiffLister
|
||||
GitSubmoduleFileConflictResolver
|
||||
}
|
||||
|
||||
type GitHandlerImpl struct {
|
||||
@@ -350,20 +354,23 @@ var ExtraGitParams []string
|
||||
|
||||
func (e *GitHandlerImpl) GitExecWithOutput(cwd string, params ...string) (string, error) {
|
||||
cmd := exec.Command("/usr/bin/git", params...)
|
||||
var identityFile string
|
||||
if i := os.Getenv("AUTOGITS_IDENTITY_FILE"); len(i) > 0 {
|
||||
identityFile = " -i " + i
|
||||
}
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_CONFIG_GLOBAL=/dev/null",
|
||||
"GIT_AUTHOR_NAME=" + e.GitCommiter,
|
||||
"GIT_COMMITTER_NAME=" + e.GitCommiter,
|
||||
"EMAIL=not@exist@src.opensuse.org",
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
"GIT_LFS_SKIP_PUSH=1",
|
||||
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes" + identityFile,
|
||||
"GIT_SSH_COMMAND=/usr/bin/ssh -o StrictHostKeyChecking=yes",
|
||||
}
|
||||
if len(e.GitEmail) > 0 {
|
||||
cmd.Env = append(cmd.Env, "EMAIL="+e.GitEmail)
|
||||
}
|
||||
if len(e.GitCommiter) > 0 {
|
||||
cmd.Env = append(cmd.Env,
|
||||
"GIT_AUTHOR_NAME="+e.GitCommiter,
|
||||
"GIT_COMMITTER_NAME="+e.GitCommiter,
|
||||
)
|
||||
}
|
||||
|
||||
if len(ExtraGitParams) > 0 {
|
||||
cmd.Env = append(cmd.Env, ExtraGitParams...)
|
||||
}
|
||||
@@ -1006,193 +1013,10 @@ func (e *GitHandlerImpl) GitSubmoduleCommitId(cwd, packageName, commitId string)
|
||||
return subCommitId, len(subCommitId) > 0
|
||||
}
|
||||
|
||||
const (
|
||||
GitStatus_Untracked = 0
|
||||
GitStatus_Modified = 1
|
||||
GitStatus_Ignored = 2
|
||||
GitStatus_Unmerged = 3 // States[0..3] -- Stage1, Stage2, Stage3 of merge objects
|
||||
GitStatus_Renamed = 4 // orig name in States[0]
|
||||
)
|
||||
|
||||
type GitStatusData struct {
|
||||
Path string
|
||||
Status int
|
||||
States [3]string
|
||||
|
||||
/*
|
||||
<sub> A 4 character field describing the submodule state.
|
||||
"N..." when the entry is not a submodule.
|
||||
"S<c><m><u>" when the entry is a submodule.
|
||||
<c> is "C" if the commit changed; otherwise ".".
|
||||
<m> is "M" if it has tracked changes; otherwise ".".
|
||||
<u> is "U" if there are untracked changes; otherwise ".".
|
||||
*/
|
||||
SubmoduleChanges string
|
||||
}
|
||||
|
||||
func parseGitStatusHexString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 32)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
switch {
|
||||
case c == 0 || c == ' ':
|
||||
return string(str), nil
|
||||
case c >= 'a' && c <= 'f':
|
||||
case c >= 'A' && c <= 'F':
|
||||
case c >= '0' && c <= '9':
|
||||
default:
|
||||
return "", errors.New("Invalid character in hex string:" + string(c))
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
func parseGitStatusString(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 100)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", errors.New("Unexpected EOF. Expected NUL string term")
|
||||
}
|
||||
if c == 0 || c == ' ' {
|
||||
return string(str), nil
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
|
||||
func parseGitStatusStringWithSpace(data io.ByteReader) (string, error) {
|
||||
str := make([]byte, 0, 100)
|
||||
for {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return "", errors.New("Unexpected EOF. Expected NUL string term")
|
||||
}
|
||||
if c == 0 {
|
||||
return string(str), nil
|
||||
}
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
|
||||
func skipGitStatusEntry(data io.ByteReader, skipSpaceLen int) error {
|
||||
for skipSpaceLen > 0 {
|
||||
c, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c == ' ' {
|
||||
skipSpaceLen--
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseSingleStatusEntry(data io.ByteReader) (*GitStatusData, error) {
|
||||
ret := GitStatusData{}
|
||||
statusType, err := data.ReadByte()
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
}
|
||||
switch statusType {
|
||||
case '1':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 8); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Modified
|
||||
ret.Path, err = parseGitStatusStringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '2':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 9); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Renamed
|
||||
ret.Path, err = parseGitStatusStringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.States[0], err = parseGitStatusStringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '?':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Untracked
|
||||
ret.Path, err = parseGitStatusStringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case '!':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Ignored
|
||||
ret.Path, err = parseGitStatusStringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case 'u':
|
||||
var err error
|
||||
if err = skipGitStatusEntry(data, 2); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.SubmoduleChanges, err = parseGitStatusString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = skipGitStatusEntry(data, 4); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ret.States[0], err = parseGitStatusHexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[1], err = parseGitStatusHexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret.States[2], err = parseGitStatusHexString(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Status = GitStatus_Unmerged
|
||||
ret.Path, err = parseGitStatusStringWithSpace(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
default:
|
||||
return nil, errors.New("Invalid status type" + string(statusType))
|
||||
}
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func parseGitStatusData(data io.ByteReader) ([]GitStatusData, error) {
|
||||
ret := make([]GitStatusData, 0, 10)
|
||||
for {
|
||||
data, err := parseSingleStatusEntry(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if data == nil {
|
||||
break
|
||||
}
|
||||
|
||||
ret = append(ret, *data)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
|
||||
LogDebug("getting git-status()")
|
||||
|
||||
cmd := exec.Command("/usr/bin/git", "status", "--porcelain=2", "-z")
|
||||
func (e *GitHandlerImpl) GitExecWithDataParse(cwd string, dataprocessor func(io.ByteReader) (Data, error), gitcmd string, args ...string) (Data, error) {
|
||||
LogDebug("getting", gitcmd)
|
||||
args = append([]string{gitcmd}, args...)
|
||||
cmd := exec.Command("/usr/bin/git", args...)
|
||||
cmd.Env = []string{
|
||||
"GIT_CEILING_DIRECTORIES=" + e.GitPath,
|
||||
"GIT_LFS_SKIP_SMUDGE=1",
|
||||
@@ -1209,7 +1033,12 @@ func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error)
|
||||
LogError("Error running command", cmd.Args, err)
|
||||
}
|
||||
|
||||
return parseGitStatusData(bufio.NewReader(bytes.NewReader(out)))
|
||||
return dataprocessor(bytes.NewReader(out))
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitStatus(cwd string) (ret []GitStatusData, err error) {
|
||||
data, err := e.GitExecWithDataParse(cwd, parseGitStatusData, "status", "--porcelain=2", "-z")
|
||||
return data.([]GitStatusData), err
|
||||
}
|
||||
|
||||
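The refactor above routes output parsing through GitExecWithDataParse, which runs a git subcommand and hands the raw output to a func(io.ByteReader) (Data, error) callback; GitStatus and GitDiffIndex then only choose their parser. A rough standalone sketch of the same pattern, with a hypothetical runAndParse helper and a trivial NUL-record counter standing in for the real parsers (illustrative only, not this package's API):

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"io"
	"os/exec"
)

// runAndParse runs a git subcommand and feeds its raw output to a parser callback.
func runAndParse(parse func(io.ByteReader) (any, error), args ...string) (any, error) {
	out, err := exec.Command("git", args...).Output()
	if err != nil {
		return nil, err
	}
	return parse(bufio.NewReader(bytes.NewReader(out)))
}

func main() {
	// Trivial parser: count NUL-separated records, e.g. from "status --porcelain=2 -z".
	count := func(r io.ByteReader) (any, error) {
		n := 0
		for {
			b, err := r.ReadByte()
			if err != nil {
				return n, nil
			}
			if b == 0 {
				n++
			}
		}
	}
	n, err := runAndParse(count, "status", "--porcelain=2", "-z")
	fmt.Println(n, err)
}
```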
func (e *GitHandlerImpl) GitDiff(cwd, base, head string) (string, error) {
|
||||
@@ -1234,3 +1063,122 @@ func (e *GitHandlerImpl) GitDiff(cwd, base, head string) (string, error) {
|
||||
|
||||
return string(out), nil
|
||||
}
|
||||
|
||||
func (e *GitHandlerImpl) GitDiffIndex(cwd, commit string) ([]GitDiffRawData, error) {
|
||||
data, err := e.GitExecWithDataParse("diff-index", parseGitDiffIndexRawData, cwd, "diff-index", "-z", "--raw", "--full-index", "--submodule=short", "HEAD")
|
||||
return data.([]GitDiffRawData), err
|
||||
}
|
||||
|
||||
func (git *GitHandlerImpl) GitResolveConflicts(cwd, mergeBase, head, mergeHead string) error {
|
||||
status, err := git.GitStatus(cwd)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Status failed: %w", err)
|
||||
}
|
||||
|
||||
// we can only resolve conflicts with .gitmodules
|
||||
for _, s := range status {
|
||||
if s.Status == GitStatus_Unmerged && s.Path == ".gitmodules" {
|
||||
if err := git.GitResolveSubmoduleFileConflict(s, cwd, mergeBase, head, mergeHead); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if s.Status == GitStatus_Unmerged {
|
||||
return fmt.Errorf("Cannot automatically resolve conflict: %s", s.Path)
|
||||
}
|
||||
}
|
||||
|
||||
return git.GitExec(cwd, "-c", "core.editor=true", "merge", "--continue")
|
||||
}
|
||||
|
||||
func (git *GitHandlerImpl) GitResolveSubmoduleFileConflict(s GitStatusData, cwd, mergeBase, head, mergeHead string) error {
|
||||
submodules1, err := git.GitSubmoduleList(cwd, mergeBase)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
/*
|
||||
submodules2, err := git.GitSubmoduleList(cwd, head)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
*/
|
||||
submodules3, err := git.GitSubmoduleList(cwd, mergeHead)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
|
||||
// find modified submodules in the mergeHead
|
||||
modifiedSubmodules := make([]string, 0, 10)
|
||||
removedSubmodules := make([]string, 0, 10)
|
||||
addedSubmodules := make([]string, 0, 10)
|
||||
|
||||
for submodulePath, oldHash := range submodules1 {
|
||||
if newHash, found := submodules3[submodulePath]; found && newHash != oldHash {
|
||||
modifiedSubmodules = append(modifiedSubmodules, submodulePath)
|
||||
} else if !found {
|
||||
removedSubmodules = append(removedSubmodules, submodulePath)
|
||||
}
|
||||
}
|
||||
for submodulePath, _ := range submodules3 {
|
||||
if _, found := submodules1[submodulePath]; !found {
|
||||
addedSubmodules = append(addedSubmodules, submodulePath)
|
||||
}
|
||||
}
|
||||
|
||||
// We need to adjust the `submodules` list by the pending changes to the index
|
||||
s1, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[0])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s2, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[1])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s3, err := git.GitExecWithOutput(cwd, "cat-file", "blob", s.States[2])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
|
||||
_, err = ParseSubmodulesFile(strings.NewReader(s1))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
|
||||
overrideSet := make([]Submodule, 0, len(addedSubmodules)+len(modifiedSubmodules))
|
||||
for i := range subs3 {
|
||||
if slices.Contains(addedSubmodules, subs3[i].Path) || slices.Contains(modifiedSubmodules, subs3[i].Path) {
|
||||
overrideSet = append(overrideSet, subs3[i])
|
||||
}
|
||||
}
|
||||
// merge from subs1 (merge-base), subs2 (changes in base since merge-base HEAD), subs3 (merge_source MERGE_HEAD)
|
||||
// this will update submodules
|
||||
SubmoduleCompare := func(a, b Submodule) int { return strings.Compare(a.Path, b.Path) }
|
||||
CompactCompare := func(a, b Submodule) bool { return a.Path == b.Path }
|
||||
|
||||
// remove submodules that are removed in the PR
|
||||
subs2 = slices.DeleteFunc(subs2, func(a Submodule) bool { return slices.Contains(removedSubmodules, a.Path) })
|
||||
|
||||
mergedSubs := slices.Concat(overrideSet, subs2)
|
||||
slices.SortStableFunc(mergedSubs, SubmoduleCompare)
|
||||
filteredSubs := slices.CompactFunc(mergedSubs, CompactCompare)
|
||||
|
||||
out, err := os.Create(path.Join(git.GetPath(), cwd, ".gitmodules"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
|
||||
}
|
||||
if err = WriteSubmodules(filteredSubs, out); err != nil {
|
||||
return fmt.Errorf("Can't write .gitmodules: %w", err)
|
||||
}
|
||||
if out.Close(); err != nil {
|
||||
return fmt.Errorf("Can't close .gitmodules: %w", err)
|
||||
}
|
||||
|
||||
git.GitExecOrPanic(cwd, "add", ".gitmodules")
|
||||
return nil
|
||||
}
|
||||
|
||||
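GitResolveSubmoduleFileConflict above rebuilds .gitmodules by letting entries added or modified on the MERGE_HEAD side override the HEAD side, dropping entries removed in the PR, then sorting by path and compacting duplicates. A self-contained sketch of just that combine step (the Submodule struct here is a simplified stand-in for the package's type):

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// Simplified stand-in for the package's Submodule entry.
type Submodule struct{ Path, URL string }

func main() {
	head := []Submodule{{"pkgA", "../pkgA"}, {"pkgB", "../pkgB"}, {"pkgB1", "../pkgB1"}}
	overrides := []Submodule{{"pkgB2", "../pkgB2"}} // added/modified on MERGE_HEAD
	removed := []string{"pkgB1"}                    // removed on MERGE_HEAD

	// Drop removals from the HEAD side, then let overrides win on duplicate paths.
	head = slices.DeleteFunc(head, func(s Submodule) bool { return slices.Contains(removed, s.Path) })
	merged := slices.Concat(overrides, head)
	slices.SortStableFunc(merged, func(a, b Submodule) int { return strings.Compare(a.Path, b.Path) })
	merged = slices.CompactFunc(merged, func(a, b Submodule) bool { return a.Path == b.Path })

	fmt.Println(merged) // [{pkgA ../pkgA} {pkgB ../pkgB} {pkgB2 ../pkgB2}]
}
```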
@@ -24,6 +24,7 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -93,6 +94,145 @@ func TestGitClone(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestSubmoduleConflictResolution(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
checkout, merge string
|
||||
result string
|
||||
merge_fail bool
|
||||
}{
|
||||
{
|
||||
name: "adding two submodules",
|
||||
checkout: "base_add_b1",
|
||||
merge: "base_add_b2",
|
||||
result: `[submodule "pkgA"]
|
||||
path = pkgA
|
||||
url = ../pkgA
|
||||
[submodule "pkgB"]
|
||||
path = pkgB
|
||||
url = ../pkgB
|
||||
[submodule "pkgB1"]
|
||||
path = pkgB1
|
||||
url = ../pkgB1
|
||||
[submodule "pkgB2"]
|
||||
path = pkgB2
|
||||
url = ../pkgB2
|
||||
[submodule "pkgC"]
|
||||
path = pkgC
|
||||
url = ../pkgC
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "remove one module and add another",
|
||||
checkout: "base_rm_c",
|
||||
merge: "base_add_b2",
|
||||
result: `[submodule "pkgA"]
|
||||
path = pkgA
|
||||
url = ../pkgA
|
||||
[submodule "pkgB"]
|
||||
path = pkgB
|
||||
url = ../pkgB
|
||||
[submodule "pkgB2"]
|
||||
path = pkgB2
|
||||
url = ../pkgB2
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "add one and remove another",
|
||||
checkout: "base_add_b2",
|
||||
merge: "base_rm_c",
|
||||
result: `[submodule "pkgA"]
|
||||
path = pkgA
|
||||
url = ../pkgA
|
||||
[submodule "pkgB"]
|
||||
path = pkgB
|
||||
url = ../pkgB
|
||||
[submodule "pkgB2"]
|
||||
path = pkgB2
|
||||
url = ../pkgB2
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "rm modified submodule",
|
||||
checkout: "base_modify_c",
|
||||
merge: "base_rm_c",
|
||||
merge_fail: true,
|
||||
},
|
||||
{
|
||||
name: "modified removed submodule",
|
||||
checkout: "base_rm_c",
|
||||
merge: "base_modify_c",
|
||||
merge_fail: true,
|
||||
},
|
||||
}
|
||||
|
||||
d, err := os.MkdirTemp(os.TempDir(), "submoduletests")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cmd := exec.Command(cwd + "/test_repo_setup.sh")
|
||||
cmd.Dir = d
|
||||
_, err = cmd.Output()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
gh, err := AllocateGitWorkTree(d, "test", "foo@example.com")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
success := true
|
||||
noErrorOrFail := func(t *testing.T, err error) {
|
||||
if err != nil {
|
||||
t.Fatal(string(debug.Stack()), err)
|
||||
}
|
||||
}
|
||||
for _, test := range tests {
|
||||
success = t.Run(test.name, func(t *testing.T) {
|
||||
git, err := gh.ReadExistingPath("prjgit")
|
||||
defer git.Close()
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
noErrorOrFail(t, git.GitExec("", "reset", "--hard"))
|
||||
noErrorOrFail(t, git.GitExec("", "checkout", "-B", "test", test.checkout))
|
||||
// noErrorOrFail(t, git.GitExec("", "merge", test.checkout))
|
||||
err = git.GitExec("", "merge", test.merge)
|
||||
if err == nil {
|
||||
t.Fatal("expected a conflict")
|
||||
}
|
||||
err = git.GitResolveConflicts("", "main", test.checkout, test.merge)
|
||||
if err != nil {
|
||||
if test.merge_fail {
|
||||
return // success
|
||||
}
|
||||
t.Fatal(err)
|
||||
}
|
||||
if test.merge_fail {
|
||||
t.Fatal("Expected fail but succeeded?")
|
||||
}
|
||||
data, err := os.ReadFile(git.GetPath() + "/.gitmodules")
|
||||
if err != nil {
|
||||
t.Fatal("Cannot read .gitmodules.", err)
|
||||
}
|
||||
|
||||
if string(data) != test.result {
|
||||
t.Error("Expected", len(test.result), test.result, "but have", len(data), string(data))
|
||||
}
|
||||
}) && success
|
||||
}
|
||||
|
||||
if success {
|
||||
os.RemoveAll(d)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitMsgParsing(t *testing.T) {
|
||||
t.Run("tree message with size 56", func(t *testing.T) {
|
||||
const hdr = "f40888ea4515fe2e8eea617a16f5f50a45f652d894de3ad181d58de3aafb8f98 tree 56\x00"
|
||||
@@ -584,12 +724,15 @@ func TestGitStatusParse(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(r) != len(test.res) {
|
||||
t.Fatal("len(r):", len(r), "is not expected", len(test.res))
|
||||
|
||||
res := r.([]GitStatusData)
|
||||
|
||||
if len(res) != len(test.res) {
|
||||
t.Fatal("len(r):", len(res), "is not expected", len(test.res))
|
||||
}
|
||||
|
||||
for _, expected := range test.res {
|
||||
if !slices.Contains(r, expected) {
|
||||
if !slices.Contains(res, expected) {
|
||||
t.Fatal("result", r, "doesn't contains expected", expected)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,6 @@ import (
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
transport "github.com/go-openapi/runtime/client"
|
||||
@@ -67,16 +66,7 @@ const (
|
||||
ReviewStateUnknown models.ReviewStateType = ""
|
||||
)
|
||||
|
||||
type GiteaLabelGetter interface {
|
||||
GetLabels(org, repo string, idx int64) ([]*models.Label, error)
|
||||
}
|
||||
|
||||
type GiteaLabelSettter interface {
|
||||
SetLabels(org, repo string, idx int64, labels []string) ([]*models.Label, error)
|
||||
}
|
||||
|
||||
type GiteaTimelineFetcher interface {
|
||||
ResetTimelineCache(org, repo string, idx int64)
|
||||
GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error)
|
||||
}
|
||||
|
||||
@@ -101,10 +91,9 @@ type GiteaPRUpdater interface {
|
||||
UpdatePullRequest(org, repo string, num int64, options *models.EditPullRequestOption) (*models.PullRequest, error)
|
||||
}
|
||||
|
||||
type GiteaPRTimelineReviewFetcher interface {
|
||||
type GiteaPRTimelineFetcher interface {
|
||||
GiteaPRFetcher
|
||||
GiteaTimelineFetcher
|
||||
GiteaReviewFetcher
|
||||
}
|
||||
|
||||
type GiteaCommitFetcher interface {
|
||||
@@ -130,16 +119,10 @@ type GiteaPRChecker interface {
|
||||
GiteaMaintainershipReader
|
||||
}
|
||||
|
||||
type GiteaReviewFetcherAndRequesterAndUnrequester interface {
|
||||
type GiteaReviewFetcherAndRequester interface {
|
||||
GiteaReviewTimelineFetcher
|
||||
GiteaCommentFetcher
|
||||
GiteaReviewRequester
|
||||
GiteaReviewUnrequester
|
||||
}
|
||||
|
||||
type GiteaUnreviewTimelineFetcher interface {
|
||||
GiteaTimelineFetcher
|
||||
GiteaReviewUnrequester
|
||||
}
|
||||
|
||||
type GiteaReviewRequester interface {
|
||||
@@ -199,8 +182,6 @@ type Gitea interface {
|
||||
GiteaCommitStatusGetter
|
||||
GiteaCommitStatusSetter
|
||||
GiteaSetRepoOptions
|
||||
GiteaLabelGetter
|
||||
GiteaLabelSettter
|
||||
|
||||
GetNotifications(Type string, since *time.Time) ([]*models.NotificationThread, error)
|
||||
GetDoneNotifications(Type string, page int64) ([]*models.NotificationThread, error)
|
||||
@@ -208,7 +189,7 @@ type Gitea interface {
|
||||
GetOrganization(orgName string) (*models.Organization, error)
|
||||
GetOrganizationRepositories(orgName string) ([]*models.Repository, error)
|
||||
CreateRepositoryIfNotExist(git Git, org, repoName string) (*models.Repository, error)
|
||||
CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error, bool)
|
||||
CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error)
|
||||
GetPullRequestFileContent(pr *models.PullRequest, path string) ([]byte, string, error)
|
||||
GetRecentPullRequests(org, repo, branch string) ([]*models.PullRequest, error)
|
||||
GetRecentCommits(org, repo, branch string, commitNo int64) ([]*models.Commit, error)
|
||||
@@ -485,30 +466,6 @@ func (gitea *GiteaTransport) SetRepoOptions(owner, repo string, manual_merge boo
|
||||
return ok.Payload, err
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetLabels(owner, repo string, idx int64) ([]*models.Label, error) {
|
||||
ret, err := gitea.client.Issue.IssueGetLabels(issue.NewIssueGetLabelsParams().WithOwner(owner).WithRepo(repo).WithIndex(idx), gitea.transport.DefaultAuthentication)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret.Payload, err
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) SetLabels(owner, repo string, idx int64, labels []string) ([]*models.Label, error) {
|
||||
interfaceLabels := make([]interface{}, len(labels))
|
||||
for i, l := range labels {
|
||||
interfaceLabels[i] = l
|
||||
}
|
||||
|
||||
ret, err := gitea.client.Issue.IssueAddLabel(issue.NewIssueAddLabelParams().WithOwner(owner).WithRepo(repo).WithIndex(idx).WithBody(&models.IssueLabelsOption{Labels: interfaceLabels}),
|
||||
gitea.transport.DefaultAuthentication)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret.Payload, nil
|
||||
}
|
||||
|
||||
const (
|
||||
GiteaNotificationType_Pull = "Pull"
|
||||
)
|
||||
@@ -686,7 +643,7 @@ func (gitea *GiteaTransport) CreateRepositoryIfNotExist(git Git, org, repoName s
|
||||
return repo.Payload, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error, bool) {
|
||||
func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository, srcId, targetId, title, body string) (*models.PullRequest, error) {
|
||||
prOptions := models.CreatePullRequestOption{
|
||||
Base: targetId,
|
||||
Head: srcId,
|
||||
@@ -702,7 +659,7 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
|
||||
WithHead(srcId),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
); err == nil && pr.Payload.State == "open" {
|
||||
return pr.Payload, nil, false
|
||||
return pr.Payload, nil
|
||||
}
|
||||
|
||||
pr, err := gitea.client.Repository.RepoCreatePullRequest(
|
||||
@@ -716,10 +673,10 @@ func (gitea *GiteaTransport) CreatePullRequestIfNotExist(repo *models.Repository
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot create pull request. %w", err), true
|
||||
return nil, fmt.Errorf("Cannot create pull request. %w", err)
|
||||
}
|
||||
|
||||
return pr.GetPayload(), nil, true
|
||||
return pr.GetPayload(), nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) RequestReviews(pr *models.PullRequest, reviewers ...string) ([]*models.PullReview, error) {
|
||||
@@ -806,91 +763,45 @@ func (gitea *GiteaTransport) AddComment(pr *models.PullRequest, comment string)
|
||||
return nil
|
||||
}
|
||||
|
||||
type TimelineCacheData struct {
|
||||
data []*models.TimelineComment
|
||||
lastCheck time.Time
|
||||
}
|
||||
|
||||
var giteaTimelineCache map[string]TimelineCacheData = make(map[string]TimelineCacheData)
|
||||
var giteaTimelineCacheMutex sync.RWMutex
|
||||
|
||||
func (gitea *GiteaTransport) ResetTimelineCache(org, repo string, idx int64) {
|
||||
giteaTimelineCacheMutex.Lock()
|
||||
defer giteaTimelineCacheMutex.Unlock()
|
||||
|
||||
prID := fmt.Sprintf("%s/%s!%d", org, repo, idx)
|
||||
Cache, IsCached := giteaTimelineCache[prID]
|
||||
if IsCached {
|
||||
Cache.lastCheck = Cache.lastCheck.Add(-time.Hour)
|
||||
giteaTimelineCache[prID] = Cache
|
||||
}
|
||||
}
|
||||
|
||||
// returns timeline in reverse chronological create order
|
||||
func (gitea *GiteaTransport) GetTimeline(org, repo string, idx int64) ([]*models.TimelineComment, error) {
|
||||
page := int64(1)
|
||||
resCount := 1
|
||||
|
||||
prID := fmt.Sprintf("%s/%s!%d", org, repo, idx)
|
||||
giteaTimelineCacheMutex.RLock()
|
||||
TimelineCache, IsCached := giteaTimelineCache[prID]
|
||||
var LastCachedTime strfmt.DateTime
|
||||
if IsCached {
|
||||
l := len(TimelineCache.data)
|
||||
if l > 0 {
|
||||
LastCachedTime = TimelineCache.data[0].Updated
|
||||
}
|
||||
|
||||
// cache data for 5 seconds
|
||||
if TimelineCache.lastCheck.Add(time.Second*5).Compare(time.Now()) > 0 {
|
||||
giteaTimelineCacheMutex.RUnlock()
|
||||
return TimelineCache.data, nil
|
||||
}
|
||||
}
|
||||
giteaTimelineCacheMutex.RUnlock()
|
||||
|
||||
giteaTimelineCacheMutex.Lock()
|
||||
defer giteaTimelineCacheMutex.Unlock()
|
||||
retData := []*models.TimelineComment{}
|
||||
|
||||
for resCount > 0 {
|
||||
opts := issue.NewIssueGetCommentsAndTimelineParams().WithOwner(org).WithRepo(repo).WithIndex(idx).WithPage(&page)
|
||||
if !LastCachedTime.IsZero() {
|
||||
opts = opts.WithSince(&LastCachedTime)
|
||||
}
|
||||
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(opts, gitea.transport.DefaultAuthentication)
|
||||
res, err := gitea.client.Issue.IssueGetCommentsAndTimeline(
|
||||
issue.NewIssueGetCommentsAndTimelineParams().
|
||||
WithOwner(org).
|
||||
WithRepo(repo).
|
||||
WithIndex(idx).
|
||||
WithPage(&page),
|
||||
gitea.transport.DefaultAuthentication,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resCount = len(res.Payload); resCount == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
for _, d := range res.Payload {
|
||||
if d != nil {
|
||||
if time.Time(d.Created).Compare(time.Time(LastCachedTime)) > 0 {
|
||||
// created after last check, so we append here
|
||||
TimelineCache.data = append(TimelineCache.data, d)
|
||||
} else {
|
||||
// we need something updated in the timeline, maybe
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if resCount < 10 {
|
||||
resCount = len(res.Payload)
|
||||
LogDebug("page:", page, "len:", resCount)
|
||||
if resCount == 0 {
|
||||
break
|
||||
}
|
||||
page++
|
||||
|
||||
for _, d := range res.Payload {
|
||||
if d != nil {
|
||||
retData = append(retData, d)
|
||||
}
|
||||
}
|
||||
}
|
||||
LogDebug("timeline", prID, "# timeline:", len(TimelineCache.data))
|
||||
slices.SortFunc(TimelineCache.data, func(a, b *models.TimelineComment) int {
|
||||
LogDebug("total results:", len(retData))
|
||||
slices.SortFunc(retData, func(a, b *models.TimelineComment) int {
|
||||
return time.Time(b.Created).Compare(time.Time(a.Created))
|
||||
})
|
||||
|
||||
TimelineCache.lastCheck = time.Now()
|
||||
giteaTimelineCache[prID] = TimelineCache
|
||||
|
||||
return TimelineCache.data, nil
|
||||
return retData, nil
|
||||
}
|
||||
|
||||
func (gitea *GiteaTransport) GetRepositoryFileContent(org, repo, hash, path string) ([]byte, string, error) {
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
@@ -27,15 +25,12 @@ const ProjectFileKey = "_project"
|
||||
type MaintainershipMap struct {
|
||||
Data map[string][]string
|
||||
IsDir bool
|
||||
Config *AutogitConfig
|
||||
FetchPackage func(string) ([]byte, error)
|
||||
Raw []byte
|
||||
}
|
||||
|
||||
func ParseMaintainershipData(data []byte) (*MaintainershipMap, error) {
|
||||
func parseMaintainershipData(data []byte) (*MaintainershipMap, error) {
|
||||
maintainers := &MaintainershipMap{
|
||||
Data: make(map[string][]string),
|
||||
Raw: data,
|
||||
}
|
||||
if err := json.Unmarshal(data, &maintainers.Data); err != nil {
|
||||
return nil, err
|
||||
@@ -44,9 +39,7 @@ func ParseMaintainershipData(data []byte) (*MaintainershipMap, error) {
|
||||
return maintainers, nil
|
||||
}
|
||||
|
||||
func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, config *AutogitConfig) (*MaintainershipMap, error) {
|
||||
org, prjGit, branch := config.GetPrjGit()
|
||||
|
||||
func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, org, prjGit, branch string) (*MaintainershipMap, error) {
|
||||
data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, ProjectFileKey)
|
||||
dir := true
|
||||
if err != nil || data == nil {
|
||||
@@ -66,9 +59,8 @@ func FetchProjectMaintainershipData(gitea GiteaMaintainershipReader, config *Aut
|
||||
}
|
||||
}
|
||||
|
||||
m, err := ParseMaintainershipData(data)
|
||||
m, err := parseMaintainershipData(data)
|
||||
if m != nil {
|
||||
m.Config = config
|
||||
m.IsDir = dir
|
||||
m.FetchPackage = func(pkg string) ([]byte, error) {
|
||||
data, _, err := gitea.FetchMaintainershipDirFile(org, prjGit, branch, pkg)
|
||||
@@ -88,8 +80,6 @@ func (data *MaintainershipMap) ListProjectMaintainers(groups []*ReviewGroup) []s
|
||||
return nil
|
||||
}
|
||||
|
||||
m = slices.Clone(m)
|
||||
|
||||
// expands groups
|
||||
for _, g := range groups {
|
||||
m = g.ExpandMaintainers(m)
|
||||
@@ -126,7 +116,6 @@ func (data *MaintainershipMap) ListPackageMaintainers(pkg string, groups []*Revi
|
||||
}
|
||||
}
|
||||
}
|
||||
pkgMaintainers = slices.Clone(pkgMaintainers)
|
||||
prjMaintainers := data.ListProjectMaintainers(nil)
|
||||
|
||||
prjMaintainer:
|
||||
@@ -160,10 +149,7 @@ func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullRevi
|
||||
}
|
||||
|
||||
LogDebug("Looking for review by:", reviewers)
|
||||
slices.Sort(reviewers)
|
||||
reviewers = slices.Compact(reviewers)
|
||||
SubmitterIdxInReviewers := slices.Index(reviewers, submitter)
|
||||
if SubmitterIdxInReviewers > -1 && (!data.Config.ReviewRequired || len(reviewers) == 1) {
|
||||
if slices.Contains(reviewers, submitter) {
|
||||
LogDebug("Submitter is maintainer. Approving.")
|
||||
return true
|
||||
}
|
||||
@@ -178,135 +164,13 @@ func (data *MaintainershipMap) IsApproved(pkg string, reviews []*models.PullRevi
|
||||
return false
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) modifyInplace(writer io.StringWriter) error {
|
||||
var original map[string][]string
|
||||
if err := json.Unmarshal(data.Raw, &original); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dec := json.NewDecoder(bytes.NewReader(data.Raw))
|
||||
_, err := dec.Token()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
output := ""
|
||||
lastPos := 0
|
||||
modified := false
|
||||
|
||||
type entry struct {
|
||||
key string
|
||||
valStart int
|
||||
valEnd int
|
||||
}
|
||||
var entries []entry
|
||||
|
||||
for dec.More() {
|
||||
kToken, _ := dec.Token()
|
||||
key := kToken.(string)
|
||||
var raw json.RawMessage
|
||||
dec.Decode(&raw)
|
||||
valEnd := int(dec.InputOffset())
|
||||
valStart := valEnd - len(raw)
|
||||
entries = append(entries, entry{key, valStart, valEnd})
|
||||
}
|
||||
|
||||
changed := make(map[string]bool)
|
||||
for k, v := range data.Data {
|
||||
if ov, ok := original[k]; !ok || !slices.Equal(v, ov) {
|
||||
changed[k] = true
|
||||
}
|
||||
}
|
||||
for k := range original {
|
||||
if _, ok := data.Data[k]; !ok {
|
||||
changed[k] = true
|
||||
}
|
||||
}
|
||||
|
||||
if len(changed) == 0 {
|
||||
_, err = writer.WriteString(string(data.Raw))
|
||||
return err
|
||||
}
|
||||
|
||||
for _, e := range entries {
|
||||
if v, ok := data.Data[e.key]; ok {
|
||||
prefix := string(data.Raw[lastPos:e.valStart])
|
||||
if modified && strings.TrimSpace(output) == "{" {
|
||||
if commaIdx := strings.Index(prefix, ","); commaIdx != -1 {
|
||||
if quoteIdx := strings.Index(prefix, "\""); quoteIdx == -1 || commaIdx < quoteIdx {
|
||||
prefix = prefix[:commaIdx] + prefix[commaIdx+1:]
|
||||
}
|
||||
}
|
||||
}
|
||||
output += prefix
|
||||
if changed[e.key] {
|
||||
slices.Sort(v)
|
||||
newVal, _ := json.Marshal(v)
|
||||
output += string(newVal)
|
||||
modified = true
|
||||
} else {
|
||||
output += string(data.Raw[e.valStart:e.valEnd])
|
||||
}
|
||||
} else {
|
||||
// Deleted
|
||||
modified = true
|
||||
}
|
||||
lastPos = e.valEnd
|
||||
}
|
||||
output += string(data.Raw[lastPos:])
|
||||
|
||||
// Handle additions (simplistic: at the end)
|
||||
for k, v := range data.Data {
|
||||
if _, ok := original[k]; !ok {
|
||||
slices.Sort(v)
|
||||
newVal, _ := json.Marshal(v)
|
||||
keyStr, _ := json.Marshal(k)
|
||||
|
||||
// Insert before closing brace
|
||||
if idx := strings.LastIndex(output, "}"); idx != -1 {
|
||||
prefix := output[:idx]
|
||||
suffix := output[idx:]
|
||||
|
||||
trimmedPrefix := strings.TrimRight(prefix, " \n\r\t")
|
||||
if !strings.HasSuffix(trimmedPrefix, "{") && !strings.HasSuffix(trimmedPrefix, ",") {
|
||||
// find the actual position of the last non-whitespace character in prefix
|
||||
lastCharIdx := strings.LastIndexAny(prefix, "]}0123456789\"")
|
||||
if lastCharIdx != -1 {
|
||||
prefix = prefix[:lastCharIdx+1] + "," + prefix[lastCharIdx+1:]
|
||||
}
|
||||
}
|
||||
|
||||
insertion := fmt.Sprintf(" %s: %s", string(keyStr), string(newVal))
|
||||
if !strings.HasSuffix(prefix, "\n") {
|
||||
insertion = "\n" + insertion
|
||||
}
|
||||
output = prefix + insertion + "\n" + suffix
|
||||
modified = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if modified {
|
||||
_, err := writer.WriteString(output)
|
||||
return err
|
||||
}
|
||||
_, err = writer.WriteString(string(data.Raw))
|
||||
return err
|
||||
}
|
||||
|
||||
func (data *MaintainershipMap) WriteMaintainershipFile(writer io.StringWriter) error {
	if data.IsDir {
		return fmt.Errorf("Not implemented")
	}

	if len(data.Raw) > 0 {
		if err := data.modifyInplace(writer); err == nil {
			return nil
		}
	}

	// Fallback to full write
	writer.WriteString("{\n")

	if d, ok := data.Data[""]; ok {
		eol := ","
		if len(data.Data) == 1 {

@@ -317,12 +181,17 @@ func (data *MaintainershipMap) WriteMaintainershipFile(writer io.StringWriter) e
		writer.WriteString(fmt.Sprintf(" \"\": %s%s\n", string(str), eol))
	}

	keys := make([]string, 0, len(data.Data))
	keys := make([]string, len(data.Data))
	i := 0
	for pkg := range data.Data {
		if pkg == "" {
			continue
		}
		keys = append(keys, pkg)
		keys[i] = pkg
		i++
	}
	if len(keys) >= i {
		keys = slices.Delete(keys, i, len(keys))
	}
	slices.Sort(keys)
	for i, pkg := range keys {

@@ -13,10 +13,10 @@ import (
|
||||
)
|
||||
|
||||
func TestMaintainership(t *testing.T) {
|
||||
config := &common.AutogitConfig{
|
||||
config := common.AutogitConfig{
|
||||
Branch: "bar",
|
||||
Organization: "foo",
|
||||
GitProjectName: common.DefaultGitPrj + "#bar",
|
||||
GitProjectName: common.DefaultGitPrj,
|
||||
}
|
||||
|
||||
packageTests := []struct {
|
||||
@@ -141,7 +141,7 @@ func TestMaintainership(t *testing.T) {
|
||||
notFoundError := repository.NewRepoGetContentsNotFound()
|
||||
for _, test := range packageTests {
|
||||
runTests := func(t *testing.T, mi common.GiteaMaintainershipReader) {
|
||||
maintainers, err := common.FetchProjectMaintainershipData(mi, config)
|
||||
maintainers, err := common.FetchProjectMaintainershipData(mi, config.Organization, config.GitProjectName, config.Branch)
|
||||
if err != nil && !test.otherError {
|
||||
if test.maintainersFileErr == nil {
|
||||
t.Fatal("Unexpected error recieved", err)
|
||||
@@ -208,7 +208,6 @@ func TestMaintainershipFileWrite(t *testing.T) {
|
||||
name string
|
||||
is_dir bool
|
||||
maintainers map[string][]string
|
||||
raw []byte
|
||||
expected_output string
|
||||
expected_error error
|
||||
}{
|
||||
@@ -232,43 +231,6 @@ func TestMaintainershipFileWrite(t *testing.T) {
|
||||
},
|
||||
expected_output: "{\n \"\": [\"one\",\"two\"],\n \"foo\": [\"byte\",\"four\"],\n \"pkg1\": []\n}\n",
|
||||
},
|
||||
{
|
||||
name: "surgical modification",
|
||||
maintainers: map[string][]string{
|
||||
"": {"one", "two"},
|
||||
"foo": {"byte", "four", "newone"},
|
||||
"pkg1": {},
|
||||
},
|
||||
raw: []byte("{\n \"\": [\"one\",\"two\"],\n \"foo\": [\"byte\",\"four\"],\n \"pkg1\": []\n}\n"),
|
||||
expected_output: "{\n \"\": [\"one\",\"two\"],\n \"foo\": [\"byte\",\"four\",\"newone\"],\n \"pkg1\": []\n}\n",
|
||||
},
|
||||
{
|
||||
name: "no change",
|
||||
maintainers: map[string][]string{
|
||||
"": {"one", "two"},
|
||||
"foo": {"byte", "four"},
|
||||
"pkg1": {},
|
||||
},
|
||||
raw: []byte("{\n \"\": [\"one\",\"two\"],\n \"foo\": [\"byte\",\"four\"],\n \"pkg1\": []\n}\n"),
|
||||
expected_output: "{\n \"\": [\"one\",\"two\"],\n \"foo\": [\"byte\",\"four\"],\n \"pkg1\": []\n}\n",
|
||||
},
|
||||
{
|
||||
name: "surgical addition",
|
||||
maintainers: map[string][]string{
|
||||
"": {"one"},
|
||||
"new": {"user"},
|
||||
},
|
||||
raw: []byte("{\n \"\": [ \"one\" ]\n}\n"),
|
||||
expected_output: "{\n \"\": [ \"one\" ],\n \"new\": [\"user\"]\n}\n",
|
||||
},
|
||||
{
|
||||
name: "surgical deletion",
|
||||
maintainers: map[string][]string{
|
||||
"": {"one"},
|
||||
},
|
||||
raw: []byte("{\n \"\": [\"one\"],\n \"old\": [\"user\"]\n}\n"),
|
||||
expected_output: "{\n \"\": [\"one\"]\n}\n",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
@@ -277,7 +239,6 @@ func TestMaintainershipFileWrite(t *testing.T) {
|
||||
data := common.MaintainershipMap{
|
||||
Data: test.maintainers,
|
||||
IsDir: test.is_dir,
|
||||
Raw: test.raw,
|
||||
}
|
||||
|
||||
if err := data.WriteMaintainershipFile(&b); err != test.expected_error {
|
||||
@@ -287,134 +248,8 @@ func TestMaintainershipFileWrite(t *testing.T) {
|
||||
output := b.String()
|
||||
|
||||
if test.expected_output != output {
|
||||
t.Fatalf("unexpected output:\n%q\nExpecting:\n%q", output, test.expected_output)
|
||||
t.Fatal("unexpected output:", output, "Expecting:", test.expected_output)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestReviewRequired(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
maintainers []string
|
||||
config *common.AutogitConfig
|
||||
is_approved bool
|
||||
}{
|
||||
{
|
||||
name: "ReviewRequired=false",
|
||||
maintainers: []string{"maintainer1", "maintainer2"},
|
||||
config: &common.AutogitConfig{ReviewRequired: false},
|
||||
is_approved: true,
|
||||
},
|
||||
{
|
||||
name: "ReviewRequired=true",
|
||||
maintainers: []string{"maintainer1", "maintainer2"},
|
||||
config: &common.AutogitConfig{ReviewRequired: true},
|
||||
is_approved: false,
|
||||
},
|
||||
{
|
||||
name: "ReviewRequired=true",
|
||||
maintainers: []string{"maintainer1"},
|
||||
config: &common.AutogitConfig{ReviewRequired: true},
|
||||
is_approved: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
m := &common.MaintainershipMap{
|
||||
Data: map[string][]string{"": test.maintainers},
|
||||
}
|
||||
m.Config = test.config
|
||||
if approved := m.IsApproved("", nil, "maintainer1", nil); approved != test.is_approved {
|
||||
t.Error("Expected m.IsApproved()->", test.is_approved, "but didn't get it")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMaintainershipDataCorruption_PackageAppend(t *testing.T) {
	// Test corruption when append happens (merging project maintainers)
	// If backing array has capacity, append writes to it.

	// We construct a slice with capacity > len to simulate this common scenario
	backingArray := make([]string, 1, 10)
	backingArray[0] = "@g1"

	initialData := map[string][]string{
		"pkg": backingArray, // len 1, cap 10
		"": {"prjUser"},
	}

	m := &common.MaintainershipMap{
		Data: initialData,
	}

	groups := []*common.ReviewGroup{
		{
			Name: "@g1",
			Reviewers: []string{"u1"},
		},
	}

	// ListPackageMaintainers("pkg", groups)
	// 1. gets ["@g1"] (cap 10)
	// 2. Appends "prjUser" -> ["@g1", "prjUser"] (in backing array)
	// 3. Expands "@g1" -> "u1".
	//    Replace: ["u1", "prjUser"]
	//    Sort: ["prjUser", "u1"]
	//
	// The backing array is now ["prjUser", "u1", ...]
	// The map entry "pkg" is still len 1.
	// So it sees ["prjUser"].

	list1 := m.ListPackageMaintainers("pkg", groups)
	t.Logf("List1: %v", list1)

	// ListPackageMaintainers("pkg", nil)
	// Should be ["@g1", "prjUser"] (because prjUser is appended from project maintainers)
	// But since backing array is corrupted:
	// It sees ["prjUser"] (from map) + appends "prjUser" -> ["prjUser", "prjUser"].

	list2 := m.ListPackageMaintainers("pkg", nil)
	t.Logf("List2: %v", list2)

	if !slices.Contains(list2, "@g1") {
		t.Errorf("Corruption: '@g1' is missing from second call. Got %v", list2)
	}
}

func TestMaintainershipDataCorruption_ProjectInPlace(t *testing.T) {
	// Test corruption in ListProjectMaintainers when replacement fits in place
	// e.g. replacing 1 group with 1 user.

	initialData := map[string][]string{
		"": {"@g1"},
	}

	m := &common.MaintainershipMap{
		Data: initialData,
	}

	groups := []*common.ReviewGroup{
		{
			Name: "@g1",
			Reviewers: []string{"u1"},
		},
	}

	// First call with expansion
	// Replaces "@g1" with "u1". Length stays 1. Modifies backing array in place.
	list1 := m.ListProjectMaintainers(groups)
	t.Logf("List1: %v", list1)

	// Second call without expansion
	// Should return ["@g1"]
	list2 := m.ListProjectMaintainers(nil)
	t.Logf("List2: %v", list2)

	if !slices.Contains(list2, "@g1") {
		t.Errorf("Corruption: '@g1' is missing from second call (Project). Got %v", list2)
	}
}
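Both corruption tests above exercise the same slice-aliasing hazard: a map value whose backing array has spare capacity is shared with whatever append() returns, so an in-place replacement or sort leaks back into the map. A minimal standalone sketch of the hazard and the slices.Clone fix (illustrative only, not the package API):

package main

import (
	"fmt"
	"slices"
)

func main() {
	backing := make([]string, 1, 10) // len 1, cap 10: append can grow in place
	backing[0] = "@g1"
	data := map[string][]string{"pkg": backing}

	// Unsafe: append reuses the spare capacity, and the "group expansion"
	// then rewrites element 0 of the array the map still points at.
	grown := append(data["pkg"], "prjUser")
	grown[0] = "u1"                           // expand "@g1" -> "u1" in place
	fmt.Println("map now sees:", data["pkg"]) // [u1]: the "@g1" entry is gone

	// Safe: clone before modifying, so the map's view stays intact.
	data["pkg"] = []string{"@g1"}
	expanded := append(slices.Clone(data["pkg"]), "prjUser")
	expanded[0] = "u1"
	fmt.Println("map still sees:", data["pkg"]) // [@g1]
}
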
File diff suppressed because it is too large
@@ -116,18 +116,13 @@ type Flags struct {
|
||||
Contents string `xml:",innerxml"`
|
||||
}
|
||||
|
||||
type ProjectLinkMeta struct {
|
||||
Project string `xml:"project,attr"`
|
||||
}
|
||||
|
||||
type ProjectMeta struct {
|
||||
XMLName xml.Name `xml:"project"`
|
||||
Name string `xml:"name,attr"`
|
||||
Title string `xml:"title"`
|
||||
Description string `xml:"description"`
|
||||
Url string `xml:"url,omitempty"`
|
||||
ScmSync string `xml:"scmsync,omitempty"`
|
||||
Link []ProjectLinkMeta `xml:"link"`
|
||||
ScmSync string `xml:"scmsync"`
|
||||
Persons []PersonRepoMeta `xml:"person"`
|
||||
Groups []GroupRepoMeta `xml:"group"`
|
||||
Repositories []RepositoryMeta `xml:"repository"`
|
||||
@@ -143,8 +138,8 @@ type ProjectMeta struct {
|
||||
type PackageMeta struct {
|
||||
XMLName xml.Name `xml:"package"`
|
||||
Name string `xml:"name,attr"`
|
||||
Project string `xml:"project,attr,omitempty"`
|
||||
ScmSync string `xml:"scmsync,omitempty"`
|
||||
Project string `xml:"project,attr"`
|
||||
ScmSync string `xml:"scmsync"`
|
||||
Persons []PersonRepoMeta `xml:"person"`
|
||||
Groups []GroupRepoMeta `xml:"group"`
|
||||
|
||||
338 common/pr.go
@@ -4,8 +4,6 @@ import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
@@ -23,8 +21,7 @@ type PRSet struct {
|
||||
PRs []*PRInfo
|
||||
Config *AutogitConfig
|
||||
|
||||
BotUser string
|
||||
HasAutoStaging bool
|
||||
BotUser string
|
||||
}
|
||||
|
||||
func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
|
||||
@@ -34,41 +31,6 @@ func (prinfo *PRInfo) PRComponents() (org string, repo string, idx int64) {
|
||||
return
|
||||
}
|
||||
|
||||
func (prinfo *PRInfo) RemoveReviewers(gitea GiteaUnreviewTimelineFetcher, Reviewers []string, BotUser string) {
|
||||
org, repo, idx := prinfo.PRComponents()
|
||||
tl, err := gitea.GetTimeline(org, repo, idx)
|
||||
if err != nil {
|
||||
LogError("Failed to fetch timeline for", PRtoString(prinfo.PR), err)
|
||||
}
|
||||
|
||||
// find review request for each reviewer
|
||||
ReviewersToUnrequest := Reviewers
|
||||
ReviewersAlreadyChecked := []string{}
|
||||
|
||||
for _, tlc := range tl {
|
||||
if tlc.Type == TimelineCommentType_ReviewRequested && tlc.Assignee != nil {
|
||||
user := tlc.Assignee.UserName
|
||||
|
||||
if idx := slices.Index(ReviewersToUnrequest, user); idx >= 0 && !slices.Contains(ReviewersAlreadyChecked, user) {
|
||||
if tlc.User != nil && tlc.User.UserName == BotUser {
|
||||
ReviewersAlreadyChecked = append(ReviewersAlreadyChecked, user)
|
||||
continue
|
||||
}
|
||||
ReviewersToUnrequest = slices.Delete(ReviewersToUnrequest, idx, idx+1)
|
||||
if len(Reviewers) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LogDebug("Unrequesting reviewes for", PRtoString(prinfo.PR), ReviewersToUnrequest)
|
||||
err = gitea.UnrequestReview(org, repo, idx, ReviewersToUnrequest...)
|
||||
if err != nil {
|
||||
LogError("Failed to unrequest reviewers for", PRtoString(prinfo.PR), err)
|
||||
}
|
||||
}
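RemoveReviewers above withdraws only the review requests that the bot itself created: it walks the timeline's review-requested events and drops from the unrequest list any reviewer whose request came from a different user. A minimal standalone sketch of that filter (hypothetical types, not the gitea client models):

package main

import (
	"fmt"
	"slices"
)

type reviewRequest struct {
	Requester string // who issued the review request
	Reviewer  string // who was asked to review
}

// reviewersToUnrequest keeps a candidate only if the first request for that
// reviewer found on the timeline was made by the bot itself.
func reviewersToUnrequest(timeline []reviewRequest, candidates []string, botUser string) []string {
	keep := slices.Clone(candidates)
	checked := []string{}
	for _, ev := range timeline {
		idx := slices.Index(keep, ev.Reviewer)
		if idx < 0 || slices.Contains(checked, ev.Reviewer) {
			continue
		}
		if ev.Requester == botUser {
			checked = append(checked, ev.Reviewer) // bot-requested: safe to unrequest
			continue
		}
		keep = slices.Delete(keep, idx, idx+1) // requested by a human: leave it alone
	}
	return keep
}

func main() {
	tl := []reviewRequest{
		{Requester: "bot", Reviewer: "m1"},
		{Requester: "human", Reviewer: "m2"},
	}
	fmt.Println(reviewersToUnrequest(tl, []string{"m1", "m2"}, "bot")) // [m1]
}
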
|
||||
|
||||
func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRInfo, config *AutogitConfig) ([]*PRInfo, error) {
|
||||
for _, p := range currentSet {
|
||||
if pr.Index == p.PR.Index && pr.Base.Repo.Name == p.PR.Base.Repo.Name && pr.Base.Repo.Owner.UserName == p.PR.Base.Repo.Owner.UserName {
|
||||
@@ -99,7 +61,7 @@ func readPRData(gitea GiteaPRFetcher, pr *models.PullRequest, currentSet []*PRIn
|
||||
|
||||
var Timeline_RefIssueNotFound error = errors.New("RefIssue not found on the timeline")
|
||||
|
||||
func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineReviewFetcher, org, repo string, num int64, config *AutogitConfig) (*models.PullRequest, error) {
|
||||
func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineFetcher, org, repo string, num int64, config *AutogitConfig) (*models.PullRequest, error) {
|
||||
timeline, err := gitea.GetTimeline(org, repo, num)
|
||||
if err != nil {
|
||||
LogError("Failed to fetch timeline for", org, repo, "#", num, err)
|
||||
@@ -124,19 +86,14 @@ func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineReviewFetcher,
|
||||
}
|
||||
|
||||
pr, err := gitea.GetPullRequest(prjGitOrg, prjGitRepo, issue.Index)
|
||||
if err != nil {
|
||||
switch err.(type) {
|
||||
case *repository.RepoGetPullRequestNotFound: // deleted?
|
||||
continue
|
||||
default:
|
||||
LogDebug("PrjGit RefIssue fetch error from timeline", issue.Index, err)
|
||||
continue
|
||||
}
|
||||
switch err.(type) {
|
||||
case *repository.RepoGetPullRequestNotFound: // deleted?
|
||||
continue
|
||||
default:
|
||||
LogDebug("PrjGit RefIssue fetch error from timeline", issue.Index, err)
|
||||
}
|
||||
|
||||
LogDebug("found ref PR on timeline:", PRtoString(pr))
|
||||
if pr.Base.Name != prjGitBranch {
|
||||
LogDebug(" -> not matching:", pr.Base.Name, prjGitBranch)
|
||||
if pr.Base.Ref != prjGitBranch {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -156,12 +113,10 @@ func LastPrjGitRefOnTimeline(botUser string, gitea GiteaPRTimelineReviewFetcher,
|
||||
return nil, Timeline_RefIssueNotFound
|
||||
}
|
||||
|
||||
func FetchPRSet(user string, gitea GiteaPRTimelineReviewFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
|
||||
func FetchPRSet(user string, gitea GiteaPRTimelineFetcher, org, repo string, num int64, config *AutogitConfig) (*PRSet, error) {
|
||||
var pr *models.PullRequest
|
||||
var err error
|
||||
|
||||
gitea.ResetTimelineCache(org, repo, num)
|
||||
|
||||
prjGitOrg, prjGitRepo, _ := config.GetPrjGit()
|
||||
if prjGitOrg == org && prjGitRepo == repo {
|
||||
if pr, err = gitea.GetPullRequest(org, repo, num); err != nil {
|
||||
@@ -184,16 +139,6 @@ func FetchPRSet(user string, gitea GiteaPRTimelineReviewFetcher, org, repo strin
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, pr := range prs {
|
||||
org, repo, idx := pr.PRComponents()
|
||||
gitea.ResetTimelineCache(org, repo, idx)
|
||||
reviews, err := FetchGiteaReviews(gitea, org, repo, idx)
|
||||
if err != nil {
|
||||
LogError("Error fetching reviews for", PRtoString(pr.PR), ":", err)
|
||||
}
|
||||
pr.Reviews = reviews
|
||||
}
|
||||
|
||||
return &PRSet{
|
||||
PRs: prs,
|
||||
Config: config,
|
||||
@@ -201,12 +146,6 @@ func FetchPRSet(user string, gitea GiteaPRTimelineReviewFetcher, org, repo strin
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (prset *PRSet) RemoveReviewers(gitea GiteaUnreviewTimelineFetcher, reviewers []string) {
|
||||
for _, prinfo := range prset.PRs {
|
||||
prinfo.RemoveReviewers(gitea, reviewers, prset.BotUser)
|
||||
}
|
||||
}
|
||||
|
||||
func (rs *PRSet) Find(pr *models.PullRequest) (*PRInfo, bool) {
|
||||
for _, p := range rs.PRs {
|
||||
if p.PR.Base.RepoID == pr.Base.RepoID &&
|
||||
@@ -292,150 +231,67 @@ next_rs:
|
||||
}
|
||||
|
||||
for _, pr := range prjpr_set {
|
||||
if strings.EqualFold(prinfo.PR.Base.Repo.Owner.UserName, pr.Org) && strings.EqualFold(prinfo.PR.Base.Repo.Name, pr.Repo) && prinfo.PR.Index == pr.Num {
|
||||
if prinfo.PR.Base.Repo.Owner.UserName == pr.Org && prinfo.PR.Base.Repo.Name == pr.Repo && prinfo.PR.Index == pr.Num {
|
||||
continue next_rs
|
||||
}
|
||||
}
|
||||
LogDebug(" PR: ", PRtoString(prinfo.PR), "not found in project git PRSet")
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (rs *PRSet) FindMissingAndExtraReviewers(maintainers MaintainershipData, idx int) (missing, extra []string) {
|
||||
func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequester, maintainers MaintainershipData) error {
|
||||
configReviewers := ParseReviewers(rs.Config.Reviewers)
|
||||
|
||||
// remove reviewers that were already requested and are not stale
|
||||
prjMaintainers := maintainers.ListProjectMaintainers(nil)
|
||||
LogDebug("project maintainers:", prjMaintainers)
|
||||
for _, pr := range rs.PRs {
|
||||
reviewers := []string{}
|
||||
|
||||
pr := rs.PRs[idx]
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
missing = slices.Concat(configReviewers.Prj, configReviewers.PrjOptional)
|
||||
if rs.HasAutoStaging {
|
||||
missing = append(missing, Bot_BuildReview)
|
||||
}
|
||||
LogDebug("PrjGit submitter:", pr.PR.User.UserName)
|
||||
// only need project maintainer reviews if:
|
||||
// * not created by a bot and has other PRs, or
|
||||
// * not created by maintainer
|
||||
noReviewPRCreators := []string{}
|
||||
if !rs.Config.ReviewRequired {
|
||||
noReviewPRCreators = prjMaintainers
|
||||
}
|
||||
if len(rs.PRs) > 1 {
|
||||
noReviewPRCreators = append(noReviewPRCreators, rs.BotUser)
|
||||
}
|
||||
if slices.Contains(noReviewPRCreators, pr.PR.User.UserName) || pr.Reviews.IsReviewedByOneOf(prjMaintainers...) {
|
||||
LogDebug("Project already reviewed by a project maintainer, remove rest")
|
||||
// do not remove reviewers if they are also maintainers
|
||||
prjMaintainers = slices.DeleteFunc(prjMaintainers, func(m string) bool { return slices.Contains(missing, m) })
|
||||
extra = slices.Concat(prjMaintainers, []string{rs.BotUser})
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
reviewers = slices.Concat(configReviewers.Prj, configReviewers.PrjOptional)
|
||||
LogDebug("PrjGit submitter:", pr.PR.User.UserName)
|
||||
if len(rs.PRs) == 1 {
|
||||
reviewers = slices.Concat(reviewers, maintainers.ListProjectMaintainers(nil))
|
||||
}
|
||||
} else {
|
||||
// if bot not created PrjGit or prj maintainer, we need to add project reviewers here
|
||||
if slices.Contains(noReviewPRCreators, pr.PR.User.UserName) {
|
||||
LogDebug("No need for project maintainers")
|
||||
extra = slices.Concat(prjMaintainers, []string{rs.BotUser})
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
reviewers = slices.Concat(configReviewers.Pkg, maintainers.ListProjectMaintainers(nil), maintainers.ListPackageMaintainers(pkg, nil), configReviewers.PkgOptional)
|
||||
}
|
||||
|
||||
slices.Sort(reviewers)
|
||||
reviewers = slices.Compact(reviewers)
|
||||
|
||||
// submitters do not need to review their own work
|
||||
if idx := slices.Index(reviewers, pr.PR.User.UserName); idx != -1 {
|
||||
reviewers = slices.Delete(reviewers, idx, idx+1)
|
||||
}
|
||||
|
||||
LogDebug("PR: ", pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
LogDebug("reviewers for PR:", reviewers)
|
||||
|
||||
// remove reviewers that were already requested and are not stale
|
||||
reviews, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Error fetching reviews:", err)
|
||||
return err
|
||||
}
|
||||
|
||||
for idx := 0; idx < len(reviewers); {
|
||||
user := reviewers[idx]
|
||||
if reviews.HasPendingReviewBy(user) || reviews.IsReviewedBy(user) {
|
||||
reviewers = slices.Delete(reviewers, idx, idx+1)
|
||||
LogDebug("removing reviewer:", user)
|
||||
} else {
|
||||
LogDebug("Adding prjMaintainers to PrjGit")
|
||||
missing = append(missing, prjMaintainers...)
|
||||
idx++
|
||||
}
|
||||
}
|
||||
} else {
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
pkgMaintainers := maintainers.ListPackageMaintainers(pkg, nil)
|
||||
Maintainers := slices.Concat(prjMaintainers, pkgMaintainers)
|
||||
noReviewPkgPRCreators := []string{}
|
||||
if !rs.Config.ReviewRequired {
|
||||
noReviewPkgPRCreators = pkgMaintainers
|
||||
}
|
||||
|
||||
LogDebug("packakge maintainers:", Maintainers)
|
||||
|
||||
missing = slices.Concat(configReviewers.Pkg, configReviewers.PkgOptional)
|
||||
if slices.Contains(noReviewPkgPRCreators, pr.PR.User.UserName) || pr.Reviews.IsReviewedByOneOf(Maintainers...) {
|
||||
// submitter is maintainer or already reviewed
|
||||
LogDebug("Package reviewed by maintainer (or subitter is maintainer), remove the rest of them")
|
||||
// do not remove reviewers if they are also maintainers
|
||||
Maintainers = slices.DeleteFunc(Maintainers, func(m string) bool { return slices.Contains(missing, m) })
|
||||
extra = slices.Concat(Maintainers, []string{rs.BotUser})
|
||||
} else {
|
||||
// maintainer review is missing
|
||||
LogDebug("Adding package maintainers to package git")
|
||||
missing = append(missing, pkgMaintainers...)
|
||||
}
|
||||
}
|
||||
|
||||
slices.Sort(missing)
|
||||
missing = slices.Compact(missing)
|
||||
|
||||
slices.Sort(extra)
|
||||
extra = slices.Compact(extra)
|
||||
|
||||
// submitters cannot review their own work
|
||||
if idx := slices.Index(missing, pr.PR.User.UserName); idx != -1 {
|
||||
missing = slices.Delete(missing, idx, idx+1)
|
||||
}
|
||||
|
||||
LogDebug("PR: ", PRtoString(pr.PR))
|
||||
LogDebug(" preliminary add reviewers for PR:", missing)
|
||||
LogDebug(" preliminary rm reviewers for PR:", extra)
|
||||
|
||||
// remove missing reviewers that are already done or already pending
|
||||
for idx := 0; idx < len(missing); {
|
||||
user := missing[idx]
|
||||
if pr.Reviews.HasPendingReviewBy(user) || pr.Reviews.IsReviewedBy(user) {
|
||||
missing = slices.Delete(missing, idx, idx+1)
|
||||
LogDebug(" removing done/pending reviewer:", user)
|
||||
} else {
|
||||
idx++
|
||||
}
|
||||
}
|
||||
|
||||
// remove extra reviews that are actually only pending, and only pending by us
|
||||
for idx := 0; idx < len(extra); {
|
||||
user := extra[idx]
|
||||
rr := pr.Reviews.FindReviewRequester(user)
|
||||
if rr != nil && rr.User.UserName == rs.BotUser && pr.Reviews.HasPendingReviewBy(user) {
|
||||
// good to remove this review
|
||||
idx++
|
||||
} else {
|
||||
// this review should not be considered as extra by us
|
||||
LogDebug(" - cannot find? to remove", user)
|
||||
if rr != nil {
|
||||
LogDebug(" ", rr.User.UserName, "vs.", rs.BotUser, pr.Reviews.HasPendingReviewBy(user))
|
||||
}
|
||||
extra = slices.Delete(extra, idx, idx+1)
|
||||
}
|
||||
}
|
||||
|
||||
LogDebug(" add reviewers for PR:", missing)
|
||||
LogDebug(" rm reviewers for PR:", extra)
|
||||
|
||||
return missing, extra
|
||||
}
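FindMissingAndExtraReviewers normalises both lists the same way: sort, collapse duplicates with slices.Compact (which only removes adjacent duplicates, hence the sort first), and drop the submitter, who never reviews their own PR. A minimal standalone sketch of that step (illustrative helper, not part of the package):

package main

import (
	"fmt"
	"slices"
)

func normalizeReviewers(reviewers []string, submitter string) []string {
	out := slices.Clone(reviewers)
	slices.Sort(out)
	out = slices.Compact(out) // Compact drops adjacent duplicates only
	if idx := slices.Index(out, submitter); idx != -1 {
		out = slices.Delete(out, idx, idx+1) // submitters never review their own work
	}
	return out
}

func main() {
	fmt.Println(normalizeReviewers([]string{"m1", "alice", "m1", "bob"}, "bob")) // [alice m1]
}
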
|
||||
|
||||
func (rs *PRSet) AssignReviewers(gitea GiteaReviewFetcherAndRequesterAndUnrequester, maintainers MaintainershipData) error {
|
||||
for idx, pr := range rs.PRs {
|
||||
missingReviewers, extraReviewers := rs.FindMissingAndExtraReviewers(maintainers, idx)
|
||||
|
||||
if len(missingReviewers) > 0 {
|
||||
LogDebug(" Requesting reviews from:", missingReviewers)
|
||||
// get maintainers associated with the PR too
|
||||
if len(reviewers) > 0 {
|
||||
LogDebug("Requesting reviews from:", reviewers)
|
||||
if !IsDryRun {
|
||||
for _, r := range missingReviewers {
|
||||
for _, r := range reviewers {
|
||||
if _, err := gitea.RequestReviews(pr.PR, r); err != nil {
|
||||
LogError("Cannot create reviews on", PRtoString(pr.PR), "for user:", r, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(extraReviewers) > 0 {
|
||||
LogDebug(" UnRequesting reviews from:", extraReviewers)
|
||||
if !IsDryRun {
|
||||
for _, r := range extraReviewers {
|
||||
org, repo, idx := pr.PRComponents()
|
||||
if err := gitea.UnrequestReview(org, repo, idx, r); err != nil {
|
||||
LogError("Cannot unrequest reviews on", PRtoString(pr.PR), "for user:", r, err)
|
||||
LogError("Cannot create reviews on", fmt.Sprintf("%s/%s!%d for [%s]", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index, strings.Join(reviewers, ", ")), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -461,12 +317,11 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
if err == nil && prjgit != nil {
|
||||
reviewers := slices.Concat(configReviewers.Prj, maintainers.ListProjectMaintainers(groups))
|
||||
LogDebug("Fetching reviews for", prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, prjgit.PR.Base.Repo.Owner.UserName, prjgit.PR.Base.Repo.Name, prjgit.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Cannot fetch gita reaviews for PR:", err)
|
||||
return false
|
||||
}
|
||||
r.RequestedReviewers = reviewers
|
||||
prjgit.Reviews = r
|
||||
if prjgit.Reviews.IsManualMergeOK() {
|
||||
is_manually_reviewed_ok = true
|
||||
@@ -482,12 +337,11 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
pkg := pr.PR.Base.Repo.Name
|
||||
reviewers := slices.Concat(configReviewers.Pkg, maintainers.ListPackageMaintainers(pkg, groups))
|
||||
LogDebug("Fetching reviews for", pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Cannot fetch gita reaviews for PR:", err)
|
||||
return false
|
||||
}
|
||||
r.RequestedReviewers = reviewers
|
||||
pr.Reviews = r
|
||||
if !pr.Reviews.IsManualMergeOK() {
|
||||
LogInfo("Not approved manual merge. PR:", pr.PR.URL)
|
||||
@@ -509,9 +363,6 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
var pkg string
|
||||
if rs.IsPrjGitPR(pr.PR) {
|
||||
reviewers = configReviewers.Prj
|
||||
if rs.HasAutoStaging {
|
||||
reviewers = append(reviewers, Bot_BuildReview)
|
||||
}
|
||||
pkg = ""
|
||||
} else {
|
||||
reviewers = configReviewers.Pkg
|
||||
@@ -523,12 +374,11 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
return false
|
||||
}
|
||||
|
||||
r, err := FetchGiteaReviews(gitea, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
r, err := FetchGiteaReviews(gitea, reviewers, pr.PR.Base.Repo.Owner.UserName, pr.PR.Base.Repo.Name, pr.PR.Index)
|
||||
if err != nil {
|
||||
LogError("Cannot fetch gitea reaviews for PR:", err)
|
||||
return false
|
||||
}
|
||||
r.RequestedReviewers = reviewers
|
||||
|
||||
is_manually_reviewed_ok = r.IsApproved()
|
||||
LogDebug("PR to", pr.PR.Base.Repo.Name, "reviewed?", is_manually_reviewed_ok)
|
||||
@@ -541,7 +391,7 @@ func (rs *PRSet) IsApproved(gitea GiteaPRChecker, maintainers MaintainershipData
|
||||
|
||||
if need_maintainer_review := !rs.IsPrjGitPR(pr.PR) || pr.PR.User.UserName != rs.BotUser; need_maintainer_review {
|
||||
// Do not expand groups here, as the group-review-bot will ACK if group has reviewed.
|
||||
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.Reviews, pr.PR.User.UserName, nil); !is_manually_reviewed_ok {
|
||||
if is_manually_reviewed_ok = maintainers.IsApproved(pkg, r.reviews, pr.PR.User.UserName, nil); !is_manually_reviewed_ok {
|
||||
LogDebug(" not approved?", pkg)
|
||||
return false
|
||||
}
|
||||
@@ -581,80 +431,8 @@ func (rs *PRSet) Merge(gitea GiteaReviewUnrequester, git Git) error {
|
||||
|
||||
err = git.GitExec(DefaultGitPrj, "merge", "--no-ff", "-m", msg, prjgit.Head.Sha)
|
||||
if err != nil {
|
||||
status, statusErr := git.GitStatus(DefaultGitPrj)
|
||||
if statusErr != nil {
|
||||
return fmt.Errorf("Failed to merge: %w . Status also failed: %w", err, statusErr)
|
||||
}
|
||||
|
||||
// we can only resolve conflicts with .gitmodules
|
||||
for _, s := range status {
|
||||
if s.Status == GitStatus_Unmerged {
|
||||
panic("Can't handle conflicts yet")
|
||||
if s.Path != ".gitmodules" {
|
||||
return err
|
||||
}
|
||||
|
||||
submodules, err := git.GitSubmoduleList(DefaultGitPrj, "MERGE_HEAD")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch submodules during merge resolution: %w", err)
|
||||
}
|
||||
s1, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[0])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s2, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[1])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
s3, err := git.GitExecWithOutput(DefaultGitPrj, "cat-file", "blob", s.States[2])
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed fetching data during .gitmodules merge resoulution: %w", err)
|
||||
}
|
||||
|
||||
subs1, err := ParseSubmodulesFile(strings.NewReader(s1))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs2, err := ParseSubmodulesFile(strings.NewReader(s2))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
subs3, err := ParseSubmodulesFile(strings.NewReader(s3))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed parsing submodule file [%s] in merge: %w", s.States[0], err)
|
||||
}
|
||||
|
||||
// merge from subs3 (target), subs1 (orig), subs2 (2-nd base that is missing from target base)
|
||||
// this will update submodules
|
||||
mergedSubs := slices.Concat(subs1, subs2, subs3)
|
||||
|
||||
var filteredSubs []Submodule = make([]Submodule, 0, max(len(subs1), len(subs2), len(subs3)))
|
||||
nextSub:
|
||||
for subName := range submodules {
|
||||
|
||||
for i := range mergedSubs {
|
||||
if path.Base(mergedSubs[i].Path) == subName {
|
||||
filteredSubs = append(filteredSubs, mergedSubs[i])
|
||||
continue nextSub
|
||||
}
|
||||
}
|
||||
return fmt.Errorf("Cannot find submodule for path: %s", subName)
|
||||
}
|
||||
|
||||
out, err := os.Create(path.Join(git.GetPath(), DefaultGitPrj, ".gitmodules"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Can't open .gitmodules for writing: %w", err)
|
||||
}
|
||||
if err = WriteSubmodules(filteredSubs, out); err != nil {
|
||||
return fmt.Errorf("Can't write .gitmodules: %w", err)
|
||||
}
|
||||
if err = out.Close(); err != nil {
|
||||
return fmt.Errorf("Can't close .gitmodules: %w", err)
|
||||
}
|
||||
|
||||
git.GitExecOrPanic(DefaultGitPrj, "add", ".gitmodules")
|
||||
git.GitExecOrPanic(DefaultGitPrj, "-c", "core.editor=true", "merge", "--continue")
|
||||
}
|
||||
if resolveError := git.GitResolveConflicts(DefaultGitPrj, prjgit.MergeBase, prjgit.Base.Sha, prjgit.Head.Sha); resolveError != nil {
|
||||
return fmt.Errorf("Merge failed. (%w): %w", err, resolveError)
|
||||
}
|
||||
}
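The block removed above resolved .gitmodules conflicts by taking the union of the three conflict stages, keeping exactly one entry per submodule that MERGE_HEAD says should exist, and regenerating the file; the replacement line delegates the whole resolution to GitResolveConflicts. A minimal standalone sketch of the old union-then-filter step (hypothetical Submodule type; parsing and writing of .gitmodules are elided):

package main

import (
	"fmt"
	"path"
	"slices"
)

type Submodule struct {
	Path string
	URL  string
}

// resolveGitmodules merges the three conflict stages and keeps one entry per
// submodule name that the merge target actually references.
func resolveGitmodules(stages [3][]Submodule, wanted []string) ([]Submodule, error) {
	merged := slices.Concat(stages[0], stages[1], stages[2])
	resolved := make([]Submodule, 0, len(wanted))
nextSub:
	for _, name := range wanted {
		for _, sub := range merged {
			if path.Base(sub.Path) == name {
				resolved = append(resolved, sub)
				continue nextSub
			}
		}
		return nil, fmt.Errorf("cannot find submodule for path: %s", name)
	}
	return resolved, nil
}

func main() {
	stages := [3][]Submodule{
		{{Path: "pkg/a", URL: "https://example.com/a.git"}},
		{{Path: "pkg/b", URL: "https://example.com/b.git"}},
		{{Path: "pkg/a", URL: "https://example.com/a.git"}},
	}
	fmt.Println(resolveGitmodules(stages, []string{"a", "b"}))
}
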
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ package common_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
@@ -75,7 +76,7 @@ func TestPR(t *testing.T) {
|
||||
consistentSet bool
|
||||
prjGitPRIndex int
|
||||
|
||||
reviewSetFetcher func(*mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error)
|
||||
reviewSetFetcher func(*mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error)
|
||||
}{
|
||||
{
|
||||
name: "Error fetching PullRequest",
|
||||
@@ -147,7 +148,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &baseConfig)
|
||||
},
|
||||
},
|
||||
@@ -179,7 +180,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &baseConfig)
|
||||
},
|
||||
},
|
||||
@@ -207,7 +208,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -241,7 +242,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -275,7 +276,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -311,7 +312,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -346,7 +347,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -388,7 +389,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -430,7 +431,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -473,7 +474,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: false,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
return common.FetchPRSet("test", mock, "foo", "barPrj", 42, &common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2"},
|
||||
Branch: "branch",
|
||||
@@ -500,7 +501,7 @@ func TestPR(t *testing.T) {
|
||||
prjGitPRIndex: 0,
|
||||
consistentSet: true,
|
||||
reviewed: true,
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineReviewFetcher) (*common.PRSet, error) {
|
||||
reviewSetFetcher: func(mock *mock_common.MockGiteaPRTimelineFetcher) (*common.PRSet, error) {
|
||||
config := common.AutogitConfig{
|
||||
Reviewers: []string{"+super1", "*super2", "m1", "-m2", "~*bot"},
|
||||
Branch: "branch",
|
||||
@@ -515,7 +516,7 @@ func TestPR(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineReviewFetcher(ctl)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaPRChecker(ctl)
|
||||
// reviewer_mock := mock_common.NewMockGiteaReviewRequester(ctl)
|
||||
|
||||
@@ -619,566 +620,283 @@ func TestPR(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindMissingAndExtraReviewers(t *testing.T) {
|
||||
func TestPRAssignReviewers(t *testing.T) {
|
||||
t.Skip("FAIL: unexpected calls, missing calls")
|
||||
tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
prset *common.PRSet
|
||||
maintainers common.MaintainershipData
|
||||
pkgReviews []*models.PullReview
|
||||
pkgTimeline []*models.TimelineComment
|
||||
prjReviews []*models.PullReview
|
||||
prjTimeline []*models.TimelineComment
|
||||
|
||||
noAutoStaging bool
|
||||
|
||||
expected_missing_reviewers [][]string
|
||||
expected_extra_reviewers [][]string
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "No reviewers",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer only",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer only and no auto staging",
|
||||
noAutoStaging: true,
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
nil,
|
||||
{"user1"},
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "One project reviewer and one pkg reviewer only",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{"user2"},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"user2", "prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No need to get reviews of submitter reviewer",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{{State: common.ReviewStateRequestReview, User: &models.User{UserName: "m1"}}},
|
||||
RequestedReviewers: []string{"m1"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "m1"}, Type: common.TimelineCommentType_ReviewRequested},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"m1", "submitter"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
nil,
|
||||
{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
expected_extra_reviewers: [][]string{
|
||||
{"m1"},
|
||||
name: "No need to get reviews of submitter",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"prjmaintainer", "pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "No need to get reviews of submitter maintainer",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "foo"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
name: "Reviews are done",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "pkgmaintainer"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"submitter"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{},
|
||||
},
|
||||
{
|
||||
name: "Add reviewer if also maintainer where review by maintainer is not needed",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter", "*reviewer"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
name: "Stale review is not done, re-request it",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "org/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2"},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"submitter", "reviewer"}, "": []string{"reviewer"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{"reviewer"},
|
||||
[]string{"autogits_obs_staging_bot", "reviewer", "user1"},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "Dont remove reviewer if also maintainer",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{{State: common.ReviewStateRequestReview, User: &models.User{UserName: "reviewer"}}},
|
||||
RequestedReviewers: []string{"reviewer"},
|
||||
FullTimeline: []*models.TimelineComment{{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "reviewer"}}},
|
||||
},
|
||||
},
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{{State: common.ReviewStateRequestReview, User: &models.User{UserName: "reviewer"}}},
|
||||
RequestedReviewers: []string{"reviewer"},
|
||||
FullTimeline: []*models.TimelineComment{{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "reviewer"}}},
|
||||
},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter", "*reviewer"},
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"submitter", "reviewer"}, "": []string{"reviewer"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{
|
||||
[]string{},
|
||||
[]string{"autogits_obs_staging_bot", "user1"},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "Extra project reviewer on the package",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "pkgmaintainer"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prjmaintainer"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user2", "pkgmaintainer", "prjmaintainer"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "user2"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prjmaintainer"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user1", "autogits_obs_staging_bot"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "user1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
Stale: true,
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
Stale: true,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"pkgmaintainer"}, "": {"prjmaintainer"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{},
|
||||
expected_extra_reviewers: [][]string{{"prjmaintainer"}},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer"}},
|
||||
},
|
||||
{
|
||||
name: "Extra project reviewers on the package and project",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "pkgmaintainer"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prjmaintainer"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "pkgm1"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "pkgm2"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj1"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj2"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "someother"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user2", "pkgmaintainer", "prjmaintainer", "pkgm1", "pkgm2", "someother", "prj1", "prj2"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prjmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj2"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgm1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgm2"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prg"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj1"}},
|
||||
{State: common.ReviewStatePending, User: &models.User{UserName: "prj2"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user1", "autogits_obs_staging_bot", "prj1", "prj2"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj2"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
name: "Stale optional review is not done, re-request it",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "user2", "~bot"},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"pkgmaintainer", "pkgm1", "pkgm2"}, "": {"prjmaintainer", "prj1", "prj2"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{},
|
||||
expected_extra_reviewers: [][]string{{"pkgm1", "pkgm2", "prj1", "prj2", "prjmaintainer"}, {"prj1", "prj2"}},
|
||||
},
|
||||
{
|
||||
name: "No extra project reviewers on the package and project (all pending)",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "submitter"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "pkgmaintainer"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "prjmaintainer"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "pkgm1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "prj1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "someother"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user2", "pkgmaintainer", "prjmaintainer", "pkgm1", "someother", "prj1"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgm1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "pkgmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prjmaintainer"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "!bot"}, Assignee: &models.User{UserName: "someother"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "bot"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "prj"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{
|
||||
Reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "prj1"}},
|
||||
},
|
||||
RequestedReviewers: []string{"user1", "autogits_obs_staging_bot", "prj1"},
|
||||
FullTimeline: []*models.TimelineComment{
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "autogits_obs_staging_bot"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "bot"}, Assignee: &models.User{UserName: "prj1"}},
|
||||
{Type: common.TimelineCommentType_ReviewRequested, User: &models.User{UserName: "!bot"}, Assignee: &models.User{UserName: "user1"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
pkgReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "bot"},
|
||||
Stale: true,
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prj/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{"-user1", "submitter"},
|
||||
{
|
||||
State: common.ReviewStateApproved,
|
||||
User: &models.User{UserName: "user2"},
|
||||
},
|
||||
BotUser: "bot",
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{Data: map[string][]string{"pkg": []string{"pkgmaintainer", "pkgm1", "pkgm2"}, "": {"prjmaintainer", "prj1", "prj2"}}},
|
||||
|
||||
expected_missing_reviewers: [][]string{{"pkgm2", "prj2"}},
|
||||
expected_extra_reviewers: [][]string{{}, {"prj1"}},
|
||||
},
|
||||
{
|
||||
name: "Package maintainer submitter, AlwaysRequireReview=false",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "pkgmaintainer"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
ReviewRequired: false,
|
||||
{
|
||||
State: common.ReviewStatePending,
|
||||
User: &models.User{UserName: "prjmaintainer"},
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{
|
||||
Data: map[string][]string{
|
||||
"pkg": {"pkgmaintainer", "pkgm1"},
|
||||
prjReviews: []*models.PullReview{
|
||||
{
|
||||
State: common.ReviewStateRequestChanges,
|
||||
User: &models.User{UserName: "user1"},
|
||||
Stale: true,
|
||||
},
|
||||
{
|
||||
State: common.ReviewStateRequestReview,
|
||||
Stale: true,
|
||||
User: &models.User{UserName: "autogits_obs_staging_bot"},
|
||||
},
|
||||
},
|
||||
noAutoStaging: true,
|
||||
expected_missing_reviewers: [][]string{
|
||||
{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Package maintainer submitter, AlwaysRequireReview=true",
|
||||
prset: &common.PRSet{
|
||||
PRs: []*common.PRInfo{
|
||||
{
|
||||
PR: &models.PullRequest{
|
||||
User: &models.User{UserName: "pkgmaintainer"},
|
||||
Base: &models.PRBranchInfo{Name: "main", Repo: &models.Repository{Name: "pkg", Owner: &models.User{UserName: "org"}}},
|
||||
},
|
||||
Reviews: &common.PRReviews{},
|
||||
},
|
||||
},
|
||||
Config: &common.AutogitConfig{
|
||||
GitProjectName: "prg/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
ReviewRequired: true,
|
||||
},
|
||||
},
|
||||
maintainers: &common.MaintainershipMap{
|
||||
Data: map[string][]string{
|
||||
"pkg": {"pkgmaintainer", "pkgm1"},
|
||||
},
|
||||
},
|
||||
noAutoStaging: true,
|
||||
expected_missing_reviewers: [][]string{
|
||||
{"pkgm1"},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"user1", "autogits_obs_staging_bot"}, {"pkgmaintainer", "bot"}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
test.prset.HasAutoStaging = !test.noAutoStaging
|
||||
for idx, pr := range test.prset.PRs {
|
||||
missing, extra := test.prset.FindMissingAndExtraReviewers(test.maintainers, idx)
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
// avoid nil dereference below, by adding empty array elements
|
||||
if idx >= len(test.expected_missing_reviewers) {
|
||||
test.expected_missing_reviewers = append(test.expected_missing_reviewers, nil)
|
||||
}
|
||||
if idx >= len(test.expected_extra_reviewers) {
|
||||
test.expected_extra_reviewers = append(test.expected_extra_reviewers, nil)
|
||||
}
|
||||
if test.pkgTimeline == nil {
|
||||
test.pkgTimeline = reviewsToTimeline(test.pkgReviews)
|
||||
}
|
||||
if test.prjTimeline == nil {
|
||||
test.prjTimeline = reviewsToTimeline(test.prjReviews)
|
||||
}
|
||||
|
||||
slices.Sort(test.expected_extra_reviewers[idx])
|
||||
slices.Sort(test.expected_missing_reviewers[idx])
|
||||
if slices.Compare(missing, test.expected_missing_reviewers[idx]) != 0 {
|
||||
t.Error("Expected missing reviewers for", common.PRtoString(pr.PR), ":", test.expected_missing_reviewers[idx], "but have:", missing)
|
||||
}
|
||||
pr_mock.EXPECT().GetPullRequest("other", "pkgrepo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "pkgrepo", Owner: &models.User{UserName: "other"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("other", "pkgrepo", int64(1)).Return(test.pkgReviews, nil)
|
||||
review_mock.EXPECT().GetTimeline("other", "pkgrepo", int64(1)).Return(test.pkgTimeline, nil)
|
||||
pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
|
||||
Body: fmt.Sprintf(common.PrPattern, "other", "pkgrepo", 1),
|
||||
User: &models.User{UserName: "bot1"},
|
||||
RequestedReviewers: []*models.User{{UserName: "main_reviewer"}},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 42,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(42)).Return(test.prjReviews, nil)
|
||||
review_mock.EXPECT().GetTimeline("org", "repo", int64(42)).Return(test.prjTimeline, nil)
|
||||
|
||||
if slices.Compare(extra, test.expected_extra_reviewers[idx]) != 0 {
|
||||
t.Error("Expected reviewers to remove for", common.PRtoString(pr.PR), ":", test.expected_extra_reviewers[idx], "but have:", extra)
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
maintainership_mock.EXPECT().ListPackageMaintainers("pkgrepo", gomock.Any()).Return([]string{"pkgmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet("test", pr_mock, "other", "pkgrepo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 2 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
r = test.expectedReviewerCall[1]
|
||||
}
|
||||
slices.Sort(r)
|
||||
for _, reviewer := range r {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, reviewer).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
|
||||
prjgit_tests := []struct {
|
||||
name string
|
||||
config common.AutogitConfig
|
||||
reviewers []struct {
|
||||
org, repo string
|
||||
num int64
|
||||
reviewer string
|
||||
}
|
||||
|
||||
prjReviews []*models.PullReview
|
||||
|
||||
expectedReviewerCall [2][]string
|
||||
}{
|
||||
{
|
||||
name: "PrjMaintainers in prjgit review when not part of pkg set",
|
||||
config: common.AutogitConfig{
|
||||
GitProjectName: "org/repo#main",
|
||||
Organization: "org",
|
||||
Branch: "main",
|
||||
Reviewers: []string{},
|
||||
},
|
||||
expectedReviewerCall: [2][]string{{"autogits_obs_staging_bot", "prjmaintainer"}},
|
||||
},
|
||||
}
|
||||
for _, test := range prjgit_tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
pr_mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
review_mock := mock_common.NewMockGiteaReviewFetcherAndRequester(ctl)
|
||||
maintainership_mock := mock_common.NewMockMaintainershipData(ctl)
|
||||
|
||||
pr_mock.EXPECT().GetPullRequest("org", "repo", int64(1)).Return(&models.PullRequest{
|
||||
Body: "Some description is here",
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{},
|
||||
Base: &models.PRBranchInfo{Repo: &models.Repository{Name: "repo", Owner: &models.User{UserName: "org"}}},
|
||||
Head: &models.PRBranchInfo{},
|
||||
Index: 1,
|
||||
}, nil)
|
||||
review_mock.EXPECT().GetPullRequestReviews("org", "repo", int64(1)).Return(test.prjReviews, nil)
|
||||
review_mock.EXPECT().GetTimeline("org", "repo", int64(1)).Return(nil, nil)
|
||||
|
||||
maintainership_mock.EXPECT().ListProjectMaintainers(gomock.Any()).Return([]string{"prjmaintainer"}).AnyTimes()
|
||||
|
||||
prs, _ := common.FetchPRSet("test", pr_mock, "org", "repo", int64(1), &test.config)
|
||||
if len(prs.PRs) != 1 {
|
||||
t.Fatal("PRs not fetched")
|
||||
}
|
||||
for _, pr := range prs.PRs {
|
||||
r := test.expectedReviewerCall[0]
|
||||
if !prs.IsPrjGitPR(pr.PR) {
|
||||
t.Fatal("only prjgit pr here")
|
||||
}
|
||||
for _, reviewer := range r {
|
||||
review_mock.EXPECT().RequestReviews(pr.PR, reviewer).Return(nil, nil)
|
||||
}
|
||||
}
|
||||
prs.AssignReviewers(review_mock, maintainership_mock)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1223,6 +941,7 @@ func TestPRMerge(t *testing.T) {
|
||||
pr: &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Sha: "e8b0de43d757c96a9d2c7101f4bff404e322f53a1fa4041fb85d646110c38ad4", // "base_add_b1"
|
||||
Name: "master",
|
||||
Repo: &models.Repository{
|
||||
Name: "prj",
|
||||
Owner: &models.User{
|
||||
@@ -1243,6 +962,7 @@ func TestPRMerge(t *testing.T) {
|
||||
pr: &models.PullRequest{
|
||||
Base: &models.PRBranchInfo{
|
||||
Sha: "4fbd1026b2d7462ebe9229a49100c11f1ad6555520a21ba515122d8bc41328a8",
|
||||
Name: "master",
|
||||
Repo: &models.Repository{
|
||||
Name: "prj",
|
||||
Owner: &models.User{
|
||||
@@ -1261,7 +981,8 @@ func TestPRMerge(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock := mock_common.NewMockGiteaPRTimelineReviewFetcher(ctl)
|
||||
mock := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
|
||||
reviewUnrequestMock := mock_common.NewMockGiteaReviewUnrequester(ctl)
|
||||
|
||||
reviewUnrequestMock.EXPECT().UnrequestReview(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil)
|
||||
@@ -1320,7 +1041,7 @@ func TestPRChanges(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
ctl := gomock.NewController(t)
|
||||
mock_fetcher := mock_common.NewMockGiteaPRTimelineReviewFetcher(ctl)
|
||||
mock_fetcher := mock_common.NewMockGiteaPRTimelineFetcher(ctl)
|
||||
mock_fetcher.EXPECT().GetPullRequest("org", "prjgit", int64(42)).Return(test.PrjPRs, nil)
|
||||
for _, pr := range test.PRs {
|
||||
mock_fetcher.EXPECT().GetPullRequest(pr.Base.Repo.Owner.UserName, pr.Base.Repo.Name, pr.Index).Return(pr, nil)
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"slices"
|
||||
)
|
||||
|
||||
type Reviewers struct {
|
||||
Prj []string
|
||||
Pkg []string
|
||||
@@ -32,5 +36,10 @@ func ParseReviewers(input []string) *Reviewers {
|
||||
*pkg = append(*pkg, reviewer)
|
||||
}
|
||||
}
|
||||
|
||||
if !slices.Contains(r.Prj, Bot_BuildReview) {
|
||||
r.Prj = append(r.Prj, Bot_BuildReview)
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
@@ -21,14 +21,14 @@ func TestReviewers(t *testing.T) {
|
||||
name: "project and package reviewers",
|
||||
input: []string{"1", "2", "3", "*5", "+6", "-7"},
|
||||
|
||||
prj: []string{"5", "7"},
|
||||
prj: []string{"5", "7", common.Bot_BuildReview},
|
||||
pkg: []string{"1", "2", "3", "5", "6"},
|
||||
},
|
||||
{
|
||||
name: "optional project and package reviewers",
|
||||
input: []string{"~1", "2", "3", "~*5", "+6", "-7"},
|
||||
|
||||
prj: []string{"7"},
|
||||
prj: []string{"7", common.Bot_BuildReview},
|
||||
pkg: []string{"2", "3", "6"},
|
||||
prj_optional: []string{"5"},
|
||||
pkg_optional: []string{"1", "5"},
|
||||
|
||||
@@ -9,14 +9,12 @@ import (
|
||||
)
|
||||
|
||||
type PRReviews struct {
|
||||
Reviews []*models.PullReview
|
||||
RequestedReviewers []string
|
||||
Comments []*models.TimelineComment
|
||||
|
||||
FullTimeline []*models.TimelineComment
|
||||
reviews []*models.PullReview
|
||||
reviewers []string
|
||||
comments []*models.TimelineComment
|
||||
}
|
||||
|
||||
func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, org, repo string, no int64) (*PRReviews, error) {
|
||||
func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, reviewers []string, org, repo string, no int64) (*PRReviews, error) {
|
||||
timeline, err := rf.GetTimeline(org, repo, no)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -27,14 +25,10 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, org, repo string, no int64
|
||||
return nil, err
|
||||
}
|
||||
|
||||
reviews := make([]*models.PullReview, 0, 10)
|
||||
needNewReviews := []string{}
|
||||
reviews := make([]*models.PullReview, 0, len(reviewers))
|
||||
var comments []*models.TimelineComment
|
||||
|
||||
alreadyHaveUserReview := func(user string) bool {
|
||||
if slices.Contains(needNewReviews, user) {
|
||||
return true
|
||||
}
|
||||
for _, r := range reviews {
|
||||
if r.User != nil && r.User.UserName == user {
|
||||
return true
|
||||
@@ -43,40 +37,32 @@ func FetchGiteaReviews(rf GiteaReviewTimelineFetcher, org, repo string, no int64
|
||||
return false
|
||||
}
|
||||
|
||||
LogDebug("FetchingGiteaReviews for", org, repo, no)
|
||||
LogDebug("Number of reviews:", len(rawReviews))
|
||||
LogDebug("Number of items in timeline:", len(timeline))
|
||||
|
||||
cutOffIdx := len(timeline)
|
||||
for idx, item := range timeline {
|
||||
if item.Type == TimelineCommentType_Review || item.Type == TimelineCommentType_ReviewRequested {
|
||||
if item.Type == TimelineCommentType_Review {
|
||||
for _, r := range rawReviews {
|
||||
if r.ID == item.ReviewID {
|
||||
if !alreadyHaveUserReview(r.User.UserName) {
|
||||
if item.Type == TimelineCommentType_Review && idx > cutOffIdx {
|
||||
needNewReviews = append(needNewReviews, r.User.UserName)
|
||||
} else {
|
||||
reviews = append(reviews, r)
|
||||
}
|
||||
reviews = append(reviews, r)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if item.Type == TimelineCommentType_Comment && cutOffIdx > idx {
|
||||
} else if item.Type == TimelineCommentType_Comment {
|
||||
comments = append(comments, item)
|
||||
} else if item.Type == TimelineCommentType_PushPull && cutOffIdx == len(timeline) {
|
||||
LogDebug("cut-off", item.Created, "@", idx)
|
||||
cutOffIdx = idx
|
||||
} else if item.Type == TimelineCommentType_PushPull {
|
||||
LogDebug("cut-off", item.Created)
|
||||
timeline = timeline[0:idx]
|
||||
break
|
||||
} else {
|
||||
LogDebug("Unhandled timeline type:", item.Type)
|
||||
}
|
||||
}
|
||||
LogDebug("num comments:", len(comments), "timeline:", len(reviews))
|
||||
LogDebug("num comments:", len(comments), "reviews:", len(reviews), len(timeline))
|
||||
|
||||
return &PRReviews{
|
||||
Reviews: reviews,
|
||||
Comments: comments,
|
||||
FullTimeline: timeline,
|
||||
reviews: reviews,
|
||||
reviewers: reviewers,
|
||||
comments: comments,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -95,27 +81,23 @@ func bodyCommandManualMergeOK(body string) bool {
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsManualMergeOK() bool {
|
||||
if r == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, c := range r.Comments {
|
||||
for _, c := range r.comments {
|
||||
if c.Updated != c.Created {
|
||||
continue
|
||||
}
|
||||
LogDebug("comment:", c.User.UserName, c.Body)
|
||||
if slices.Contains(r.RequestedReviewers, c.User.UserName) {
|
||||
if slices.Contains(r.reviewers, c.User.UserName) {
|
||||
if bodyCommandManualMergeOK(c.Body) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, c := range r.Reviews {
|
||||
for _, c := range r.reviews {
|
||||
if c.Updated != c.Submitted {
|
||||
continue
|
||||
}
|
||||
if slices.Contains(r.RequestedReviewers, c.User.UserName) {
|
||||
if slices.Contains(r.reviewers, c.User.UserName) {
|
||||
if bodyCommandManualMergeOK(c.Body) {
|
||||
return true
|
||||
}
|
||||
@@ -126,14 +108,11 @@ func (r *PRReviews) IsManualMergeOK() bool {
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsApproved() bool {
|
||||
if r == nil {
|
||||
return false
|
||||
}
|
||||
goodReview := true
|
||||
|
||||
for _, reviewer := range r.RequestedReviewers {
|
||||
for _, reviewer := range r.reviewers {
|
||||
goodReview = false
|
||||
for _, review := range r.Reviews {
|
||||
for _, review := range r.reviews {
|
||||
if review.User.UserName == reviewer && review.State == ReviewStateApproved && !review.Stale && !review.Dismissed {
|
||||
LogDebug(" -- found review: ", review.User.UserName)
|
||||
goodReview = true
|
||||
@@ -151,11 +130,7 @@ func (r *PRReviews) IsApproved() bool {
|
||||
|
||||
func (r *PRReviews) MissingReviews() []string {
|
||||
missing := []string{}
|
||||
if r == nil {
|
||||
return missing
|
||||
}
|
||||
|
||||
for _, reviewer := range r.RequestedReviewers {
|
||||
for _, reviewer := range r.reviewers {
|
||||
if !r.IsReviewedBy(reviewer) {
|
||||
missing = append(missing, reviewer)
|
||||
}
|
||||
@@ -163,64 +138,45 @@ func (r *PRReviews) MissingReviews() []string {
|
||||
return missing
|
||||
}
|
||||
|
||||
func (r *PRReviews) FindReviewRequester(reviewer string) *models.TimelineComment {
|
||||
if r == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, r := range r.FullTimeline {
|
||||
if r.Type == TimelineCommentType_ReviewRequested && r.Assignee.UserName == reviewer {
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *PRReviews) HasPendingReviewBy(reviewer string) bool {
|
||||
if r == nil {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, r := range r.Reviews {
|
||||
if r.User.UserName == reviewer {
|
||||
isPending := false
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateRequestReview, ReviewStatePending:
|
||||
return true
|
||||
default:
|
||||
case ReviewStateApproved:
|
||||
fallthrough
|
||||
case ReviewStateRequestChanges:
|
||||
return false
|
||||
case ReviewStateRequestReview:
|
||||
fallthrough
|
||||
case ReviewStatePending:
|
||||
isPending = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
return isPending
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsReviewedBy(reviewer string) bool {
|
||||
if r == nil {
|
||||
if !slices.Contains(r.reviewers, reviewer) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, r := range r.Reviews {
|
||||
for _, r := range r.reviews {
|
||||
if r.User.UserName == reviewer && !r.Stale {
|
||||
switch r.State {
|
||||
case ReviewStateApproved, ReviewStateRequestChanges:
|
||||
case ReviewStateApproved:
|
||||
return true
|
||||
case ReviewStateRequestChanges:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (r *PRReviews) IsReviewedByOneOf(reviewers ...string) bool {
|
||||
for _, reviewer := range reviewers {
|
||||
if r.IsReviewedBy(reviewer) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -62,23 +62,11 @@ func TestReviews(t *testing.T) {
|
||||
{
|
||||
name: "Two reviewer, one stale and pending",
|
||||
reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}, Stale: true},
|
||||
&models.PullReview{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}, Stale: true},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isPendingByTest1: true,
|
||||
isReviewedByTest1: false,
|
||||
},
|
||||
{
|
||||
name: "Two reviewer, one stale and pending, other done",
|
||||
reviews: []*models.PullReview{
|
||||
{State: common.ReviewStateRequestReview, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateRequestChanges, User: &models.User{UserName: "user1"}},
|
||||
{State: common.ReviewStateApproved, User: &models.User{UserName: "user2"}},
|
||||
},
|
||||
reviewers: []string{"user1", "user2"},
|
||||
isApproved: false,
|
||||
isPendingByTest1: true,
|
||||
isPendingByTest1: false,
|
||||
isReviewedByTest1: false,
|
||||
},
|
||||
{
|
||||
@@ -151,7 +139,7 @@ func TestReviews(t *testing.T) {
|
||||
rf.EXPECT().GetTimeline("test", "pr", int64(1)).Return(test.timeline, nil)
|
||||
rf.EXPECT().GetPullRequestReviews("test", "pr", int64(1)).Return(test.reviews, test.fetchErr)
|
||||
|
||||
reviews, err := common.FetchGiteaReviews(rf, "test", "pr", 1)
|
||||
reviews, err := common.FetchGiteaReviews(rf, test.reviewers, "test", "pr", 1)
|
||||
|
||||
if test.fetchErr != nil {
|
||||
if err != test.fetchErr {
|
||||
@@ -159,7 +147,6 @@ func TestReviews(t *testing.T) {
|
||||
}
|
||||
return
|
||||
}
|
||||
reviews.RequestedReviewers = test.reviewers
|
||||
|
||||
if r := reviews.IsApproved(); r != test.isApproved {
|
||||
t.Fatal("Unexpected IsReviewed():", r, "vs. expected", test.isApproved)
|
||||
|
||||
@@ -40,10 +40,24 @@ create_prjgit_sample() {
|
||||
git submodule -q add ../pkgB2 pkgB2
git commit -q -m "pkgB2 added"

git checkout main
git checkout -b base_rm_c main
git clean -ffxd
git submodule -q add -f ../pkgB1 pkgB1
git commit -q -m "main adding pkgB1"
git rm pkgC
git commit -q -m 'pkgC removed'

git checkout -b base_modify_c main
git submodule update --init pkgC
pushd pkgC
echo "mofieid" >> README.md
git commit -q -m "modified" README.md
popd
git commit pkgC -m "modifiedC"
git submodule deinit -f pkgC

# git checkout main
# git clean -ffxd
# git submodule -q add -f ../pkgB1 pkgB1
# git commit -q -m "main adding pkgB1"

popd
}
|
||||
|
||||
110 common/utils.go
@@ -27,87 +27,10 @@ import (
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type NewRepos struct {
|
||||
Repos []struct {
|
||||
Organization, Repository, Branch string
|
||||
PackageName string
|
||||
}
|
||||
IsMaintainer bool
|
||||
}
|
||||
|
||||
const maintainership_line = "MAINTAINER"
|
||||
|
||||
var true_lines []string = []string{"1", "TRUE", "YES", "OK", "T"}
|
||||
|
||||
func HasSpace(s string) bool {
|
||||
return strings.IndexFunc(s, unicode.IsSpace) >= 0
|
||||
}
|
||||
|
||||
func FindNewReposInIssueBody(body string) *NewRepos {
|
||||
Issues := &NewRepos{}
|
||||
for _, line := range strings.Split(body, "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if ul := strings.ToUpper(line); strings.HasPrefix(ul, "MAINTAINER") {
|
||||
value := ""
|
||||
if idx := strings.IndexRune(ul, ':'); idx > 0 && len(ul) > idx+2 {
|
||||
value = ul[idx+1:]
|
||||
} else if idx := strings.IndexRune(ul, ' '); idx > 0 && len(ul) > idx+2 {
|
||||
value = ul[idx+1:]
|
||||
}
|
||||
|
||||
if slices.Contains(true_lines, strings.TrimSpace(value)) {
|
||||
Issues.IsMaintainer = true
|
||||
}
|
||||
}
|
||||
// line = strings.TrimSpace(line)
|
||||
issue := struct{ Organization, Repository, Branch, PackageName string }{}
|
||||
|
||||
branch := strings.Split(line, "#")
|
||||
repo := strings.Split(branch[0], "/")
|
||||
|
||||
if len(branch) == 2 {
|
||||
issue.Branch = strings.TrimSpace(branch[1])
|
||||
}
|
||||
if len(repo) == 2 {
|
||||
issue.Organization = strings.TrimSpace(repo[0])
|
||||
issue.Repository = strings.TrimSpace(repo[1])
|
||||
issue.PackageName = issue.Repository
|
||||
|
||||
if idx := strings.Index(strings.ToUpper(issue.Branch), " AS "); idx > 0 && len(issue.Branch) > idx+5 {
|
||||
issue.PackageName = strings.TrimSpace(issue.Branch[idx+3:])
|
||||
issue.Branch = strings.TrimSpace(issue.Branch[0:idx])
|
||||
}
|
||||
|
||||
if HasSpace(issue.Organization) || HasSpace(issue.Repository) || HasSpace(issue.PackageName) || HasSpace(issue.Branch) {
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
Issues.Repos = append(Issues.Repos, issue)
|
||||
//PackageNameIdx := strings.Index(strings.ToUpper(line), " AS ")
|
||||
//words := strings.Split(line)
|
||||
}
|
||||
|
||||
if len(Issues.Repos) == 0 {
|
||||
return nil
|
||||
}
|
||||
return Issues
|
||||
}
|
||||
|
||||
func IssueToString(issue *models.Issue) string {
|
||||
if issue == nil {
|
||||
return "(nil)"
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s/%s#%d", issue.Repository.Owner, issue.Repository.Name, issue.Index)
|
||||
}
|
||||
|
||||
func SplitLines(str string) []string {
|
||||
return SplitStringNoEmpty(str, "\n")
|
||||
}
|
||||
@@ -245,10 +168,9 @@ func FetchDevelProjects() (DevelProjects, error) {
|
||||
}
|
||||
|
||||
var DevelProjectNotFound = errors.New("Devel project not found")
|
||||
|
||||
func (d DevelProjects) GetDevelProject(pkg string) (string, error) {
|
||||
for _, item := range d {
|
||||
if item.Package == pkg {
|
||||
if item.Package == pkg {
|
||||
return item.Project, nil
|
||||
}
|
||||
}
|
||||
@@ -256,33 +178,3 @@ func (d DevelProjects) GetDevelProject(pkg string) (string, error) {
|
||||
return "", DevelProjectNotFound
|
||||
}
|
||||
|
||||
var removedBranchNameSuffixes []string = []string{
|
||||
"-rm",
|
||||
"-removed",
|
||||
"-deleted",
|
||||
}
|
||||
|
||||
func findRemovedBranchSuffix(branchName string) string {
|
||||
branchName = strings.ToLower(branchName)
|
||||
|
||||
for _, suffix := range removedBranchNameSuffixes {
|
||||
if len(suffix) < len(branchName) && strings.HasSuffix(branchName, suffix) {
|
||||
return suffix
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func IsRemovedBranch(branchName string) bool {
|
||||
return len(findRemovedBranchSuffix(branchName)) > 0
|
||||
}
|
||||
|
||||
func TrimRemovedBranchSuffix(branchName string) string {
|
||||
suffix := findRemovedBranchSuffix(branchName)
|
||||
if len(suffix) > 0 {
|
||||
return branchName[0 : len(branchName)-len(suffix)]
|
||||
}
|
||||
|
||||
return branchName
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package common_test
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
@@ -166,142 +165,3 @@ func TestRemoteName(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemovedBranchName(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
branchName string
|
||||
isRemoved bool
|
||||
regularName string
|
||||
}{
|
||||
{
|
||||
name: "Empty branch",
|
||||
},
|
||||
{
|
||||
name: "Removed suffix only",
|
||||
branchName: "-rm",
|
||||
isRemoved: false,
|
||||
regularName: "-rm",
|
||||
},
|
||||
{
|
||||
name: "Capital suffix",
|
||||
branchName: "Foo-Rm",
|
||||
isRemoved: true,
|
||||
regularName: "Foo",
|
||||
},
|
||||
{
|
||||
name: "Other suffixes",
|
||||
isRemoved: true,
|
||||
branchName: "Goo-Rm-DeleteD",
|
||||
regularName: "Goo-Rm",
|
||||
},
|
||||
{
|
||||
name: "Other suffixes",
|
||||
isRemoved: true,
|
||||
branchName: "main-REMOVED",
|
||||
regularName: "main",
|
||||
},
|
||||
{
|
||||
name: "Not removed separator",
|
||||
isRemoved: false,
|
||||
branchName: "main;REMOVED",
|
||||
regularName: "main;REMOVED",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
if r := common.IsRemovedBranch(test.branchName); r != test.isRemoved {
|
||||
t.Error("Expecting isRemoved:", test.isRemoved, "but received", r)
|
||||
}
|
||||
|
||||
if tn := common.TrimRemovedBranchSuffix(test.branchName); tn != test.regularName {
|
||||
t.Error("Expected stripped branch name to be:", test.regularName, "but have:", tn)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewPackageIssueParsing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
issues *common.NewRepos
|
||||
}{
|
||||
{
|
||||
name: "Nothing",
|
||||
},
|
||||
{
|
||||
name: "Basic repo",
|
||||
input: "org/repo#branch",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org", Repository: "repo", Branch: "branch", PackageName: "repo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Default branch and junk lines and approval for maintainership",
|
||||
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: yes",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org1", Repository: "repo2", Branch: "", PackageName: "repo2"},
|
||||
},
|
||||
IsMaintainer: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Default branch and junk lines and no maintainership",
|
||||
input: "\n\nsome comments\n\norg1/repo2\n\nmaintainership: NEVER",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org1", Repository: "repo2", Branch: "", PackageName: "repo2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "3 repos with comments and maintainership",
|
||||
input: "\n\nsome comments for org1/repo2 are here and more\n\norg1/repo2#master\n org2/repo3#master\n some/repo3#m\nMaintainer ok",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org1", Repository: "repo2", Branch: "master", PackageName: "repo2"},
|
||||
{Organization: "org2", Repository: "repo3", Branch: "master", PackageName: "repo3"},
|
||||
{Organization: "some", Repository: "repo3", Branch: "m", PackageName: "repo3"},
|
||||
},
|
||||
IsMaintainer: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Invalid repos with spaces",
|
||||
input: "or g/repo#branch\norg/r epo#branch\norg/repo#br anch\norg/repo#branch As foo ++",
|
||||
},
|
||||
{
|
||||
name: "Valid repos with spaces",
|
||||
input: " org / repo # branch",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org", Repository: "repo", Branch: "branch", PackageName: "repo"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Package name is not repo name",
|
||||
input: " org / repo # branch as repo++ \nmaintainer true",
|
||||
issues: &common.NewRepos{
|
||||
Repos: []struct{ Organization, Repository, Branch, PackageName string }{
|
||||
{Organization: "org", Repository: "repo", Branch: "branch", PackageName: "repo++"},
|
||||
},
|
||||
IsMaintainer: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
issue := common.FindNewReposInIssueBody(test.input)
|
||||
if !reflect.DeepEqual(test.issues, issue) {
|
||||
t.Error("Expected", test.issues, "but have", issue)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,30 +58,6 @@ sub ListPackages {
|
||||
return @packages;
|
||||
}
|
||||
|
||||
sub FactoryMd5 {
|
||||
my ($package) = @_;
|
||||
my $out = "";
|
||||
|
||||
if (system("osc ls openSUSE:Factory $package | grep -q build.specials.obscpio") == 0) {
|
||||
system("mkdir _extract") == 0 || die "_extract exists or can't make it. Aborting.";
|
||||
chdir("_extract") || die;
|
||||
system("osc cat openSUSE:Factory $package build.specials.obscpio | cpio -dium 2> /dev/null") == 0 || die;
|
||||
system("rm .* 2> /dev/null");
|
||||
open( my $fh, "find -type f -exec /usr/bin/basename {} \\; | xargs md5sum | awk '{print \$1 FS \$2}' | grep -v d41d8cd98f00b204e9800998ecf8427e |") or die;
|
||||
while ( my $l = <$fh>) {
|
||||
$out = $out.$l;
|
||||
}
|
||||
close($fh);
|
||||
chdir("..") && system("rm -rf _extract") == 0 || die;
|
||||
}
|
||||
open( my $fh, "osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' |") or die;
|
||||
while (my $l = <$fh>) {
|
||||
$out = $out.$l;
|
||||
}
|
||||
close($fh);
|
||||
return $out;
|
||||
}
|
||||
|
||||
# Read project from first argument
|
||||
sub Usage {
|
||||
die "Usage: $0 <OBS Project> [org [package]]";
|
||||
@@ -89,7 +65,6 @@ sub Usage {
|
||||
|
||||
my $project = shift or Usage();
|
||||
my $org = shift;
|
||||
|
||||
if (not defined($org)) {
|
||||
$org = `osc meta prj $project | grep scmsync | sed -e 's,^.*src.opensuse.org/\\(.*\\)/_ObsPrj.*,\\1,'`;
|
||||
chomp($org);
|
||||
@@ -164,7 +139,7 @@ if ( scalar @tomove > 0 ) {
|
||||
system("git -C $pkg push origin factory") == 0 and
|
||||
system("git obs $super_user api -X PATCH --data '{\"default_branch\": \"factory\"}' /repos/pool/$pkg") == 0
|
||||
or die "Error in creating a pool repo";
|
||||
system("for i in \$(git -C $pkg for-each-ref --format='%(refname:lstrip=3)' refs/remotes/origin/ | grep -v '\\(^HEAD\$\\|^factory\$\\)'); do git -C $pkg push origin :\$i; done") == 0 or die "failed to cull branches";
|
||||
system("for i in \$(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes/origin/ | grep -v '\\(^HEAD\$\\|^factory\$\)'); do git -C $pkg push origin :\$i; done") == 0 or die "failed to cull branches";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -192,7 +167,6 @@ for my $package ( sort(@packages) ) {
|
||||
or ( push( @tomove, $package ) and die "Can't fetch pool for $package" );
|
||||
|
||||
my @commits = FindFactoryCommit($package);
|
||||
my $Md5Hashes = FactoryMd5($package);
|
||||
my $c;
|
||||
my $match = 0;
|
||||
for my $commit (@commits) {
|
||||
@@ -205,27 +179,16 @@ for my $package ( sort(@packages) ) {
|
||||
system("git -C $package lfs fetch pool $commit") == 0
|
||||
and system("git -C $package checkout -B factory $commit") == 0
|
||||
and system("git -C $package lfs checkout") == 0
|
||||
and chdir($package)) {
|
||||
|
||||
open(my $fh, "|-", "md5sum -c --quiet") or die $!;
|
||||
print $fh $Md5Hashes;
|
||||
close $fh;
|
||||
if ($? >> 8 != 0) {
|
||||
chdir("..") || die;
|
||||
next;
|
||||
}
|
||||
open($fh, "|-", "awk '{print \$2}' | sort | bash -c \"diff <(ls -1 | sort) -\"") or die $!;
|
||||
print $fh $Md5Hashes;
|
||||
close $fh;
|
||||
my $ec = $? >> 8;
|
||||
chdir("..") || die;
|
||||
|
||||
if ($ec == 0) {
|
||||
$c = $commit;
|
||||
$match = 1;
|
||||
last;
|
||||
}
|
||||
and system(
|
||||
"cd $package; osc ls -v openSUSE:Factory $package | awk '{print \$1 FS \$7}' | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' | md5sum -c --quiet"
|
||||
) == 0
|
||||
and system("bash -c \"diff <(ls -1 $package | sort) <(osc ls openSUSE:Factory $package | grep -v -F '_scmsync.obsinfo\nbuild.specials.obscpio' | sort)\"") == 0
|
||||
)
|
||||
{
|
||||
|
||||
$c = $commit;
|
||||
$match = 1;
|
||||
last;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,24 +1,16 @@
|
||||
Java:packages
Kernel:firmware
Kernel:kdump
devel:gcc
devel:languages:clojure
devel:languages:erlang
devel:languages:erlang:Factory
devel:languages:hare
devel:languages:javascript
devel:languages:lua
devel:languages:nodejs
devel:languages:perl
devel:languages:python:Factory
devel:languages:python:mailman
devel:languages:python:pytest
devel:openSUSE:Factory
network:chromium
network:dhcp
network:im:whatsapp
network:messaging:xmpp
science:HPC
server:dns
systemsmanagement:cockpit
X11:lxde
|
||||
|
||||
@@ -14,11 +14,15 @@ import (
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type Status struct {
|
||||
Context string `json:"context"`
|
||||
State string `json:"state"`
|
||||
TargetUrl string `json:"target_url"`
|
||||
}
|
||||
|
||||
type StatusInput struct {
|
||||
Description string `json:"description"`
|
||||
Context string `json:"context"`
|
||||
State string `json:"state"`
|
||||
TargetUrl string `json:"target_url"`
|
||||
State string `json:"state"`
|
||||
TargetUrl string `json:"target_url"`
|
||||
}
|
||||
|
||||
func main() {
|
||||
@@ -55,26 +59,23 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
|
||||
config, ok := r.Context().Value(configKey).(*Config)
|
||||
|
||||
if !ok {
|
||||
common.LogDebug("Config missing from context")
|
||||
common.LogError("Config missing from context")
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
header := r.Header.Get("Authorization")
|
||||
if header == "" {
|
||||
common.LogDebug("Authorization header not found")
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
token_arr := strings.Split(header, " ")
|
||||
if len(token_arr) != 2 {
|
||||
common.LogDebug("Authorization header malformed")
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
if !strings.EqualFold(token_arr[0], "token") {
|
||||
common.LogDebug("Token not found in Authorization header")
|
||||
if !strings.EqualFold(token_arr[0], "Bearer") {
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
@@ -82,7 +83,6 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
|
||||
token := token_arr[1]
|
||||
|
||||
if !slices.Contains(config.Keys, token) {
|
||||
common.LogDebug("Provided token is not known")
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
@@ -104,8 +104,13 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
status := Status{
|
||||
Context: "Build in obs",
|
||||
State: statusinput.State,
|
||||
TargetUrl: statusinput.TargetUrl,
|
||||
}
|
||||
|
||||
status_payload, err := json.Marshal(statusinput)
|
||||
status_payload, err := json.Marshal(status)
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
@@ -126,8 +131,8 @@ func StatusProxy(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", fmt.Sprintf("token %s", ForgeToken))
|
||||
req.Header.Add("Content-Type", "Content-Type")
|
||||
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", ForgeToken))
|
||||
|
||||
resp, err := client.Do(req)
|
||||
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
# gitea_status_proxy
|
||||
|
||||
Allows bots without code owner permission to set Gitea's commit status
|
||||
|
||||
## Basic usage
|
||||
|
||||
To beging, you need the json config and a Gitea token with permissions to the repository you want to write to.
|
||||
|
||||
Keys should be randomly generated, i.e by using openssl: `openssl rand -base64 48`
|
||||
|
||||
Generate a json config file, with the key generated from running the command above, save as example.json:
|
||||
|
||||
```
|
||||
{
|
||||
"forge_url": "https://src.opensuse.org/api/v1",
|
||||
"keys": ["$YOUR_TOKEN_GOES_HERE"]
|
||||
}
|
||||
```
|
||||
|
||||
### start the proxy:
|
||||
|
||||
```
|
||||
GITEA_TOKEN=YOURTOKEN ./gitea_status_proxy -config example.json
|
||||
2025/10/30 12:53:18 [I] server up and listening on :3000
|
||||
```
|
||||
|
||||
Now the proxy should be able to accept requests under: `localhost:3000/repos/{owner}/{repo}/statuses/{sha}`, the token to be used when authenticating to the proxy must be in the `keys` list of the configuration json file (example.json above)
|
||||
|
||||
### example:
|
||||
|
||||
On a separate terminal, you can use curl to post a status to the proxy, if the GITEA_TOKEN has permissions on the target
|
||||
repository, it will result in a new status being set for the given commit
|
||||
|
||||
```
|
||||
curl -X 'POST' \
|
||||
'localhost:3000/repos/szarate/test-actions-gitea/statuses/cd5847c92fb65a628bdd6015f96ee7e569e1ad6e4fc487acc149b52e788262f9' \
|
||||
-H 'accept: application/json' \
|
||||
-H 'Authorization: token $YOUR_TOKEN_GOES_HERE' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{
|
||||
"context": "Proxy test",
|
||||
"description": "Status posted from the proxy",
|
||||
"state": "success",
|
||||
"target_url": "https://src.opensuse.org"
|
||||
}'
|
||||
```
|
||||
|
||||
After this you should be able to the results in the pull request, e.g from above: https://src.opensuse.org/szarate/test-actions-gitea/pulls/1
|
||||
@@ -1,65 +1,41 @@
|
||||
Group Review Bot
|
||||
================
|
||||
|
||||
This workaround is mainly needed because Gitea does not track which team member performed a review on behalf of a team.
|
||||
Areas of responsibility
|
||||
-----------------------
|
||||
|
||||
Main Tasks
|
||||
----------
|
||||
1. Is used to handle reviews associated with groups defined in the
|
||||
ProjectGit.
|
||||
|
||||
Awaits a comment in the format “@groupreviewbot-name: approve”, then approves the PR with the comment “<user> approved a review on behalf of <groupreviewbot-name>.”
|
||||
2. Assumes: workflow-pr needs to associate and define the PR set from
|
||||
which the groups.json is read (Base of the PrjGit PR)
|
||||
|

Target Usage
------------

Projects where policy reviews are required.

Configuration
Configiuration
--------------

The bot is configured via the `ReviewGroups` field in the `workflow.config` file, located in the ProjectGit repository.
Groups are defined in the workflow.config inside the project git. They take the following options:

See `ReviewGroups` in the [workflow-pr configuration](../workflow-pr/README.md#config-file).

```json
{
    ...
    "ReviewGroups": [
        {
            "Name": "name of the group user",
            "Reviewers": ["members", "of", "group"],
            "Silent": "(true, false) -- if true, do not explicitly require review requests of group members"
        }
    ],
    ...
    ...
    ReviewGroups: [
        {
            "Name": "name of the group user",
            "Reviewers": ["members", "of", "group"],
            "Silent": (true, false) -- if true, do not explicitly require review requests of group members
        },
    ],
    ...
}
```

Server configuration
--------------------

**Configuration file:**

| Field | Type | Notes |
| ----- | ----- | ----- |
| root | Array of string | Format **org/repo\#branch**; see the sketch below the table |

Requirements
------------
Gitea token with the following permissions:
- R/W PullRequest
- R/W Notification
- R User

Env Variables
-------------
The following variables can be used and, when set, override the corresponding command line parameters. A launch sketch combining them follows at the end of this section.

* `AUTOGITS_CONFIG` - config file location
* `AUTOGITS_URL` - Gitea URL
* `AUTOGITS_RABBITURL` - RabbitMQ url
* `AUTOGITS_DEBUG` - when set, debug level logging enabled

Authentication env variables
* `GITEA_TOKEN` - Gitea user token
* `AMQP_USERNAME`, `AMQP_PASSWORD` - username and password for rabbitmq
* Gitea token to:
  + R/W PullRequest
  + R/W Notification
  + R User
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"os"
|
||||
"regexp"
|
||||
"runtime/debug"
|
||||
"slices"
|
||||
@@ -18,23 +17,20 @@ import (
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
)
|
||||
|
||||
type ReviewBot struct {
|
||||
configs common.AutogitConfigs
|
||||
acceptRx *regexp.Regexp
|
||||
rejectRx *regexp.Regexp
|
||||
groupName string
|
||||
gitea common.Gitea
|
||||
var configs common.AutogitConfigs
|
||||
var acceptRx *regexp.Regexp
|
||||
var rejectRx *regexp.Regexp
|
||||
var groupName string
|
||||
|
||||
func InitRegex(newGroupName string) {
|
||||
groupName = newGroupName
|
||||
acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
|
||||
rejectRx = regexp.MustCompile("^:\\s*")
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) InitRegex(newGroupName string) {
|
||||
bot.groupName = newGroupName
|
||||
bot.acceptRx = regexp.MustCompile("^:\\s*(LGTM|approved?)")
|
||||
bot.rejectRx = regexp.MustCompile("^:\\s*")
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ParseReviewLine(reviewText string) (bool, string) {
|
||||
func ParseReviewLine(reviewText string) (bool, string) {
|
||||
line := strings.TrimSpace(reviewText)
|
||||
groupTextName := "@" + bot.groupName
|
||||
groupTextName := "@" + groupName
|
||||
glen := len(groupTextName)
|
||||
if len(line) < glen || line[0:glen] != groupTextName {
|
||||
return false, line
|
||||
@@ -54,20 +50,20 @@ func (bot *ReviewBot) ParseReviewLine(reviewText string) (bool, string) {
|
||||
return false, line
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ReviewAccepted(reviewText string) bool {
|
||||
func ReviewAccepted(reviewText string) bool {
|
||||
for _, line := range common.SplitStringNoEmpty(reviewText, "\n") {
|
||||
if matched, reviewLine := bot.ParseReviewLine(line); matched {
|
||||
return bot.acceptRx.MatchString(reviewLine)
|
||||
if matched, reviewLine := ParseReviewLine(line); matched {
|
||||
return acceptRx.MatchString(reviewLine)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ReviewRejected(reviewText string) bool {
|
||||
func ReviewRejected(reviewText string) bool {
|
||||
for _, line := range common.SplitStringNoEmpty(reviewText, "\n") {
|
||||
if matched, reviewLine := bot.ParseReviewLine(line); matched {
|
||||
if bot.rejectRx.MatchString(reviewLine) {
|
||||
return !bot.acceptRx.MatchString(reviewLine)
|
||||
if matched, reviewLine := ParseReviewLine(line); matched {
|
||||
if rejectRx.MatchString(reviewLine) {
|
||||
return !acceptRx.MatchString(reviewLine)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -117,10 +113,10 @@ var commentStrings = []string{
|
||||
"change_time_estimate",
|
||||
}*/
|
||||
|
||||
func (bot *ReviewBot) FindAcceptableReviewInTimeline(user string, timeline []*models.TimelineComment, reviews []*models.PullReview) *models.TimelineComment {
|
||||
func FindAcceptableReviewInTimeline(user string, timeline []*models.TimelineComment, reviews []*models.PullReview) *models.TimelineComment {
|
||||
for _, t := range timeline {
|
||||
if t.Type == common.TimelineCommentType_Comment && t.User.UserName == user && t.Created == t.Updated {
|
||||
if bot.ReviewAccepted(t.Body) || bot.ReviewRejected(t.Body) {
|
||||
if ReviewAccepted(t.Body) || ReviewRejected(t.Body) {
|
||||
return t
|
||||
}
|
||||
}
|
||||
@@ -129,9 +125,9 @@ func (bot *ReviewBot) FindAcceptableReviewInTimeline(user string, timeline []*mo
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) FindOurLastReviewInTimeline(timeline []*models.TimelineComment) *models.TimelineComment {
|
||||
func FindOurLastReviewInTimeline(timeline []*models.TimelineComment) *models.TimelineComment {
|
||||
for _, t := range timeline {
|
||||
if t.Type == common.TimelineCommentType_Review && t.User.UserName == bot.groupName && t.Created == t.Updated {
|
||||
if t.Type == common.TimelineCommentType_Review && t.User.UserName == groupName && t.Created == t.Updated {
|
||||
return t
|
||||
}
|
||||
}
|
||||
@@ -139,13 +135,13 @@ func (bot *ReviewBot) FindOurLastReviewInTimeline(timeline []*models.TimelineCom
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) UnrequestReviews(org, repo string, id int64, users []string) {
|
||||
if err := bot.gitea.UnrequestReview(org, repo, id, users...); err != nil {
|
||||
func UnrequestReviews(gitea common.Gitea, org, repo string, id int64, users []string) {
|
||||
if err := gitea.UnrequestReview(org, repo, id, users...); err != nil {
|
||||
common.LogError("Can't remove reviewrs after a review:", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThread) {
|
||||
func ProcessNotifications(notification *models.NotificationThread, gitea common.Gitea) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
common.LogInfo("panic cought --- recovered")
|
||||
@@ -153,7 +149,7 @@ func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThre
|
||||
}
|
||||
}()
|
||||
|
||||
rx := regexp.MustCompile(`^/?api/v\d+/repos/(?<org>[_\.a-zA-Z0-9-]+)/(?<project>[_\.a-zA-Z0-9-]+)/(?:issues|pulls)/(?<num>[0-9]+)$`)
|
||||
rx := regexp.MustCompile(`^/?api/v\d+/repos/(?<org>[_a-zA-Z0-9-]+)/(?<project>[_a-zA-Z0-9-]+)/(?:issues|pulls)/(?<num>[0-9]+)$`)
|
||||
subject := notification.Subject
|
||||
u, err := url.Parse(notification.Subject.URL)
|
||||
if err != nil {
|
||||
@@ -172,14 +168,14 @@ func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThre
|
||||
id, _ := strconv.ParseInt(match[3], 10, 64)
|
||||
|
||||
common.LogInfo("processing:", fmt.Sprintf("%s/%s!%d", org, repo, id))
|
||||
pr, err := bot.gitea.GetPullRequest(org, repo, id)
|
||||
pr, err := gitea.GetPullRequest(org, repo, id)
|
||||
if err != nil {
|
||||
common.LogError(" ** Cannot fetch PR associated with review:", subject.URL, "Error:", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := bot.ProcessPR(pr); err == nil && !common.IsDryRun {
|
||||
if err := bot.gitea.SetNotificationRead(notification.ID); err != nil {
|
||||
if err := ProcessPR(pr); err == nil && !common.IsDryRun {
|
||||
if err := gitea.SetNotificationRead(notification.ID); err != nil {
|
||||
common.LogDebug(" Cannot set notification as read", err)
|
||||
}
|
||||
} else if err != nil && err != ReviewNotFinished {
|
||||
@@ -189,24 +185,24 @@ func (bot *ReviewBot) ProcessNotifications(notification *models.NotificationThre
|
||||
|
||||
var ReviewNotFinished = fmt.Errorf("Review is not finished")
|
||||
|
||||
func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
func ProcessPR(pr *models.PullRequest) error {
|
||||
org := pr.Base.Repo.Owner.UserName
|
||||
repo := pr.Base.Repo.Name
|
||||
id := pr.Index
|
||||
|
||||
found := false
|
||||
for _, reviewer := range pr.RequestedReviewers {
|
||||
if reviewer != nil && reviewer.UserName == bot.groupName {
|
||||
if reviewer != nil && reviewer.UserName == groupName {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
common.LogInfo(" review is not requested for", bot.groupName)
|
||||
common.LogInfo(" review is not requested for", groupName)
|
||||
return nil
|
||||
}
|
||||
|
||||
config := bot.configs.GetPrjGitConfig(org, repo, pr.Base.Name)
|
||||
config := configs.GetPrjGitConfig(org, repo, pr.Base.Name)
|
||||
if config == nil {
|
||||
return fmt.Errorf("Cannot find config for: %s", pr.URL)
|
||||
}
|
||||
@@ -216,17 +212,17 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
reviews, err := bot.gitea.GetPullRequestReviews(org, repo, id)
|
||||
reviews, err := gitea.GetPullRequestReviews(org, repo, id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch reviews for: %v: %w", pr.URL, err)
|
||||
}
|
||||
|
||||
timeline, err := common.FetchTimelineSinceReviewRequestOrPush(bot.gitea, bot.groupName, pr.Head.Sha, org, repo, id)
|
||||
timeline, err := common.FetchTimelineSinceReviewRequestOrPush(gitea, groupName, pr.Head.Sha, org, repo, id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch timeline to review. %w", err)
|
||||
}
|
||||
|
||||
groupConfig, err := config.GetReviewGroup(bot.groupName)
|
||||
groupConfig, err := config.GetReviewGroup(groupName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch review group. %w", err)
|
||||
}
|
||||
@@ -237,30 +233,30 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
// pr.Head.Sha
|
||||
|
||||
for _, reviewer := range requestReviewers {
|
||||
if review := bot.FindAcceptableReviewInTimeline(reviewer, timeline, reviews); review != nil {
|
||||
if bot.ReviewAccepted(review.Body) {
|
||||
if review := FindAcceptableReviewInTimeline(reviewer, timeline, reviews); review != nil {
|
||||
if ReviewAccepted(review.Body) {
|
||||
if !common.IsDryRun {
|
||||
text := reviewer + " approved a review on behalf of " + bot.groupName
|
||||
if review := bot.FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := bot.gitea.AddReviewComment(pr, common.ReviewStateApproved, text)
|
||||
text := reviewer + " approved a review on behalf of " + groupName
|
||||
if review := FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateApproved, text)
|
||||
if err != nil {
|
||||
common.LogError(" -> failed to write approval comment", err)
|
||||
}
|
||||
bot.UnrequestReviews(org, repo, id, requestReviewers)
|
||||
UnrequestReviews(gitea, org, repo, id, requestReviewers)
|
||||
}
|
||||
}
|
||||
common.LogInfo(" -> approved by", reviewer)
|
||||
common.LogInfo(" review at", review.Created)
|
||||
return nil
|
||||
} else if bot.ReviewRejected(review.Body) {
|
||||
} else if ReviewRejected(review.Body) {
|
||||
if !common.IsDryRun {
|
||||
text := reviewer + " requested changes on behalf of " + bot.groupName + ". See " + review.HTMLURL
|
||||
if review := bot.FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := bot.gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, text)
|
||||
text := reviewer + " requested changes on behalf of " + groupName + ". See " + review.HTMLURL
|
||||
if review := FindOurLastReviewInTimeline(timeline); review == nil || review.Body != text {
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Changes requested. See review by: "+reviewer)
|
||||
if err != nil {
|
||||
common.LogError(" -> failed to write rejecting comment", err)
|
||||
}
|
||||
bot.UnrequestReviews(org, repo, id, requestReviewers)
|
||||
UnrequestReviews(gitea, org, repo, id, requestReviewers)
|
||||
}
|
||||
}
|
||||
common.LogInfo(" -> declined by", reviewer)
|
||||
@@ -274,7 +270,7 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
if !groupConfig.Silent && len(requestReviewers) > 0 {
|
||||
common.LogDebug(" Requesting reviews for:", requestReviewers)
|
||||
if !common.IsDryRun {
|
||||
if _, err := bot.gitea.RequestReviews(pr, requestReviewers...); err != nil {
|
||||
if _, err := gitea.RequestReviews(pr, requestReviewers...); err != nil {
|
||||
common.LogDebug(" -> err:", err)
|
||||
}
|
||||
} else {
|
||||
@@ -287,40 +283,42 @@ func (bot *ReviewBot) ProcessPR(pr *models.PullRequest) error {
|
||||
// add a helpful comment, if not yet added
|
||||
found_help_comment := false
|
||||
for _, t := range timeline {
|
||||
if t.Type == common.TimelineCommentType_Comment && t.User != nil && t.User.UserName == bot.groupName {
|
||||
if t.Type == common.TimelineCommentType_Comment && t.User != nil && t.User.UserName == groupName {
|
||||
found_help_comment = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found_help_comment && !common.IsDryRun {
|
||||
helpComment := fmt.Sprintln("Review by", bot.groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\n"+
|
||||
helpComment := fmt.Sprintln("Review by", groupName, "represents a group of reviewers:", strings.Join(requestReviewers, ", "), ".\n\n"+
|
||||
"Do **not** use standard review interface to review on behalf of the group.\n"+
|
||||
"To accept the review on behalf of the group, create the following comment: `@"+bot.groupName+": approve`.\n"+
|
||||
"To request changes on behalf of the group, create the following comment: `@"+bot.groupName+": decline` followed with lines justifying the decision.\n"+
|
||||
"To accept the review on behalf of the group, create the following comment: `@"+groupName+": approve`.\n"+
|
||||
"To request changes on behalf of the group, create the following comment: `@"+groupName+": decline` followed with lines justifying the decision.\n"+
|
||||
"Future edits of the comments are ignored, a new comment is required to change the review state.")
|
||||
if slices.Contains(groupConfig.Reviewers, pr.User.UserName) {
|
||||
helpComment = helpComment + "\n\n" +
|
||||
"Submitter is member of this review group, hence they are excluded from being one of the reviewers here"
|
||||
}
|
||||
bot.gitea.AddComment(pr, helpComment)
|
||||
gitea.AddComment(pr, helpComment)
|
||||
}
|
||||
|
||||
return ReviewNotFinished
|
||||
}
|
||||
|
||||
func (bot *ReviewBot) PeriodReviewCheck() {
|
||||
notifications, err := bot.gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
|
||||
func PeriodReviewCheck() {
|
||||
notifications, err := gitea.GetNotifications(common.GiteaNotificationType_Pull, nil)
|
||||
if err != nil {
|
||||
common.LogError(" Error fetching unread notifications: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, notification := range notifications {
|
||||
bot.ProcessNotifications(notification)
|
||||
ProcessNotifications(notification, gitea)
|
||||
}
|
||||
}
|
||||
|
||||
var gitea common.Gitea
|
||||
|
||||
func main() {
|
||||
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance used for reviews")
|
||||
rabbitMqHost := flag.String("rabbit-url", "amqps://rabbit.opensuse.org", "RabbitMQ instance where Gitea webhook notifications are sent")
|
||||
@@ -330,24 +328,6 @@ func main() {
|
||||
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry run, no effect. For debugging")
|
||||
flag.Parse()
|
||||
|
||||
if err := common.SetLoggingLevelFromString(*logging); err != nil {
|
||||
common.LogError(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if cf := os.Getenv("AUTOGITS_CONFIG"); len(cf) > 0 {
|
||||
*configFile = cf
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_URL"); len(url) > 0 {
|
||||
*giteaUrl = url
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_RABBITURL"); len(url) > 0 {
|
||||
*rabbitMqHost = url
|
||||
}
|
||||
if debug := os.Getenv("AUTOGITS_DEBUG"); len(debug) > 0 {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
}
|
||||
|
||||
args := flag.Args()
|
||||
if len(args) != 1 {
|
||||
log.Println(" syntax:")
|
||||
@@ -356,7 +336,7 @@ func main() {
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
targetGroupName := args[0]
|
||||
groupName = args[0]
|
||||
|
||||
if *configFile == "" {
|
||||
common.LogError("Missing config file")
|
||||
@@ -379,35 +359,36 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
giteaTransport := common.AllocateGiteaTransport(*giteaUrl)
|
||||
configs, err := common.ResolveWorkflowConfigs(giteaTransport, configData)
|
||||
gitea = common.AllocateGiteaTransport(*giteaUrl)
|
||||
configs, err = common.ResolveWorkflowConfigs(gitea, configData)
|
||||
if err != nil {
|
||||
common.LogError("Cannot parse workflow configs:", err)
|
||||
return
|
||||
}
|
||||
|
||||
reviewer, err := giteaTransport.GetCurrentUser()
|
||||
reviewer, err := gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
common.LogError("Cannot fetch review user:", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := common.SetLoggingLevelFromString(*logging); err != nil {
|
||||
common.LogError(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if *interval < 1 {
|
||||
*interval = 1
|
||||
}
|
||||
|
||||
bot := &ReviewBot{
|
||||
gitea: giteaTransport,
|
||||
configs: configs,
|
||||
}
|
||||
bot.InitRegex(targetGroupName)
|
||||
InitRegex(groupName)
|
||||
|
||||
common.LogInfo(" ** processing group reviews for group:", bot.groupName)
|
||||
common.LogInfo(" ** processing group reviews for group:", groupName)
|
||||
common.LogInfo(" ** username in Gitea:", reviewer.UserName)
|
||||
common.LogInfo(" ** polling interval:", *interval, "min")
|
||||
common.LogInfo(" ** connecting to RabbitMQ:", *rabbitMqHost)
|
||||
|
||||
if bot.groupName != reviewer.UserName {
|
||||
if groupName != reviewer.UserName {
|
||||
common.LogError(" ***** Reviewer does not match group name. Aborting. *****")
|
||||
return
|
||||
}
|
||||
@@ -419,13 +400,10 @@ func main() {
|
||||
}
|
||||
|
||||
config_update := ConfigUpdatePush{
|
||||
bot: bot,
|
||||
config_modified: make(chan *common.AutogitConfig),
|
||||
}
|
||||
|
||||
process_issue_pr := IssueCommentProcessor{
|
||||
bot: bot,
|
||||
}
|
||||
process_issue_pr := IssueCommentProcessor{}
|
||||
|
||||
configUpdates := &common.RabbitMQGiteaEventsProcessor{
|
||||
Orgs: []string{},
|
||||
@@ -435,7 +413,7 @@ func main() {
|
||||
},
|
||||
}
|
||||
configUpdates.Connection().RabbitURL = u
|
||||
for _, c := range bot.configs {
|
||||
for _, c := range configs {
|
||||
if org, _, _ := c.GetPrjGit(); !slices.Contains(configUpdates.Orgs, org) {
|
||||
configUpdates.Orgs = append(configUpdates.Orgs, org)
|
||||
}
|
||||
@@ -448,17 +426,17 @@ func main() {
|
||||
select {
|
||||
case configTouched, ok := <-config_update.config_modified:
|
||||
if ok {
|
||||
for idx, c := range bot.configs {
|
||||
for idx, c := range configs {
|
||||
if c == configTouched {
|
||||
org, repo, branch := c.GetPrjGit()
|
||||
prj := fmt.Sprintf("%s/%s#%s", org, repo, branch)
|
||||
common.LogInfo("Detected config update for", prj)
|
||||
|
||||
new_config, err := common.ReadWorkflowConfig(bot.gitea, prj)
|
||||
new_config, err := common.ReadWorkflowConfig(gitea, prj)
|
||||
if err != nil {
|
||||
common.LogError("Failed parsing Project config for", prj, err)
|
||||
} else {
|
||||
bot.configs[idx] = new_config
|
||||
configs[idx] = new_config
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -468,7 +446,7 @@ func main() {
|
||||
}
|
||||
}
|
||||
|
||||
bot.PeriodReviewCheck()
|
||||
PeriodReviewCheck()
|
||||
time.Sleep(time.Duration(*interval * int64(time.Minute)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,359 +1,6 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestProcessPR(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
mockGitea := mock_common.NewMockGitea(ctrl)
|
||||
groupName := "testgroup"
|
||||
|
||||
bot := &ReviewBot{
|
||||
gitea: mockGitea,
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
prIndex := int64(1)
|
||||
headSha := "abcdef123456"
|
||||
|
||||
pr := &models.PullRequest{
|
||||
Index: prIndex,
|
||||
URL: "http://gitea/pr/1",
|
||||
State: "open",
|
||||
Base: &models.PRBranchInfo{
|
||||
Name: "main",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{
|
||||
UserName: org,
|
||||
},
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: headSha,
|
||||
},
|
||||
User: &models.User{
|
||||
UserName: "submitter",
|
||||
},
|
||||
RequestedReviewers: []*models.User{
|
||||
{UserName: groupName},
|
||||
},
|
||||
}
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#main",
|
||||
ReviewGroups: []*common.ReviewGroup{
|
||||
{
|
||||
Name: groupName,
|
||||
Reviewers: []string{"reviewer1", "reviewer2"},
|
||||
},
|
||||
},
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
|
||||
t.Run("Review not requested for group", func(t *testing.T) {
|
||||
prNoRequest := *pr
|
||||
prNoRequest.RequestedReviewers = nil
|
||||
err := bot.ProcessPR(&prNoRequest)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("PR is closed", func(t *testing.T) {
|
||||
prClosed := *pr
|
||||
prClosed.State = "closed"
|
||||
err := bot.ProcessPR(&prClosed)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Successful Approval", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
// reviewer1 approved in timeline
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
expectedText := "reviewer1 approved a review on behalf of " + groupName
|
||||
mockGitea.EXPECT().AddReviewComment(pr, common.ReviewStateApproved, expectedText).Return(nil, nil)
|
||||
mockGitea.EXPECT().UnrequestReview(org, repo, prIndex, gomock.Any()).Return(nil)
|
||||
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Dry Run - No actions taken", func(t *testing.T) {
|
||||
common.IsDryRun = true
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
}
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
// No AddReviewComment or UnrequestReview should be called
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Approval already exists - No new comment", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
|
||||
approvalText := "reviewer1 approved a review on behalf of " + groupName
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Review,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: approvalText,
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: "Help comment",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
// No AddReviewComment, UnrequestReview, or AddComment should be called
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Rejection already exists - No new comment", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
|
||||
rejectionText := "reviewer1 requested changes on behalf of " + groupName + ". See http://gitea/comment/123"
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Review,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: rejectionText,
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer1"},
|
||||
Body: "@" + groupName + ": decline",
|
||||
HTMLURL: "http://gitea/comment/123",
|
||||
},
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: "Help comment",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Pending review - Help comment already exists", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: groupName},
|
||||
Body: "Some help comment",
|
||||
},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
|
||||
// It will try to request reviews
|
||||
mockGitea.EXPECT().RequestReviews(pr, "reviewer1", "reviewer2").Return(nil, nil)
|
||||
|
||||
// AddComment should NOT be called because bot already has a comment in timeline
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != ReviewNotFinished {
|
||||
t.Errorf("Expected ReviewNotFinished error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Submitter is group member - Excluded from review request", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
prSubmitterMember := *pr
|
||||
prSubmitterMember.User = &models.User{UserName: "reviewer1"}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(nil, nil)
|
||||
mockGitea.EXPECT().RequestReviews(&prSubmitterMember, "reviewer2").Return(nil, nil)
|
||||
mockGitea.EXPECT().AddComment(&prSubmitterMember, gomock.Any()).Return(nil)
|
||||
err := bot.ProcessPR(&prSubmitterMember)
|
||||
if err != ReviewNotFinished {
|
||||
t.Errorf("Expected ReviewNotFinished error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Successful Rejection", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "reviewer2"},
|
||||
Body: "@" + groupName + ": decline",
|
||||
HTMLURL: "http://gitea/comment/999",
|
||||
},
|
||||
}
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
expectedText := "reviewer2 requested changes on behalf of " + groupName + ". See http://gitea/comment/999"
|
||||
mockGitea.EXPECT().AddReviewComment(pr, common.ReviewStateRequestChanges, expectedText).Return(nil, nil)
|
||||
mockGitea.EXPECT().UnrequestReview(org, repo, prIndex, gomock.Any()).Return(nil)
|
||||
err := bot.ProcessPR(pr)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Config not found", func(t *testing.T) {
|
||||
bot.configs = common.AutogitConfigs{}
|
||||
err := bot.ProcessPR(pr)
|
||||
if err == nil {
|
||||
t.Error("Expected error when config is missing, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Gitea error in GetPullRequestReviews", func(t *testing.T) {
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, fmt.Errorf("gitea error"))
|
||||
err := bot.ProcessPR(pr)
|
||||
if err == nil {
|
||||
t.Error("Expected error from gitea, got nil")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestProcessNotifications(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
mockGitea := mock_common.NewMockGitea(ctrl)
|
||||
groupName := "testgroup"
|
||||
|
||||
bot := &ReviewBot{
|
||||
gitea: mockGitea,
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
prIndex := int64(123)
|
||||
notificationID := int64(456)
|
||||
|
||||
notification := &models.NotificationThread{
|
||||
ID: notificationID,
|
||||
Subject: &models.NotificationSubject{
|
||||
URL: fmt.Sprintf("http://gitea/api/v1/repos/%s/%s/pulls/%d", org, repo, prIndex),
|
||||
},
|
||||
}
|
||||
|
||||
t.Run("Notification Success", func(t *testing.T) {
|
||||
common.IsDryRun = false
|
||||
pr := &models.PullRequest{
|
||||
Index: prIndex,
|
||||
Base: &models.PRBranchInfo{
|
||||
Name: "main",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "headsha",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{{UserName: groupName}},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, prIndex).Return(pr, nil)
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#main",
|
||||
ReviewGroups: []*common.ReviewGroup{{Name: groupName, Reviewers: []string{"r1"}}},
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, prIndex).Return(nil, nil)
|
||||
timeline := []*models.TimelineComment{
|
||||
{
|
||||
Type: common.TimelineCommentType_Comment,
|
||||
User: &models.User{UserName: "r1"},
|
||||
Body: "@" + groupName + ": approve",
|
||||
},
|
||||
}
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, prIndex).Return(timeline, nil)
|
||||
expectedText := "r1 approved a review on behalf of " + groupName
|
||||
mockGitea.EXPECT().AddReviewComment(pr, common.ReviewStateApproved, expectedText).Return(nil, nil)
|
||||
mockGitea.EXPECT().UnrequestReview(org, repo, prIndex, gomock.Any()).Return(nil)
|
||||
|
||||
mockGitea.EXPECT().SetNotificationRead(notificationID).Return(nil)
|
||||
|
||||
bot.ProcessNotifications(notification)
|
||||
|
||||
})
|
||||
|
||||
t.Run("Invalid Notification URL", func(t *testing.T) {
|
||||
badNotification := &models.NotificationThread{
|
||||
Subject: &models.NotificationSubject{
|
||||
URL: "http://gitea/invalid/url",
|
||||
},
|
||||
}
|
||||
bot.ProcessNotifications(badNotification)
|
||||
})
|
||||
|
||||
t.Run("Gitea error in GetPullRequest", func(t *testing.T) {
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, prIndex).Return(nil, fmt.Errorf("gitea error"))
|
||||
bot.ProcessNotifications(notification)
|
||||
})
|
||||
}
|
||||
import "testing"
|
||||
|
||||
func TestReviewApprovalCheck(t *testing.T) {
|
||||
tests := []struct {
|
||||
@@ -413,78 +60,16 @@ func TestReviewApprovalCheck(t *testing.T) {
|
||||
InString: "@group2: disapprove",
|
||||
Rejected: true,
|
||||
},
|
||||
{
|
||||
Name: "Whitespace before colon",
|
||||
GroupName: "group",
|
||||
InString: "@group : LGTM",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "No whitespace after colon",
|
||||
GroupName: "group",
|
||||
InString: "@group:LGTM",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Leading and trailing whitespace on line",
|
||||
GroupName: "group",
|
||||
InString: " @group: LGTM ",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Multiline: Approved on second line",
|
||||
GroupName: "group",
|
||||
InString: "Random noise\n@group: approved",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Multiline: Multiple group mentions, first wins",
|
||||
GroupName: "group",
|
||||
InString: "@group: decline\n@group: approve",
|
||||
Rejected: true,
|
||||
},
|
||||
{
|
||||
Name: "Multiline: Approved on second line",
|
||||
GroupName: "group",
|
||||
InString: "noise\n@group: approve\nmore noise",
|
||||
Approved: true,
|
||||
},
|
||||
{
|
||||
Name: "Not at start of line (even with whitespace)",
|
||||
GroupName: "group",
|
||||
InString: "Hello @group: approve",
|
||||
Approved: false,
|
||||
},
|
||||
{
|
||||
Name: "Rejecting with reason",
|
||||
GroupName: "group",
|
||||
InString: "@group: decline because of X, Y and Z",
|
||||
Rejected: true,
|
||||
},
|
||||
{
|
||||
Name: "No colon after group",
|
||||
GroupName: "group",
|
||||
InString: "@group LGTM",
|
||||
Approved: false,
|
||||
Rejected: false,
|
||||
},
|
||||
{
|
||||
Name: "Invalid char after group",
|
||||
GroupName: "group",
|
||||
InString: "@group! LGTM",
|
||||
Approved: false,
|
||||
Rejected: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.Name, func(t *testing.T) {
|
||||
bot := &ReviewBot{}
|
||||
bot.InitRegex(test.GroupName)
|
||||
InitRegex(test.GroupName)
|
||||
|
||||
if r := bot.ReviewAccepted(test.InString); r != test.Approved {
|
||||
if r := ReviewAccepted(test.InString); r != test.Approved {
|
||||
t.Error("ReviewAccepted() returned", r, "expecting", test.Approved)
|
||||
}
|
||||
if r := bot.ReviewRejected(test.InString); r != test.Rejected {
|
||||
if r := ReviewRejected(test.InString); r != test.Rejected {
|
||||
t.Error("ReviewRejected() returned", r, "expecting", test.Rejected)
|
||||
}
|
||||
})
|
||||
|
||||
@@ -7,9 +7,7 @@ import (
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
type IssueCommentProcessor struct {
|
||||
bot *ReviewBot
|
||||
}
|
||||
type IssueCommentProcessor struct{}
|
||||
|
||||
func (s *IssueCommentProcessor) ProcessFunc(req *common.Request) error {
|
||||
if req.Type != common.RequestType_IssueComment {
|
||||
@@ -21,15 +19,14 @@ func (s *IssueCommentProcessor) ProcessFunc(req *common.Request) error {
|
||||
repo := data.Repository.Name
|
||||
index := int64(data.Issue.Number)
|
||||
|
||||
pr, err := s.bot.gitea.GetPullRequest(org, repo, index)
|
||||
pr, err := gitea.GetPullRequest(org, repo, index)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to fetch PullRequest from event: %s/%s!%d Error: %w", org, repo, index, err)
|
||||
}
|
||||
return s.bot.ProcessPR(pr)
|
||||
return ProcessPR(pr)
|
||||
}
|
||||
|
||||
type ConfigUpdatePush struct {
|
||||
bot *ReviewBot
|
||||
config_modified chan *common.AutogitConfig
|
||||
}
|
||||
|
||||
@@ -49,7 +46,7 @@ func (s *ConfigUpdatePush) ProcessFunc(req *common.Request) error {
|
||||
}
|
||||
branch := data.Ref[len(branch_ref):]
|
||||
|
||||
c := s.bot.configs.GetPrjGitConfig(org, repo, branch)
|
||||
c := configs.GetPrjGitConfig(org, repo, branch)
|
||||
if c == nil {
|
||||
return nil
|
||||
}
|
||||
@@ -67,7 +64,7 @@ func (s *ConfigUpdatePush) ProcessFunc(req *common.Request) error {
|
||||
}
|
||||
|
||||
if modified_config {
|
||||
for _, config := range s.bot.configs {
|
||||
for _, config := range configs {
|
||||
if o, r, _ := config.GetPrjGit(); o == org && r == repo {
|
||||
s.config_modified <- config
|
||||
}
|
||||
|
||||
@@ -1,203 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.uber.org/mock/gomock"
|
||||
"src.opensuse.org/autogits/common"
|
||||
"src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
mock_common "src.opensuse.org/autogits/common/mock"
|
||||
)
|
||||
|
||||
func TestIssueCommentProcessor(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
mockGitea := mock_common.NewMockGitea(ctrl)
|
||||
groupName := "testgroup"
|
||||
bot := &ReviewBot{
|
||||
gitea: mockGitea,
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
processor := &IssueCommentProcessor{bot: bot}
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
index := 123
|
||||
|
||||
event := &common.IssueCommentWebhookEvent{
|
||||
Repository: &common.Repository{
|
||||
Name: repo,
|
||||
Owner: &common.Organization{
|
||||
Username: org,
|
||||
},
|
||||
},
|
||||
Issue: &common.IssueDetail{
|
||||
Number: index,
|
||||
},
|
||||
}
|
||||
|
||||
req := &common.Request{
|
||||
Type: common.RequestType_IssueComment,
|
||||
Data: event,
|
||||
}
|
||||
|
||||
t.Run("Successful Processing", func(t *testing.T) {
|
||||
pr := &models.PullRequest{
|
||||
Index: int64(index),
|
||||
Base: &models.PRBranchInfo{
|
||||
Name: "main",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
Head: &models.PRBranchInfo{
|
||||
Sha: "headsha",
|
||||
Repo: &models.Repository{
|
||||
Name: repo,
|
||||
Owner: &models.User{UserName: org},
|
||||
},
|
||||
},
|
||||
User: &models.User{UserName: "submitter"},
|
||||
RequestedReviewers: []*models.User{{UserName: groupName}},
|
||||
}
|
||||
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, int64(index)).Return(pr, nil)
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#main",
|
||||
ReviewGroups: []*common.ReviewGroup{{Name: groupName, Reviewers: []string{"r1"}}},
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
mockGitea.EXPECT().GetPullRequestReviews(org, repo, int64(index)).Return(nil, nil)
|
||||
mockGitea.EXPECT().GetTimeline(org, repo, int64(index)).Return(nil, nil)
|
||||
mockGitea.EXPECT().RequestReviews(pr, "r1").Return(nil, nil)
|
||||
mockGitea.EXPECT().AddComment(pr, gomock.Any()).Return(nil)
|
||||
|
||||
err := processor.ProcessFunc(req)
|
||||
if err != ReviewNotFinished {
|
||||
t.Errorf("Expected ReviewNotFinished, got %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Gitea error in GetPullRequest", func(t *testing.T) {
|
||||
mockGitea.EXPECT().GetPullRequest(org, repo, int64(index)).Return(nil, fmt.Errorf("gitea error"))
|
||||
err := processor.ProcessFunc(req)
|
||||
if err == nil {
|
||||
t.Error("Expected error, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Wrong Request Type", func(t *testing.T) {
|
||||
wrongReq := &common.Request{Type: common.RequestType_Push}
|
||||
err := processor.ProcessFunc(wrongReq)
|
||||
if err == nil {
|
||||
t.Error("Expected error for wrong request type, got nil")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestConfigUpdatePush(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
|
||||
groupName := "testgroup"
|
||||
bot := &ReviewBot{
|
||||
groupName: groupName,
|
||||
}
|
||||
bot.InitRegex(groupName)
|
||||
|
||||
configChan := make(chan *common.AutogitConfig, 1)
|
||||
processor := &ConfigUpdatePush{
|
||||
bot: bot,
|
||||
config_modified: configChan,
|
||||
}
|
||||
|
||||
org := "myorg"
|
||||
repo := "myrepo"
|
||||
branch := "main"
|
||||
|
||||
prjConfig := &common.AutogitConfig{
|
||||
GitProjectName: org + "/" + repo + "#" + branch,
|
||||
Organization: org,
|
||||
Branch: branch,
|
||||
}
|
||||
bot.configs = common.AutogitConfigs{prjConfig}
|
||||
|
||||
event := &common.PushWebhookEvent{
|
||||
Ref: "refs/heads/" + branch,
|
||||
Repository: &common.Repository{
|
||||
Name: repo,
|
||||
Owner: &common.Organization{
|
||||
Username: org,
|
||||
},
|
||||
},
|
||||
Commits: []common.Commit{
|
||||
{
|
||||
Modified: []string{common.ProjectConfigFile},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
req := &common.Request{
|
||||
Type: common.RequestType_Push,
|
||||
Data: event,
|
||||
}
|
||||
|
||||
t.Run("Config Modified", func(t *testing.T) {
|
||||
err := processor.ProcessFunc(req)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
|
||||
select {
|
||||
case modified := <-configChan:
|
||||
if modified != prjConfig {
|
||||
t.Errorf("Expected modified config to be %v, got %v", prjConfig, modified)
|
||||
}
|
||||
default:
|
||||
t.Error("Expected config modification signal, but none received")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("No Config Modified", func(t *testing.T) {
|
||||
noConfigEvent := *event
|
||||
noConfigEvent.Commits = []common.Commit{{Modified: []string{"README.md"}}}
|
||||
noConfigReq := &common.Request{Type: common.RequestType_Push, Data: &noConfigEvent}
|
||||
|
||||
err := processor.ProcessFunc(noConfigReq)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error, got %v", err)
|
||||
}
|
||||
|
||||
select {
|
||||
case <-configChan:
|
||||
t.Error("Did not expect config modification signal")
|
||||
default:
|
||||
// Success
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Wrong Branch Ref", func(t *testing.T) {
|
||||
wrongBranchEvent := *event
|
||||
wrongBranchEvent.Ref = "refs/tags/v1.0"
|
||||
wrongBranchReq := &common.Request{Type: common.RequestType_Push, Data: &wrongBranchEvent}
|
||||
|
||||
err := processor.ProcessFunc(wrongBranchReq)
|
||||
if err == nil {
|
||||
t.Error("Expected error for wrong branch ref, got nil")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Config Not Found", func(t *testing.T) {
|
||||
bot.configs = common.AutogitConfigs{}
|
||||
err := processor.ProcessFunc(req)
|
||||
if err != nil {
|
||||
t.Errorf("Expected nil error even if config not found, got %v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,11 +0,0 @@

FROM opensuse/tumbleweed
ENV container=podman

ENV LANG=en_US.UTF-8

RUN zypper -vvvn install podman podman-compose vim make python3-pytest python3-requests python3-pytest-dependency

COPY . /opt/project/

WORKDIR /opt/project/integration
@@ -1,76 +0,0 @@
|
||||
# We want to be able to test in two **modes**:
# A. bots are used from official packages as defined in */Dockerfile.package
# B. bots are just picked up from binaries that are placed in corresponding parent directory.

# The topology is defined in podman-compose file and can be spawned in two ways:
# 1. Privileged container (needs no additional dependencies)
# 2. podman-compose on a local machine (needs dependencies as defined in the Dockerfile)


# Typical workflow:
# A1: - run 'make test_package'
# B1: - run 'make test_local' (make sure that the go binaries in the parent folder are built)
# A2:
# 1. 'make build_package' - prepares images (recommended, otherwise there might be surprises if image fails to build during `make up`)
# 2. 'make up' - spawns podman-compose
# 3. 'pytest -v tests/*' - run tests
# 4. 'make down' - once the containers are not needed
# B2: (make sure the go binaries in the parent folder are built)
# 1. 'make build_local' - prepares images (recommended, otherwise there might be surprises if image fails to build during `make up`)
# 2. 'make up' - spawns podman-compose
# 3. 'pytest -v tests/*' - run tests
# 4. 'make down' - once the containers are not needed

AUTO_DETECT_MODE := $(shell if test -e ../workflow-pr/workflow-pr; then echo .local; else echo .package; fi)

# try to detect mode B1, otherwise mode A1
test: GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE)
test: build_container test_container

# mode A1
test_package: GIWTF_IMAGE_SUFFIX=.package
test_package: build_container test_container

# mode B1
test_local: GIWTF_IMAGE_SUFFIX=.local
test_local: build_container test_container

MODULES := gitea-events-rabbitmq-publisher obs-staging-bot workflow-pr

# Prepare topology 1
build_container:
	podman build ../ -f integration/Dockerfile -t autogits_integration

# Run tests in topology 1
test_container:
	podman run --rm --privileged -t --network integration_gitea-network -e GIWTF_IMAGE_SUFFIX=$(GIWTF_IMAGE_SUFFIX) autogits_integration /usr/bin/bash -c "make build && make up && sleep 25 && pytest -v tests/*"


build_local: AUTO_DETECT_MODE=.local
build_local: build

build_package: AUTO_DETECT_MODE=.package
build_package: build

# parse all service images from podman-compose and build them (topology 2)
build:
	podman pull docker.io/library/rabbitmq:3.13.7-management
	for i in $$(grep -A 1000 services: podman-compose.yml | grep -oE '^ [^: ]+'); do GIWTF_IMAGE_SUFFIX=$(AUTO_DETECT_MODE) podman-compose build $$i || exit 1; done

# this will spawn prebuilt containers (topology 2)
up:
	podman-compose up -d

# tear down (topology 2)
down:
	podman-compose down

# mode A
up-bots-package:
	GIWTF_IMAGE_SUFFIX=.package podman-compose up -d

# mode B
up-bots-local:
	GIWTF_IMAGE_SUFFIX=.local podman-compose up -d
@@ -1 +0,0 @@
|
||||
sudo rm -rf gitea-data/ gitea-logs/ rabbitmq-data/ workflow-pr-repos/
|
||||
@@ -1 +0,0 @@
|
||||
Dockerfile.package
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
# Add the custom CA to the trust store
|
||||
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
|
||||
RUN update-ca-certificates
|
||||
|
||||
RUN zypper -n in which binutils
|
||||
|
||||
# Copy the pre-built binary into the container
|
||||
# The user will build this and place it in the same directory as this Dockerfile
|
||||
COPY gitea-events-rabbitmq-publisher/gitea-events-rabbitmq-publisher /usr/local/bin/
|
||||
COPY integration/gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
# Add the custom CA to the trust store
|
||||
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
|
||||
RUN update-ca-certificates
|
||||
|
||||
RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
|
||||
RUN zypper --gpg-auto-import-keys ref
|
||||
|
||||
RUN zypper -n in git-core curl autogits-gitea-events-rabbitmq-publisher binutils
|
||||
|
||||
COPY integration/gitea-events-rabbitmq-publisher/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
@@ -1,13 +0,0 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
exe=$(which gitea-events-rabbitmq-publisher 2>/dev/null) || :
|
||||
exe=${exe:-/usr/local/bin/gitea-events-rabbitmq-publisher}
|
||||
|
||||
package=$(rpm -qa | grep autogits-gitea-events-rabbitmq-publisher) || :
|
||||
|
||||
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
|
||||
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
|
||||
echo "RABBITMQ_HOST: $RABBITMQ_HOST"
|
||||
|
||||
exec $exe "$@"
|
||||
@@ -1,25 +0,0 @@
|
||||
FROM registry.suse.com/bci/bci-base:15.7
|
||||
|
||||
RUN zypper ar --repo https://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo \
|
||||
&& zypper -n --gpg-auto-import-keys refresh
|
||||
|
||||
RUN zypper -n install \
|
||||
git \
|
||||
sqlite3 \
|
||||
curl \
|
||||
gawk \
|
||||
openssh \
|
||||
jq \
|
||||
devel_Factory_git-workflow:gitea \
|
||||
&& rm -rf /var/cache/zypp/*
|
||||
|
||||
# Copy the minimal set of required files from the local 'container-files' directory
|
||||
COPY container-files/ /
|
||||
|
||||
RUN chmod -R 777 /etc/gitea/conf
|
||||
|
||||
# Make the setup and entrypoint scripts executable
|
||||
RUN chmod +x /opt/setup/setup-gitea.sh && chmod +x /opt/setup/entrypoint.sh && chmod +x /opt/setup/setup-webhook.sh && chmod +x /opt/setup/setup-dummy-data.sh
|
||||
|
||||
# Use the new entrypoint script to start the container
|
||||
ENTRYPOINT ["/opt/setup/entrypoint.sh"]
|
||||
@@ -1,42 +0,0 @@
|
||||
WORK_PATH = /var/lib/gitea
|
||||
|
||||
[server]
|
||||
CERT_FILE = /etc/gitea/https/cert.pem
|
||||
KEY_FILE = /etc/gitea/https/key.pem
|
||||
STATIC_ROOT_PATH = /usr/share/gitea
|
||||
APP_DATA_PATH = /var/lib/gitea/data
|
||||
PPROF_DATA_PATH = /var/lib/gitea/data/tmp/pprof
|
||||
PROTOCOL = http
|
||||
DOMAIN = gitea-test
|
||||
SSH_DOMAIN = gitea-test
|
||||
ROOT_URL = http://gitea-test:3000/
|
||||
HTTP_PORT = 3000
|
||||
DISABLE_SSH = false
|
||||
START_SSH_SERVER = true
|
||||
SSH_PORT = 3022
|
||||
LFS_START_SERVER = true
|
||||
|
||||
[lfs]
|
||||
PATH = /var/lib/gitea/data/lfs
|
||||
|
||||
[database]
|
||||
DB_TYPE = sqlite3
|
||||
PATH = /var/lib/gitea/data/gitea.db
|
||||
|
||||
[security]
|
||||
INSTALL_LOCK = true
|
||||
|
||||
[oauth2]
|
||||
ENABLED = false
|
||||
|
||||
[log]
|
||||
ROOT_PATH = /var/log/gitea
|
||||
MODE = console, file
|
||||
; Either "Trace", "Debug", "Info", "Warn", "Error" or "None", default is "Info"
|
||||
LEVEL = Debug
|
||||
|
||||
[service]
|
||||
ENABLE_BASIC_AUTHENTICATION = true
|
||||
|
||||
[webhook]
|
||||
ALLOWED_HOST_LIST = gitea-publisher
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# Run setup to ensure permissions, migrations, and the admin user are ready.
|
||||
# The setup script is now idempotent.
|
||||
/opt/setup/setup-gitea.sh
|
||||
|
||||
# Start the webhook setup script in the background.
|
||||
# It will wait for the main Gitea process to be ready before creating the webhook.
|
||||
/opt/setup/setup-webhook.sh &
|
||||
|
||||
echo "Starting Gitea..."
|
||||
|
||||
# The original systemd service ran as user 'gitea' and group 'gitea'
|
||||
# with a working directory of '/var/lib/gitea'.
|
||||
# We will switch to that user and run the web command.
|
||||
# Using exec means Gitea will become PID 1, allowing it to receive signals correctly.
|
||||
cd /var/lib/gitea
|
||||
exec su -s /bin/bash gitea -c "/usr/bin/gitea web --config /etc/gitea/conf/app.ini"
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/bin/bash
|
||||
# This script is now empty as dummy data setup is handled by pytest fixtures.
|
||||
@@ -1,100 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -x
|
||||
set -e
|
||||
|
||||
# Set ownership on the volume mounts. This allows the 'gitea' user to write to them.
|
||||
# We use -R to ensure all subdirectories (like /var/lib/gitea/data) are covered.
|
||||
chown -R gitea:gitea /var/lib/gitea /var/log/gitea
|
||||
|
||||
# Set ownership on the config directory.
|
||||
chown -R gitea:gitea /etc/gitea
|
||||
|
||||
# Run database migrations to initialize the sqlite3 db based on app.ini.
|
||||
su -s /bin/bash gitea -c 'gitea migrate'
|
||||
|
||||
# Create a default admin user if it doesn't exist
|
||||
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "admin" {found=1} END {exit !found}'; then
|
||||
echo "Creating admin user..."
|
||||
su -s /bin/bash gitea -c 'gitea admin user create --username admin --password opensuse --email admin@example.com --must-change-password=false --admin'
|
||||
else
|
||||
echo "Admin user already exists."
|
||||
fi
|
||||
|
||||
# Generate an access token for the admin user
|
||||
ADMIN_TOKEN_FILE="/var/lib/gitea/admin.token"
|
||||
if [ -f "$ADMIN_TOKEN_FILE" ]; then
|
||||
echo "Admin token already exists at $ADMIN_TOKEN_FILE."
|
||||
else
|
||||
echo "Generating admin token..."
|
||||
ADMIN_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t admin-token")
|
||||
if [ -n "$ADMIN_TOKEN" ]; then
|
||||
printf "%s" "$ADMIN_TOKEN" > "$ADMIN_TOKEN_FILE"
|
||||
chmod 777 "$ADMIN_TOKEN_FILE"
|
||||
chown gitea:gitea "$ADMIN_TOKEN_FILE"
|
||||
echo "Admin token generated and saved to $ADMIN_TOKEN_FILE."
|
||||
else
|
||||
echo "Failed to generate admin token."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Generate SSH key for the admin user if it doesn't exist
|
||||
SSH_KEY_DIR="/var/lib/gitea/ssh-keys"
|
||||
mkdir -p "$SSH_KEY_DIR"
|
||||
if [ ! -f "$SSH_KEY_DIR/id_ed25519" ]; then
|
||||
echo "Generating SSH key for admin user..."
|
||||
ssh-keygen -t ed25519 -N "" -f "$SSH_KEY_DIR/id_ed25519"
|
||||
chown -R gitea:gitea "$SSH_KEY_DIR"
|
||||
chmod 700 "$SSH_KEY_DIR"
|
||||
chmod 600 "$SSH_KEY_DIR/id_ed25519"
|
||||
chmod 644 "$SSH_KEY_DIR/id_ed25519.pub"
|
||||
fi
|
||||
|
||||
# Create a autogits_obs_staging_bot user if it doesn't exist
|
||||
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "autogits_obs_staging_bot" {found=1} END {exit !found}'; then
|
||||
echo "Creating autogits_obs_staging_bot user..."
|
||||
su -s /bin/bash gitea -c 'gitea admin user create --username autogits_obs_staging_bot --password opensuse --email autogits_obs_staging_bot@example.com --must-change-password=false'
|
||||
else
|
||||
echo "autogits_obs_staging_bot user already exists."
|
||||
fi
|
||||
|
||||
# Generate an access token for the autogits_obs_staging_bot user
|
||||
BOT_TOKEN_FILE="/var/lib/gitea/autogits_obs_staging_bot.token"
|
||||
if [ -f "$BOT_TOKEN_FILE" ]; then
|
||||
echo "autogits_obs_staging_bot token already exists at $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Generating autogits_obs_staging_bot token..."
|
||||
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u autogits_obs_staging_bot -t autogits_obs_staging_bot-token")
|
||||
if [ -n "$BOT_TOKEN" ]; then
|
||||
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
|
||||
chmod 666 "$BOT_TOKEN_FILE"
|
||||
chown gitea:gitea "$BOT_TOKEN_FILE"
|
||||
echo "autogits_obs_staging_bot token generated and saved to $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Failed to generate autogits_obs_staging_bot token."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create a workflow-pr user if it doesn't exist
|
||||
if ! su -s /bin/bash gitea -c 'gitea admin user list' | awk 'NR>1 && $2 == "workflow-pr" {found=1} END {exit !found}'; then
|
||||
echo "Creating workflow-pr user..."
|
||||
su -s /bin/bash gitea -c 'gitea admin user create --username workflow-pr --password opensuse --email workflow-pr@example.com --must-change-password=false'
|
||||
else
|
||||
echo "workflow-pr user already exists."
|
||||
fi
|
||||
|
||||
# Generate an access token for the workflow-pr user
|
||||
BOT_TOKEN_FILE="/var/lib/gitea/workflow-pr.token"
|
||||
if [ -f "$BOT_TOKEN_FILE" ]; then
|
||||
echo "workflow-pr token already exists at $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Generating workflow-pr token..."
|
||||
BOT_TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u workflow-pr -t workflow-pr-token")
|
||||
if [ -n "$BOT_TOKEN" ]; then
|
||||
printf "%s" "$BOT_TOKEN" > "$BOT_TOKEN_FILE"
|
||||
chmod 666 "$BOT_TOKEN_FILE"
|
||||
chown gitea:gitea "$BOT_TOKEN_FILE"
|
||||
echo "workflow-pr token generated and saved to $BOT_TOKEN_FILE."
|
||||
else
|
||||
echo "Failed to generate workflow-pr token."
|
||||
fi
|
||||
fi
|
||||
@@ -1,92 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
GITEA_URL="http://localhost:3000"
|
||||
WEBHOOK_URL="http://gitea-publisher:8002/rabbitmq-forwarder"
|
||||
TOKEN_NAME="webhook-creator"
|
||||
|
||||
echo "Webhook setup script started in background."
|
||||
|
||||
# Wait 10s for the main Gitea process to start
|
||||
sleep 10
|
||||
|
||||
# Wait for Gitea API to be ready
|
||||
echo "Waiting for Gitea API at $GITEA_URL..."
|
||||
while ! curl -s -f "$GITEA_URL/api/v1/version" > /dev/null; do
|
||||
echo "Gitea API not up yet, waiting 5s..."
|
||||
sleep 5
|
||||
done
|
||||
echo "Gitea API is up."
|
||||
|
||||
# The `gitea admin` command needs to be run as the gitea user.
|
||||
# The -raw flag gives us the token directly.
|
||||
echo "Generating or retrieving admin token..."
|
||||
TOKEN_FILE="/var/lib/gitea/admin.token"
|
||||
|
||||
if [ -f "$TOKEN_FILE" ]; then
|
||||
TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
|
||||
echo "Admin token loaded from $TOKEN_FILE."
|
||||
else
|
||||
TOKEN=$(su -s /bin/bash gitea -c "gitea admin user generate-access-token -raw -u admin -t $TOKEN_NAME")
|
||||
if [ -n "$TOKEN" ]; then
|
||||
printf "%s" "$TOKEN" > "$TOKEN_FILE"
|
||||
chmod 666 "$TOKEN_FILE"
|
||||
chown gitea:gitea "$TOKEN_FILE"
|
||||
echo "Admin token generated and saved to $TOKEN_FILE."
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$TOKEN" ]; then
|
||||
echo "Failed to generate or retrieve admin token. This might be because the token already exists in Gitea but not in $TOKEN_FILE. Exiting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Run the dummy data setup script
|
||||
/opt/setup/setup-dummy-data.sh "$GITEA_URL" "$TOKEN"
|
||||
|
||||
# Add SSH key via API
|
||||
PUB_KEY_FILE="/var/lib/gitea/ssh-keys/id_ed25519.pub"
|
||||
if [ -f "$PUB_KEY_FILE" ]; then
|
||||
echo "Checking for existing SSH key 'bot-key'..."
|
||||
KEYS_URL="$GITEA_URL/api/v1/admin/users/workflow-pr/keys"
|
||||
EXISTING_KEYS=$(curl -s -X GET -H "Authorization: token $TOKEN" "$KEYS_URL")
|
||||
|
||||
if ! echo "$EXISTING_KEYS" | grep -q "\"title\":\"bot-key\""; then
|
||||
echo "Registering SSH key 'bot-key' via API..."
|
||||
KEY_CONTENT=$(cat "$PUB_KEY_FILE")
|
||||
curl -s -X POST "$KEYS_URL" \
|
||||
-H "Authorization: token $TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "{
|
||||
\"key\": \"$KEY_CONTENT\",
|
||||
\"read_only\": false,
|
||||
\"title\": \"bot-key\"
|
||||
}"
|
||||
echo -e "\nSSH key registered."
|
||||
else
|
||||
echo "SSH key 'bot-key' already registered."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if the webhook already exists
|
||||
echo "Checking for existing system webhook..."
|
||||
DB_PATH="/var/lib/gitea/data/gitea.db"
|
||||
EXISTS=$(su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"SELECT 1 FROM webhook WHERE url = '$WEBHOOK_URL' AND is_system_webhook = 1 LIMIT 1;\"")
|
||||
|
||||
if [ "$EXISTS" = "1" ]; then
|
||||
echo "System webhook for $WEBHOOK_URL already exists. Exiting."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Creating Gitea system webhook for $WEBHOOK_URL via direct database INSERT..."
|
||||
# The events JSON requires escaped double quotes for the sqlite3 command.
|
||||
EVENTS_JSON='{\"push_only\":false,\"send_everything\":true,\"choose_events\":false,\"branch_filter\":\"*\",\"events\":{\"create\":false,\"delete\":false,\"fork\":false,\"issue_assign\":false,\"issue_comment\":false,\"issue_label\":false,\"issue_milestone\":false,\"issues\":false,\"package\":false,\"pull_request\":false,\"pull_request_assign\":false,\"pull_request_comment\":false,\"pull_request_label\":false,\"pull_request_milestone\":false,\"pull_request_review\":false,\"pull_request_review_request\":false,\"pull_request_sync\":false,\"push\":false,\"release\":false,\"repository\":false,\"status\":false,\"wiki\":false,\"workflow_job\":false,\"workflow_run\":false}}'
|
||||
NOW_UNIX=$(date +%s)
|
||||
|
||||
INSERT_CMD="INSERT INTO webhook (repo_id, owner_id, is_system_webhook, url, http_method, content_type, events, is_active, type, meta, created_unix, updated_unix) VALUES (0, 0, 1, '$WEBHOOK_URL', 'POST', 1, '$EVENTS_JSON', 1, 'gitea', '', $NOW_UNIX, $NOW_UNIX);"
|
||||
|
||||
su -s /bin/bash gitea -c "sqlite3 '$DB_PATH' \"$INSERT_CMD\""
|
||||
|
||||
echo "System webhook created successfully."
|
||||
|
||||
exit 0
|
||||
@@ -1,14 +0,0 @@
|
||||
# Use a base Python image
|
||||
FROM registry.suse.com/bci/python:3.11
|
||||
|
||||
# Set the working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Copy the server script
|
||||
COPY server.py .
|
||||
|
||||
# Expose the port the server will run on
|
||||
EXPOSE 8080
|
||||
|
||||
# Command to run the server
|
||||
CMD ["python3", "-u", "server.py"]
|
||||
@@ -1,18 +0,0 @@
|
||||
<project name="openSUSE:Leap:16.0:PullRequest">
|
||||
<title>Leap 16.0 PullRequest area</title>
|
||||
<description>Base project to define the pull request builds</description>
|
||||
<person userid="autogits_obs_staging_bot" role="maintainer"/>
|
||||
<person userid="maxlin_factory" role="maintainer"/>
|
||||
<group groupid="maintenance-opensuse.org" role="maintainer"/>
|
||||
<debuginfo>
|
||||
<enable/>
|
||||
</debuginfo>
|
||||
<repository name="standard">
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<arch>x86_64</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
</project>
|
||||
@@ -1,59 +0,0 @@
|
||||
<project name="openSUSE:Leap:16.0">
|
||||
<title>openSUSE Leap 16.0 based on SLFO</title>
|
||||
<description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
|
||||
<link project="openSUSE:Backports:SLE-16.0"/>
|
||||
<scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
|
||||
<person userid="dimstar_suse" role="maintainer"/>
|
||||
<person userid="lkocman-factory" role="maintainer"/>
|
||||
<person userid="maxlin_factory" role="maintainer"/>
|
||||
<person userid="factory-auto" role="reviewer"/>
|
||||
<person userid="licensedigger" role="reviewer"/>
|
||||
<group groupid="autobuild-team" role="maintainer"/>
|
||||
<group groupid="factory-maintainers" role="maintainer"/>
|
||||
<group groupid="maintenance-opensuse.org" role="maintainer"/>
|
||||
<group groupid="factory-staging" role="reviewer"/>
|
||||
<build>
|
||||
<disable repository="ports"/>
|
||||
</build>
|
||||
<debuginfo>
|
||||
<enable/>
|
||||
</debuginfo>
|
||||
<repository name="standard" rebuild="local">
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>local</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
<repository name="product">
|
||||
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
|
||||
<path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="images"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>local</arch>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
<repository name="ports">
|
||||
<arch>armv7l</arch>
|
||||
</repository>
|
||||
<repository name="images">
|
||||
<releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
|
||||
<path project="openSUSE:Leap:16.0" repository="standard"/>
|
||||
<path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
|
||||
<path project="SUSE:SLFO:1.2" repository="standard"/>
|
||||
<arch>i586</arch>
|
||||
<arch>x86_64</arch>
|
||||
<arch>aarch64</arch>
|
||||
<arch>ppc64le</arch>
|
||||
<arch>s390x</arch>
|
||||
</repository>
|
||||
</project>
|
||||
@@ -1,140 +0,0 @@
|
||||
import http.server
|
||||
import socketserver
|
||||
import os
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import fnmatch
|
||||
|
||||
PORT = 8080
|
||||
RESPONSE_DIR = "/app/responses"
|
||||
STATE_DIR = "/tmp/mock_obs_state"
|
||||
|
||||
class MockOBSHandler(http.server.SimpleHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
logging.info(f"GET request for: {self.path}")
|
||||
        path_without_query = self.path.split('?')[0]

        # Check for state stored by a PUT request first
        sanitized_put_path = 'PUT' + path_without_query.replace('/', '_')
        state_file_path = os.path.join(STATE_DIR, sanitized_put_path)
        if os.path.exists(state_file_path):
            logging.info(f"Found stored PUT state for {self.path} at {state_file_path}")
            self.send_response(200)
            self.send_header("Content-type", "application/xml")
            file_size = os.path.getsize(state_file_path)
            self.send_header("Content-Length", str(file_size))
            self.end_headers()
            with open(state_file_path, 'rb') as f:
                self.wfile.write(f.read())
            return

        # If no PUT state file, fall back to the glob/exact match logic
        self.handle_request('GET')

    def do_PUT(self):
        logging.info(f"PUT request for: {self.path}")
        logging.info(f"Headers: {self.headers}")
        path_without_query = self.path.split('?')[0]

        body = b''
        if self.headers.get('Transfer-Encoding', '').lower() == 'chunked':
            logging.info("Chunked transfer encoding detected")
            while True:
                line = self.rfile.readline().strip()
                if not line:
                    break
                chunk_length = int(line, 16)
                if chunk_length == 0:
                    self.rfile.readline()
                    break
                body += self.rfile.read(chunk_length)
                self.rfile.read(2)  # Read the trailing CRLF
        else:
            content_length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(content_length)

        logging.info(f"Body: {body.decode('utf-8')}")
        sanitized_path = 'PUT' + path_without_query.replace('/', '_')
        state_file_path = os.path.join(STATE_DIR, sanitized_path)

        logging.info(f"Saving state for {self.path} to {state_file_path}")
        os.makedirs(os.path.dirname(state_file_path), exist_ok=True)
        with open(state_file_path, 'wb') as f:
            f.write(body)

        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        response_body = b"OK"
        self.send_header("Content-Length", str(len(response_body)))
        self.end_headers()
        self.wfile.write(response_body)

    def do_POST(self):
        logging.info(f"POST request for: {self.path}")
        self.handle_request('POST')

    def do_DELETE(self):
        logging.info(f"DELETE request for: {self.path}")
        self.handle_request('DELETE')

    def handle_request(self, method):
        path_without_query = self.path.split('?')[0]
        sanitized_request_path = method + path_without_query.replace('/', '_')
        logging.info(f"Handling request, looking for match for: {sanitized_request_path}")

        response_file = None
        # Check for glob match first
        if os.path.exists(RESPONSE_DIR):
            for filename in os.listdir(RESPONSE_DIR):
                if fnmatch.fnmatch(sanitized_request_path, filename):
                    response_file = os.path.join(RESPONSE_DIR, filename)
                    logging.info(f"Found matching response file (glob): {response_file}")
                    break

        # Fallback to exact match if no glob match
        if response_file is None:
            exact_file = os.path.join(RESPONSE_DIR, sanitized_request_path)
            if os.path.exists(exact_file):
                response_file = exact_file
                logging.info(f"Found matching response file (exact): {response_file}")

        if response_file:
            logging.info(f"Serving content from {response_file}")
            self.send_response(200)
            self.send_header("Content-type", "application/xml")
            file_size = os.path.getsize(response_file)
            self.send_header("Content-Length", str(file_size))
            self.end_headers()
            with open(response_file, 'rb') as f:
                self.wfile.write(f.read())
        else:
            logging.info(f"Response file not found for {sanitized_request_path}. Sending 404.")
            self.send_response(404)
            self.send_header("Content-type", "text/plain")
            body = f"Mock response not found for {sanitized_request_path}".encode('utf-8')
            self.send_header("Content-Length", str(len(body)))
            self.end_headers()
            self.wfile.write(body)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')

    if not os.path.exists(STATE_DIR):
        logging.info(f"Creating state directory: {STATE_DIR}")
        os.makedirs(STATE_DIR)
    if not os.path.exists(RESPONSE_DIR):
        os.makedirs(RESPONSE_DIR)

    with socketserver.TCPServer(("", PORT), MockOBSHandler) as httpd:
        logging.info(f"Serving mock OBS API on port {PORT}")

        def graceful_shutdown(sig, frame):
            logging.info("Received SIGTERM, shutting down gracefully...")
            threading.Thread(target=httpd.shutdown).start()

        signal.signal(signal.SIGTERM, graceful_shutdown)

        httpd.serve_forever()
        logging.info("Server has shut down.")
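The PUT handler above persists each request body under STATE_DIR (keyed by the sanitized path), and do_GET checks that stored state before falling back to the canned files in RESPONSE_DIR. A minimal sketch of that round trip, assuming the mock server is reachable on localhost:8080 as published by the compose file; the OBS path used here is only an example, not one the bots necessarily use:

import requests

BASE = "http://localhost:8080"  # assumed host:port of the mock-obs container

# PUT stores the body under STATE_DIR as
# 'PUT_source_openSUSE:Leap:16.0_openjpeg2__meta' (slashes become underscores).
payload = "<package name='openjpeg2'><title/><description/></package>"
r = requests.put(f"{BASE}/source/openSUSE:Leap:16.0/openjpeg2/_meta", data=payload)
assert r.status_code == 200 and r.text == "OK"

# A later GET of the same path is answered from the stored PUT state,
# not from the files under responses/.
r = requests.get(f"{BASE}/source/openSUSE:Leap:16.0/openjpeg2/_meta")
assert r.status_code == 200 and r.text == payload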
@@ -1 +0,0 @@
./Dockerfile.package
@@ -1,18 +0,0 @@
# Use the SLE BCI base image
FROM registry.suse.com/bci/bci-base:15.7

# Install any necessary dependencies for the bot
# e.g., git, curl, etc.
RUN zypper -n in git-core curl binutils

# Copy the bot binary and its entrypoint script
COPY obs-staging-bot/obs-staging-bot /usr/local/bin/obs-staging-bot
COPY integration/obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Create a non-root user to run the bot
RUN useradd -m -u 1001 bot
USER 1001

# Set the entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
@@ -1,19 +0,0 @@
# Use the SLE BCI base image
FROM registry.suse.com/bci/bci-base:15.7

RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref

# Install any necessary dependencies for the bot
# e.g., git, curl, etc.
RUN zypper -n in git-core curl autogits-obs-staging-bot binutils

COPY integration/obs-staging-bot/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Create a non-root user to run the bot
RUN useradd -m -u 1001 bot
USER 1001

# Set the entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
@@ -1,28 +0,0 @@
#!/bin/sh
set -e

# This script waits for the autogits_obs_staging_bot Gitea token to be created,
# exports it as an environment variable, and then executes the main container command.

TOKEN_FILE="/gitea-data/autogits_obs_staging_bot.token"

echo "OBS Staging Bot: Waiting for Gitea autogits_obs_staging_bot token at $TOKEN_FILE..."
while [ ! -s "$TOKEN_FILE" ]; do
    sleep 2
done

export GITEA_TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ')
echo "OBS Staging Bot: GITEA_TOKEN exported."

# Execute the bot as the current container user, with the required variables in the environment.
echo "OBS Staging Bot: Executing bot..."

exe=$(which obs-staging-bot)
exe=${exe:-/usr/local/bin/obs-staging-bot}

package=$(rpm -qa | grep autogits-obs-staging-bot) || :

echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :

exec $exe "$@"
@@ -1,136 +0,0 @@
version: "3.8"

networks:
  gitea-network:
    driver: bridge

services:
  gitea:
    build: ./gitea
    container_name: gitea-test
    environment:
      - GITEA_WORK_DIR=/var/lib/gitea
    networks:
      - gitea-network
    ports:
      # Map the HTTP and SSH ports defined in your app.ini
      - "3000:3000"
      - "3022:3022"
    volumes:
      # Persist Gitea's data (repositories, sqlite db, etc.) to a local directory
      # The :z flag allows sharing between containers
      - ./gitea-data:/var/lib/gitea:z
      # Persist Gitea's logs to a local directory
      - ./gitea-logs:/var/log/gitea:Z
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3.13.7-management
    container_name: rabbitmq-test
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "check_running", "-q"]
      interval: 30s
      timeout: 30s
      retries: 3
    networks:
      - gitea-network
    ports:
      # AMQP protocol port with TLS
      - "5671:5671"
      # HTTP management UI
      - "15672:15672"
    volumes:
      # Persist RabbitMQ data
      - ./rabbitmq-data:/var/lib/rabbitmq:Z
      # Mount TLS certs
      - ./rabbitmq-config/certs:/etc/rabbitmq/certs:Z
      # Mount rabbitmq config
      - ./rabbitmq-config/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:Z
      # Mount exchange definitions
      - ./rabbitmq-config/definitions.json:/etc/rabbitmq/definitions.json:Z
    restart: unless-stopped

  gitea-publisher:
    build:
      context: ..
      dockerfile: integration/gitea-events-rabbitmq-publisher/Dockerfile${GIWTF_IMAGE_SUFFIX}
    container_name: gitea-publisher
    networks:
      - gitea-network
    depends_on:
      gitea:
        condition: service_started
      rabbitmq:
        condition: service_healthy
    environment:
      - RABBITMQ_HOST=rabbitmq-test
      - RABBITMQ_USERNAME=gitea
      - RABBITMQ_PASSWORD=gitea
      - SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
    command: [ "-listen", "0.0.0.0:8002", "-topic-domain", "suse", "-debug" ]
    restart: unless-stopped

  workflow-pr:
    build:
      context: ..
      dockerfile: integration/workflow-pr/Dockerfile${GIWTF_IMAGE_SUFFIX}
    container_name: workflow-pr
    networks:
      - gitea-network
    depends_on:
      gitea:
        condition: service_started
      rabbitmq:
        condition: service_healthy
    environment:
      - AMQP_USERNAME=gitea
      - AMQP_PASSWORD=gitea
      - SSL_CERT_FILE=/usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
    volumes:
      - ./gitea-data:/var/lib/gitea:ro,z
      - ./workflow-pr/workflow-pr.json:/etc/workflow-pr.json:ro,z
      - ./workflow-pr-repos:/var/lib/workflow-pr/repos:Z
    command: [
      "-check-on-start",
      "-debug",
      "-gitea-url", "http://gitea-test:3000",
      "-url", "amqps://rabbitmq-test:5671",
      "-config", "/etc/workflow-pr.json",
      "-repo-path", "/var/lib/workflow-pr/repos"
    ]
    restart: unless-stopped

  mock-obs:
    build: ./mock-obs
    container_name: mock-obs
    networks:
      - gitea-network
    ports:
      - "8080:8080"
    volumes:
      - ./mock-obs/responses:/app/responses:z  # Use :z for shared SELinux label
    restart: unless-stopped

  obs-staging-bot:
    build:
      context: ..
      dockerfile: integration/obs-staging-bot/Dockerfile${GIWTF_IMAGE_SUFFIX}
    container_name: obs-staging-bot
    networks:
      - gitea-network
    depends_on:
      gitea:
        condition: service_started
      mock-obs:
        condition: service_started
    environment:
      - OBS_USER=mock
      - OBS_PASSWORD=mock-long-password
    volumes:
      - ./gitea-data:/gitea-data:ro,z
    command:
      - "-debug"
      - "-gitea-url=http://gitea-test:3000"
      - "-obs=http://mock-obs:8080"
      - "-obs-web=http://mock-obs:8080"
    restart: unless-stopped
@@ -1,30 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFKzCCAxOgAwIBAgIUJsg/r0ZyIVxtAkrlZKOr4LvYEvMwDQYJKoZIhvcNAQEL
BQAwGDEWMBQGA1UEAwwNcmFiYml0bXEtdGVzdDAeFw0yNjAxMjQxMjQyMjNaFw0z
NjAxMjIxMjQyMjNaMBgxFjAUBgNVBAMMDXJhYmJpdG1xLXRlc3QwggIiMA0GCSqG
SIb3DQEBAQUAA4ICDwAwggIKAoICAQC9OjTq4DgqVo0mRpS8DGRR6SFrSpb2bqnl
YI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4WyRQH5PwnXOA1Lxz51qp1
t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sbKY1+/NVJpMs6dL6NzJXG
N6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYUmm43tjjxPZ81rhCXFx/F
v1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKXOPDxrM1erBN5eCwTpJMS
4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVAsqhiUa2eeAVkFwZl9wRw
lddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hAY/V+VbF1iHGAVIz4XlUj
gC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP+1hZJbm7ePsS5Na+qYG1
LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDPXCxFXDrIcFvldmJYTyeK
7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9bBjNnWaY2z17hijnzVVz
jOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrkNt7DOCUqwBnuBqYiRZFt
A1QicHxaEQIDAQABo20wazAdBgNVHQ4EFgQU3l25Ghab2k7UhwxftZ2vZ1HO9Sow
HwYDVR0jBBgwFoAU3l25Ghab2k7UhwxftZ2vZ1HO9SowDwYDVR0TAQH/BAUwAwEB
/zAYBgNVHREEETAPgg1yYWJiaXRtcS10ZXN0MA0GCSqGSIb3DQEBCwUAA4ICAQB9
ilcsRqIvnyN25Oh668YC/xxyeNTIaIxjMLyJaMylBRjNwo1WfbdpXToaEXgot5gK
5HGlu3OIBBwBryNAlBtf/usxzLzmkEsm1Dsn9sJNY1ZTkD8MO9yyOtLqBlqAsIse
oPVjzSdjk1fP3uyoG/ZUVAFZHZD3/9BEsftfS13oUVxo7vYz1DSyUATT/4QTYMQB
PytL6EKJ0dLyuy7rIkZVkaUi+P7GuDXj25Mi6Zkxaw2QnssSuoqy1bAMkzEyNFK5
0wlNWEY8H3jRZuAz1T4AXb9sjeCgBKZoWXgmGbzleOophdzvlq66UGAWPWYFGp8Q
4GJognovhKzSY9+3n+rMPLAXSao48SYDlyTOZeBo1DTluR5QjVd+NWbEdIsA6buQ
a6uPTSVKsulm7hyUlEZp+SsYAtVoZx3jzKKjZXjnaxOfUFWx6pTxNXvxR7pQ/8Ls
IfduGy4VjKVQdyuwCE7eVEPDK6d53WWs6itziuj7gfq8mHvZivIA65z05lTwqkvb
1WS2aht+zacqVSYyNrK+/kJA2CST3ggc1EO73lRvbfO9LJZWMdO+f/tkXH4zkfmL
A3JtJcLOWuv+ZrZvHMpKlBFNMySxE3IeGX+Ad9bGyhZvZULut95/QD7Xy4cPRZHF
R3SRn0rn/BeTly+5fkEoFk+ttah8IbwzhduPyPIxng==
-----END CERTIFICATE-----
@@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC9OjTq4DgqVo0m
RpS8DGRR6SFrSpb2bqnlYI7xSI3y67i/oP4weiZSawk2+euxhsN4FfOlsAgvpg4W
yRQH5PwnXOA1Lxz51qp1t0VumE3B1RDheiBTE8loG1FvmikOiek2gzz76nK0R1sb
KY1+/NVJpMs6dL6NzJXGN6aCpWTk7oeY+lW5bPBG0VRA7RUG80w9R9RDtqYc0SYU
mm43tjjxPZ81rhCXFx/Fv1kxnNTQJdATNrTn9SofymSfm42f4loOGyGBsqJYybKX
OPDxrM1erBN5eCwTpJMS4J30aMSdQTzza2Z4wi2LR0vq/FU/ouqzlRp7+7tNJbVA
sqhiUa2eeAVkFwZl9wRwlddY0W85U507nw5M3iQv2GTOhJRXwhWpzDUFQ0fT56hA
Y/V+VbF1iHGAVIz4XlUjgC21wuXz0xRdqP8cCd8UHLSbp8dmie161GeKVwO037aP
+1hZJbm7ePsS5Na+qYG1LCy0GhfQn71BsYUaGJtfRcaMwIbqaNIYn+Y6S1FVjxDP
XCxFXDrIcFvldmJYTyeK7KrkO2P1RbEiwYyPPUhthbb1Agi9ZutZsnadmPRk27t9
bBjNnWaY2z17hijnzVVzjOHuPlpb7cSaagVzLTT0zrZ+ifnZWwdl0S2ZrjBAeVrk
Nt7DOCUqwBnuBqYiRZFtA1QicHxaEQIDAQABAoICAA+AWvDpzNgVDouV6R3NkxNN
upXgPqUx9BuNETCtbal6i4AxR1l/zC9gwti82QTKQi2OeM74MHd8zjcqIkiyRsDP
wDNDKIfEAONTT+4LLoWEN5WNDGRZ4Nw1LrLqiVX+ULtNPXvynRJtLQa43PVL74oQ
pLBle23A1n0uNmcJ9w21B6ktysN9q+JVSCZodZpD6Jk1jus8JXgDXy/9Za2NMTV8
A5ShbYz/ETSBJCSnERz7GARW7TN6V0jS6vLTSqMQJyn0KYbHNDr7TPTL7psRuaI5
jP/cqxmx1/WKLo5k3cR3IW/cesDGQXZhMRQvNymXJkxvWMPS36lmfyZtbFNflw4Z
9OD+2RKt5jFDJjG8fYiYoYBdLiTj2Wdvo4mbRPNkTL75o65riDkDCQuZhDXFBm3s
B1aDv5y1AXrzNZ5JSikszKgbLNPYB0rI3unp6i0P1985w6dyel0MGG+ouaeiyrxS
9IgJDnE4BJ79mEzHTXtbZ/+3aGAK/Y6mU8Pz2s6/+6ccT0miievsMS+si1KESF31
WLnsMdcrJcxqcm7Ypo24G0yBJluSDKtD1cqQUGN1MKp+EEv1SCH+4csaa3ooRB0o
YveySjqxtmhVpQuY3egCOaXhPmX7lgYwoe+G4UIkUMwPn20WMg+jFxgPASdh4lqE
mzpePP7STvEZAr+rrLu1AoIBAQDmCEiKOsUTtJlX3awOIRtCkIqBxS1E6rpyjfxK
A6+zpXnE++8MhIJ07+9bPdOshGjS3JbJ+hu+IocbNg++rjRArYQnJh8/qBZ2GB2v
Ryfptsoxtk/xUsmOfchvk4tOjvDHZrJehUtGc+LzX/WUqpgtEk1Gnx7RGRuDNnqS
Q1+yU4NubHwOHPswBBXOnVtopcAHFpKhbKRFOHOwMZN99qcWVIkv4J9c6emcPMLI
I/QPIvwB6WmbLa0o3JNXlD4kPdqCgNW36KEFiW8m+4tgzF3HWYSAyIeBRFG7ouE6
yk5hiptPKhZlTmTAkQSssCXksiTw1rsspFULZSRyaaaPunvVAoIBAQDSlrKu+B2h
AJtxWy5MQDOiroqT3KDneIGXPYgH3/tiDmxy0CIEbSb5SqZ6zAmihs3dWWCmc1JH
YObRrqIxu+qVi4K+Uz8l7WBrS7DkjZjajq+y/mrZYUNRoL2q9mnNqRNan7zxWDJc
U4u2NH9P4LOz6ttE4OG9SC3/gZLoepA+ANZatu93749IT7z8ske0MVPP76jVI1Gl
D7cPIlzcBUdJgNV8UOkxeqU3+S6Jn17Tkx5qMWND/2BCN4voQ4pfGWSkbaHlMLh1
2SbVuR+HYPY3aPJeSY7MEPoc7d2SSVOcVDr2AQwSDSCCgIFZOZlawehUz9R51hK8
LlaccFWXhS9NAoIBAEFZNRJf48DXW4DErq5M5WuhmFeJZnTfohwNDhEQvwdwCQnW
8HBD7LO/veXTyKCH9SeCFyxF6z+2m181mn93Cc0d/h8JC3OQEuF1tGko88PHc+Vv
f4J1HGFohlp8NeUZYnmjSSTlBR98qIqvRhr348daHa3kYmLQmSpLfcKzdSo542qp
UwzHWuynHHLX7THrdIQO+5T0Qi6P/P2e9+GfApSra1W4oE1K/lyuPj+RRzJNo/3/
C0tUTI8BKrKEoKq3D65nX0+hvKzQAE24xD25kSKi4aucTDKC8B04BngnJOE8+SYi
NL6O6Lxz9joAyKMRoMDyn7Xs8WQNVa9TKEhImAkCggEBAMljmIm/egZIoF7thf8h
vr+rD5eL/Myf776E95wgVTVW+dtqs71r7UOmYkM48VXeeO1f1hAYZO0h/Fs2GKJb
RWGyQ1xkHBXXRsgVYJuR1kXdAqW4rNIqM8jSYdAnStOFB5849+YOJEsrEocy+TWY
fAJpbTwXm4n6hxK8BZQR8fN5tYSXQbd+/5V1vBQlInFuYuqOFPWPizrBJp1wjUFU
QvJGJON4NSo+UdaPlDPEl1jabtG7XWTfylxI5qE+RgvgKuEcfyDBUQZSntLw8Pf0
gEJJOM92pPr+mVIlICoPucfcvW4ZXkO9DgP/hLOhY8jpe5fwERBa6xvPbMC6pP/8
PFkCggEBAOLtvboBThe57QRphsKHmCtRJHmT4oZzhMYsE+5GMGYzPNWod1hSyfXn
EB8iTmAFP5r7FdC10B8mMpACXuDdi2jbmlYOTU6xNTprSKtv8r8CvorWJdsQwRsy
pZ7diSCeyi0z/sIx//ov0b3WD0E8BG/HWsFbX0p5xXpaljYEv5dK7xUiWgBW+15a
N1AeVcPiXRDwhQMVcvVOvzgwKsw+Rpls/9W4hihcBHaiMcBUDFWxJtnf4ZAGAZS3
/694MOYlmfgT/cDqF9oOsCdxM0w24kL0dcUM7zPk314ixAAfUwXaxisBhS2roJ88
HsuK9JPSK/AS0IqUtKiq4LZ9ErixYF0=
-----END PRIVATE KEY-----
@@ -1,35 +0,0 @@
{
  "users": [
    {
      "name": "gitea",
      "password_hash": "5IdZmMJhNb4otX/nz9Xtmkpj9khl6+5eAmXNs/oHYwQNO3jg",
      "hashing_algorithm": "rabbit_password_hashing_sha256",
      "tags": "administrator"
    }
  ],
  "vhosts": [
    {
      "name": "/"
    }
  ],
  "permissions": [
    {
      "user": "gitea",
      "vhost": "/",
      "configure": ".*",
      "write": ".*",
      "read": ".*"
    }
  ],
  "exchanges": [
    {
      "name": "pubsub",
      "vhost": "/",
      "type": "topic",
      "durable": true,
      "auto_delete": false,
      "internal": false,
      "arguments": {}
    }
  ]
}
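The definitions above pre-create the "gitea" user and a durable topic exchange named "pubsub", which the gitea-publisher fills using the "suse" topic domain configured in the compose file. A rough sketch of publishing to that exchange over the TLS listener, using pika; the routing-key layout shown is illustrative only (not the publisher's actual scheme), and pika is not a dependency of this repository:

import ssl
import pika

# Assumptions from the integration setup: user/password "gitea"/"gitea",
# AMQPS on rabbitmq-test:5671, CA cert from rabbitmq-config/certs/cert.pem.
ctx = ssl.create_default_context(cafile="rabbitmq-config/certs/cert.pem")
params = pika.ConnectionParameters(
    host="rabbitmq-test",
    port=5671,
    credentials=pika.PlainCredentials("gitea", "gitea"),
    ssl_options=pika.SSLOptions(ctx, server_hostname="rabbitmq-test"),
)

conn = pika.BlockingConnection(params)
channel = conn.channel()
# The durable "pubsub" topic exchange is pre-declared by definitions.json,
# so no exchange_declare is needed before publishing.
channel.basic_publish(
    exchange="pubsub",
    routing_key="suse.src.pool.pkgA.pull_request",  # hypothetical key layout
    body=b'{"action": "opened"}',
)
conn.close()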
@@ -1,83 +0,0 @@
# Test Plan: workflow-pr Bot

## 1. Introduction

This document outlines the test plan for the `workflow-pr` bot. The bot is responsible for synchronizing pull requests between ProjectGit and PackageGit repositories, managing reviews, and handling merges. This test plan aims to ensure the bot's functionality and reliability.

## 2. Scope

### In Scope

* Pull Request synchronization (creation, update, closing).
* Reviewer management (adding, re-adding, mandatory vs. advisory).
* Merge management, including `ManualMergeOnly` and `ManualMergeProject` flags.
* Configuration parsing (`workflow.config`).
* Label management (`staging/Auto`, `review/Pending`, `review/Done`).
* Maintainership and permissions handling.

### Out of Scope

* Package deletion requests (planned feature).
* Underlying infrastructure (Gitea, RabbitMQ, OBS).
* Performance and load testing.
* Closing a PackageGit PR (currently disabled).

## 3. Test Objectives

* Verify that pull requests are correctly synchronized between ProjectGit and PackageGit.
* Ensure that reviewers are correctly added to pull requests based on the configuration.
* Validate that pull requests are merged only when all conditions are met.
* Confirm that the bot correctly handles various configurations in `workflow.config`.
* Verify that labels are correctly applied to pull requests.
* Ensure that maintainership and permissions are correctly enforced.

## 4. Test Strategy

The testing will be conducted in a dedicated test environment that mimics the production environment. The strategy combines:

* **Component Testing:** Testing individual components of the bot in isolation using unit tests written in Go.
* **Integration Testing:** Testing the bot's interaction with Gitea, RabbitMQ, and a mock OBS server using `pytest`.
* **End-to-End Testing:** Testing the complete workflow, from creating a pull request to merging it, using `pytest`.

### Test Automation

* **Unit Tests:** Go's built-in testing framework will be used to write unit tests for individual functions and methods.
* **Integration and End-to-End Tests:** `pytest` will be used to write integration and end-to-end tests that use the Gitea API to create pull requests and verify the bot's behavior.

### Success Metrics

* **Test Coverage:** The goal is to achieve at least 80% test coverage for the bot's codebase.
* **Bug Detection Rate:** The number of bugs found during the testing phase.
* **Test Pass Rate:** The percentage of test cases that pass without any issues.

## 5. Test Cases

| Test Case ID | Description | Steps to Reproduce | Expected Results | Priority |
| :--- | :--- | :--- | :--- | :--- |
| **TC-SYNC-001** | **Create ProjectGit PR from PackageGit PR** | 1. Create a new PR in a PackageGit repository. | 1. A new PR is created in the corresponding ProjectGit repository with the title "Forwarded PRs: <package_name>".<br>2. The ProjectGit PR description contains a link to the PackageGit PR (e.g., `PR: org/package_repo!pr_number`).<br>3. The package submodule in the ProjectGit PR points to the PackageGit PR's commit. | High |
| **TC-SYNC-002** | **Update ProjectGit PR from PackageGit PR** | 1. Push a new commit to an existing PackageGit PR. | 1. The corresponding ProjectGit PR's head branch is updated with the new commit. | High |
| **TC-SYNC-003** | **WIP Flag Synchronization** | 1. Mark a PackageGit PR as "Work In Progress".<br>2. Remove the WIP flag from the PackageGit PR. | 1. The corresponding ProjectGit PR is also marked as "Work In Progress".<br>2. The WIP flag on the ProjectGit PR is removed. | Medium |
| **TC-SYNC-004** | **WIP Flag (multiple referenced package PRs)** | 1. Create a ProjectGit PR that references multiple PackageGit PRs.<br>2. Mark one of the PackageGit PRs as "Work In Progress".<br>3. Remove the "Work In Progress" flag from all PackageGit PRs. | 1. The ProjectGit PR is marked as "Work In Progress".<br>2. The "Work In Progress" flag is removed from the ProjectGit PR only after it has been removed from all associated PackageGit PRs. | Medium |
| **TC-SYNC-005** | **NoProjectGitPR = true, edits disabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR without "Allow edits from maintainers" enabled.<br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The bot adds a warning comment to the PackageGit PR explaining that it cannot update the PR. | High |
| **TC-SYNC-006** | **NoProjectGitPR = true, edits enabled** | 1. Set `NoProjectGitPR = true` in `workflow.config`.<br>2. Create a PackageGit PR with "Allow edits from maintainers" enabled.<br>3. Push a new commit to the PackageGit PR. | 1. No ProjectGit PR is created.<br>2. The submodule commit on the project PR is updated with the new commit from the PackageGit PR. | High |
| **TC-COMMENT-001** | **Detect duplicate comments** | 1. Create a PackageGit PR.<br>2. Wait for the `workflow-pr` bot to act on the PR.<br>3. Edit the body of the PR to trigger the bot a second time. | 1. The bot should not post a duplicate comment. | High |
| **TC-REVIEW-001** | **Add mandatory reviewers** | 1. Create a new PackageGit PR. | 1. All mandatory reviewers are added to both the PackageGit and ProjectGit PRs. | High |
| **TC-REVIEW-002** | **Add advisory reviewers** | 1. Create a new PackageGit PR with advisory reviewers defined in the configuration. | 1. Advisory reviewers are added to the PR, but their approval is not required for merging. | Medium |
| **TC-REVIEW-003** | **Re-add reviewers** | 1. Push a new commit to a PackageGit PR after it has been approved. | 1. The original reviewers are re-added to the PR. | Medium |
| **TC-REVIEW-004** | **Package PR created by a maintainer** | 1. Create a PackageGit PR from the account of a package maintainer. | 1. No review is requested from other package maintainers. | High |
| **TC-REVIEW-005** | **Package PR created by an external user (approve)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers approves the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer approves the PR, the other maintainers are removed as reviewers. | High |
| **TC-REVIEW-006** | **Package PR created by an external user (reject)** | 1. Create a PackageGit PR from the account of a user who is not a package maintainer.<br>2. One of the package maintainers rejects the PR. | 1. All package maintainers are added as reviewers.<br>2. Once one maintainer rejects the PR, the other maintainers are removed as reviewers. | High |
| **TC-REVIEW-007** | **Package PR created by a maintainer with ReviewRequired=true** | 1. Set `ReviewRequired = true` in `workflow.config`.<br>2. Create a PackageGit PR from the account of a package maintainer. | 1. A review is requested from other package maintainers if available. | High |
| **TC-MERGE-001** | **Automatic Merge** | 1. Create a PackageGit PR.<br>2. Ensure all mandatory reviews are completed on both project and package PRs. | 1. The PR is automatically merged. | High |
| **TC-MERGE-002** | **ManualMergeOnly with Package Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a package maintainer for that package. | 1. The PR is merged. | High |
| **TC-MERGE-003** | **ManualMergeOnly with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a user who is not a maintainer for that package. | 1. The PR is not merged. | High |
| **TC-MERGE-004** | **ManualMergeOnly with multiple packages** | 1. Create a ProjectGit PR that references multiple PackageGit PRs with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on each package PR from the account of a package maintainer. | 1. The PR is merged only after "merge ok" is commented on all associated PackageGit PRs. | High |
| **TC-MERGE-005** | **ManualMergeOnly with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeOnly` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the package PR from the account of a project maintainer. | 1. The PR is merged. | High |
| **TC-MERGE-006** | **ManualMergeProject with Project Maintainer** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a project maintainer. | 1. The PR is merged. | High |
| **TC-MERGE-007** | **ManualMergeProject with unauthorized user** | 1. Create a PackageGit PR with `ManualMergeProject` set to `true`.<br>2. Ensure all mandatory reviews are completed on both project and package PRs.<br>3. Comment "merge ok" on the project PR from the account of a package maintainer. | 1. The PR is not merged. | High |
| **TC-CONFIG-001** | **Invalid Configuration** | 1. Provide an invalid `workflow.config` file. | 1. The bot reports an error and does not process any PRs. | High |
| **TC-LABEL-001** | **Apply `staging/Auto` label** | 1. Create a new PackageGit PR. | 1. The `staging/Auto` label is applied to the ProjectGit PR. | High |
| **TC-LABEL-002** | **Apply `review/Pending` label** | 1. Create a new PackageGit PR. | 1. The `review/Pending` label is applied to the ProjectGit PR when there are pending reviews. | Medium |
| **TC-LABEL-003** | **Apply `review/Done` label** | 1. Ensure all mandatory reviews for a PR are completed. | 1. The `review/Done` label is applied to the ProjectGit PR when all mandatory reviews are completed. | Medium |
@@ -1,78 +0,0 @@
"""
This module contains pytest fixtures for setting up the test environment.
"""

import pytest
import requests
import time
import os

# Assuming GiteaAPIClient is in tests/lib/common_test_utils.py
from tests.lib.common_test_utils import GiteaAPIClient

@pytest.fixture(scope="session")
def gitea_env():
    """
    Sets up the Gitea environment with dummy data and provides a GiteaAPIClient instance.
    """
    gitea_url = "http://127.0.0.1:3000"

    # Read admin token
    admin_token_path = "./gitea-data/admin.token"  # Corrected path
    admin_token = None
    try:
        with open(admin_token_path, "r") as f:
            admin_token = f.read().strip()
    except FileNotFoundError:
        raise Exception(f"Admin token file not found at {admin_token_path}. Ensure it's generated and accessible.")

    # Headers for authenticated requests
    auth_headers = {"Authorization": f"token {admin_token}", "Content-Type": "application/json"}

    # Wait for Gitea to be available
    print(f"Waiting for Gitea at {gitea_url}...")
    max_retries = 30
    for i in range(max_retries):
        try:
            # Check a specific API endpoint that indicates readiness
            response = requests.get(f"{gitea_url}/api/v1/version", headers=auth_headers, timeout=5)
            if response.status_code == 200:
                print("Gitea API is available.")
                break
        except requests.exceptions.ConnectionError:
            pass
        print(f"Gitea not ready ({response.status_code if 'response' in locals() else 'ConnectionError'}), retrying in 5 seconds... ({i+1}/{max_retries})")
        time.sleep(5)
    else:
        raise Exception("Gitea did not become available within the expected time.")

    client = GiteaAPIClient(base_url=gitea_url, token=admin_token)

    # Setup dummy data
    print("--- Starting Gitea Dummy Data Setup from Pytest Fixture ---")
    client.create_org("products")
    client.create_org("pool")

    client.create_repo("products", "SLFO")
    client.create_repo("pool", "pkgA")
    client.create_repo("pool", "pkgB")

    # The add_submodules method also creates workflow.config and staging.config
    client.add_submodules("products", "SLFO")

    client.add_collaborator("products", "SLFO", "autogits_obs_staging_bot", "write")
    client.add_collaborator("products", "SLFO", "workflow-pr", "write")
    client.add_collaborator("pool", "pkgA", "workflow-pr", "write")
    client.add_collaborator("pool", "pkgB", "workflow-pr", "write")

    client.update_repo_settings("products", "SLFO")
    client.update_repo_settings("pool", "pkgA")
    client.update_repo_settings("pool", "pkgB")
    print("--- Gitea Dummy Data Setup Complete ---")
    time.sleep(5)  # Add a small delay for Gitea to fully process changes

    yield client

    # Teardown (optional, depending on test strategy)
    # For now, we'll leave resources for inspection. If a clean slate is needed for each test,
    # this fixture's scope would be 'function' and teardown logic would be added here.
@@ -1,23 +0,0 @@
<resultlist state="0fef640bfb56c3e76fcfb698b19b59c0">
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="aarch64" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="ppc64le" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="x86_64" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
  <result project="SUSE:SLFO:Main:PullRequest:1881" repository="standard" arch="s390x" code="unpublished" state="unpublished">
    <scmsync>https://src.suse.de/products/SLFO.git?onlybuild=openjpeg2#d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scmsync>
    <scminfo>d99ac14dedf9f44e1744c71aaf221d15f6bed479ca11f15738e98f3bf9ae05a1</scminfo>
    <status package="openjpeg2" code="succeeded"/>
  </result>
</resultlist>
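The resultlist above is the kind of document the mock OBS serves for a project's _result endpoint; the mock_build_result fixture later rewrites the package and code attributes per test. A small sketch of how such a document can be reduced to one verdict per arch/package with ElementTree, assuming a local copy such as the tests/data/build_result.xml.template fixture:

import xml.etree.ElementTree as ET

# Parse the build result fixture and collect the status code per (arch, package).
tree = ET.parse("tests/data/build_result.xml.template")  # path used by the test fixtures
codes = {
    (result.get("arch"), status.get("package")): status.get("code")
    for result in tree.getroot().findall("result")
    for status in result.findall("status")
}
all_succeeded = all(code == "succeeded" for code in codes.values())
print(codes, all_succeeded)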
@@ -1,18 +0,0 @@
<project name="openSUSE:Leap:16.0:PullRequest">
  <title>Leap 16.0 PullRequest area</title>
  <description>Base project to define the pull request builds</description>
  <person userid="autogits_obs_staging_bot" role="maintainer"/>
  <person userid="maxlin_factory" role="maintainer"/>
  <group groupid="maintenance-opensuse.org" role="maintainer"/>
  <debuginfo>
    <enable/>
  </debuginfo>
  <repository name="standard">
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <arch>x86_64</arch>
    <arch>i586</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
</project>
@@ -1,59 +0,0 @@
<project name="openSUSE:Leap:16.0">
  <title>openSUSE Leap 16.0 based on SLFO</title>
  <description>Leap 16.0 based on SLES 16.0 (specifically SLFO:1.2)</description>
  <link project="openSUSE:Backports:SLE-16.0"/>
  <scmsync>http://gitea-test:3000/products/SLFO#main</scmsync>
  <person userid="dimstar_suse" role="maintainer"/>
  <person userid="lkocman-factory" role="maintainer"/>
  <person userid="maxlin_factory" role="maintainer"/>
  <person userid="factory-auto" role="reviewer"/>
  <person userid="licensedigger" role="reviewer"/>
  <group groupid="autobuild-team" role="maintainer"/>
  <group groupid="factory-maintainers" role="maintainer"/>
  <group groupid="maintenance-opensuse.org" role="maintainer"/>
  <group groupid="factory-staging" role="reviewer"/>
  <build>
    <disable repository="ports"/>
  </build>
  <debuginfo>
    <enable/>
  </debuginfo>
  <repository name="standard" rebuild="local">
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>local</arch>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
  <repository name="product">
    <releasetarget project="openSUSE:Leap:16.0:ToTest" repository="product" trigger="manual"/>
    <path project="openSUSE:Leap:16.0:NonFree" repository="standard"/>
    <path project="openSUSE:Leap:16.0" repository="images"/>
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>local</arch>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
  <repository name="ports">
    <arch>armv7l</arch>
  </repository>
  <repository name="images">
    <releasetarget project="openSUSE:Leap:16.0:ToTest" repository="images" trigger="manual"/>
    <path project="openSUSE:Leap:16.0" repository="standard"/>
    <path project="openSUSE:Backports:SLE-16.0" repository="standard"/>
    <path project="SUSE:SLFO:1.2" repository="standard"/>
    <arch>i586</arch>
    <arch>x86_64</arch>
    <arch>aarch64</arch>
    <arch>ppc64le</arch>
    <arch>s390x</arch>
  </repository>
</project>
@@ -1,301 +0,0 @@
import os
import time
import pytest
import requests
import json
import xml.etree.ElementTree as ET
from pathlib import Path
import base64

TEST_DATA_DIR = Path(__file__).parent.parent / "data"
BUILD_RESULT_TEMPLATE = TEST_DATA_DIR / "build_result.xml.template"
MOCK_RESPONSES_DIR = Path(__file__).parent.parent.parent / "mock-obs" / "responses"
MOCK_BUILD_RESULT_FILE = (
    MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0:PullRequest:*__result"
)
MOCK_BUILD_RESULT_FILE1 = MOCK_RESPONSES_DIR / "GET_build_openSUSE:Leap:16.0__result"


@pytest.fixture
def mock_build_result():
    """
    Fixture to create a mock build result file from the template.
    Returns a factory function that the test can call with parameters.
    """

    def _create_result_file(package_name: str, code: str):
        tree = ET.parse(BUILD_RESULT_TEMPLATE)
        root = tree.getroot()
        for status_tag in root.findall(".//status"):
            status_tag.set("package", package_name)
            status_tag.set("code", code)

        MOCK_RESPONSES_DIR.mkdir(exist_ok=True)
        tree.write(MOCK_BUILD_RESULT_FILE)
        tree.write(MOCK_BUILD_RESULT_FILE1)
        return str(MOCK_BUILD_RESULT_FILE)

    yield _create_result_file

    if MOCK_BUILD_RESULT_FILE.exists():
        MOCK_BUILD_RESULT_FILE.unlink()
        MOCK_BUILD_RESULT_FILE1.unlink()


class GiteaAPIClient:
    def __init__(self, base_url, token):
        self.base_url = base_url
        self.headers = {"Authorization": f"token {token}", "Content-Type": "application/json"}

    def _request(self, method, path, **kwargs):
        url = f"{self.base_url}/api/v1/{path}"
        response = requests.request(method, url, headers=self.headers, **kwargs)
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            print(f"HTTPError in _request: {e}")
            print(f"Response Content: {e.response.text}")
            raise
        return response

    def create_org(self, org_name):
        print(f"--- Checking organization: {org_name} ---")
        try:
            self._request("GET", f"orgs/{org_name}")
            print(f"Organization '{org_name}' already exists.")
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 404:
                print(f"Creating organization '{org_name}'...")
                data = {"username": org_name, "full_name": org_name}
                self._request("POST", "orgs", json=data)
                print(f"Organization '{org_name}' created.")
            else:
                raise

    def create_repo(self, org_name, repo_name):
        print(f"--- Checking repository: {org_name}/{repo_name} ---")
        try:
            self._request("GET", f"repos/{org_name}/{repo_name}")
            print(f"Repository '{org_name}/{repo_name}' already exists.")
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 404:
                print(f"Creating repository '{org_name}/{repo_name}'...")
                data = {
                    "name": repo_name,
                    "auto_init": True,
                    "default_branch": "main",
                    "gitignores": "Go",
                    "license": "MIT",
                    "private": False,
                    "readme": "Default"
                }
                self._request("POST", f"orgs/{org_name}/repos", json=data)
                print(f"Repository '{org_name}/{repo_name}' created with a README.")
                time.sleep(1)  # Added delay to allow Git operations to become available
            else:
                raise

    def add_collaborator(self, org_name, repo_name, collaborator_name, permission="write"):
        print(f"--- Adding {collaborator_name} as a collaborator to {org_name}/{repo_name} with '{permission}' permission ---")
        data = {"permission": permission}
        # Gitea API returns 204 No Content on success and doesn't fail if already present.
        self._request("PUT", f"repos/{org_name}/{repo_name}/collaborators/{collaborator_name}", json=data)
        print(f"Attempted to add {collaborator_name} to {org_name}/{repo_name}.")

    def add_submodules(self, org_name, repo_name):
        print(f"--- Adding submodules to {org_name}/{repo_name} using diffpatch ---")
        parent_repo_path = f"repos/{org_name}/{repo_name}"

        try:
            self._request("GET", f"{parent_repo_path}/contents/.gitmodules")
            print("Submodules appear to be already added. Skipping.")
            return
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 404:
                raise

        # Get latest commit SHAs for the submodules
        pkg_a_sha = self._request("GET", "repos/pool/pkgA/branches/main").json()["commit"]["id"]
        pkg_b_sha = self._request("GET", "repos/pool/pkgB/branches/main").json()["commit"]["id"]

        if not pkg_a_sha or not pkg_b_sha:
            raise Exception("Error: Could not get submodule commit SHAs. Cannot apply patch.")

        diff_content = f"""diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..f1838bd
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "pkgA"]
+ path = pkgA
+ url = ../../pool/pkgA.git
+[submodule "pkgB"]
+ path = pkgB
+ url = ../../pool/pkgB.git
diff --git a/pkgA b/pkgA
new file mode 160000
index 0000000..{pkg_a_sha}
--- /dev/null
+++ b/pkgA
@@ -0,0 +1 @@
+Subproject commit {pkg_a_sha}
diff --git a/pkgB b/pkgB
new file mode 160000
index 0000000..{pkg_b_sha}
--- /dev/null
+++ b/pkgB
@@ -0,0 +1 @@
+Subproject commit {pkg_b_sha}
diff --git a/workflow.config b/workflow.config
new file mode 100644
--- /dev/null
+++ b/workflow.config
@@ -0,0 +7 @@
+{{
+ "Workflows": ["pr"],
+ "GitProjectName": "products/SLFO#main",
+ "Organization": "pool",
+ "Branch": "main",
+ "ManualMergeProject": true,
+ "Reviewers": [ "-autogits_obs_staging_bot" ]
+}}
diff --git a/staging.config b/staging.config
new file mode 100644
--- /dev/null
+++ b/staging.config
@@ -0,0 +3 @@
+{{
+ "ObsProject": "openSUSE:Leap:16.0",
+ "StagingProject": "openSUSE:Leap:16.0:PullRequest"
+}}
"""
        message = "Add pkgA and pkgB as submodules and config files"
        data = {
            "branch": "main",
            "content": diff_content,
            "message": message
        }
        print(f"Applying submodule patch to {org_name}/{repo_name}...")
        self._request("POST", f"{parent_repo_path}/diffpatch", json=data)
        print("Submodule patch applied.")

    def update_repo_settings(self, org_name, repo_name):
        print(f"--- Updating repository settings for: {org_name}/{repo_name} ---")
        repo_data = self._request("GET", f"repos/{org_name}/{repo_name}").json()

        # Ensure these are boolean values, not string
        repo_data["allow_manual_merge"] = True
        repo_data["autodetect_manual_merge"] = True

        self._request("PATCH", f"repos/{org_name}/{repo_name}", json=repo_data)
        print(f"Repository settings for '{org_name}/{repo_name}' updated.")

    def create_gitea_pr(self, repo_full_name: str, diff_content: str, title: str):
        owner, repo = repo_full_name.split("/")
        url = f"repos/{owner}/{repo}/pulls"
        base_branch = "main"

        # Create a new branch for the PR
        new_branch_name = f"pr-branch-{int(time.time())}"

        # Get the latest commit SHA of the base branch
        base_commit_sha = self._request("GET", f"repos/{owner}/{repo}/branches/{base_branch}").json()["commit"]["id"]

        # Create the new branch
        self._request("POST", f"repos/{owner}/{repo}/branches", json={
            "new_branch_name": new_branch_name,
            "old_ref": base_commit_sha  # Use the commit SHA directly
        })

        # Create a new file or modify an existing one in the new branch
        file_path = f"test-file-{int(time.time())}.txt"
        file_content = "This is a test file for the PR."
        self._request("POST", f"repos/{owner}/{repo}/contents/{file_path}", json={
            "content": base64.b64encode(file_content.encode('utf-8')).decode('ascii'),
            "message": "Add test file",
            "branch": new_branch_name
        })

        # Now create the PR
        data = {
            "head": new_branch_name,  # Use the newly created branch as head
            "base": base_branch,
            "title": title,
            "body": "Test Pull Request"
        }
        response = self._request("POST", url, json=data)
        return response.json()

    def modify_gitea_pr(self, repo_full_name: str, pr_number: int, diff_content: str, message: str):
        owner, repo = repo_full_name.split("/")

        # Get PR details to find the head branch
        pr_details = self._request("GET", f"repos/{owner}/{repo}/pulls/{pr_number}").json()
        head_branch = pr_details["head"]["ref"]

        file_path = f"modified-file-{int(time.time())}.txt"
        file_content = "This is a modified test file for the PR."

        self._request("POST", f"repos/{owner}/{repo}/contents/{file_path}", json={
            "content": base64.b64encode(file_content.encode('utf-8')).decode('ascii'),
            "message": message,
            "branch": head_branch
        })

    def update_gitea_pr_properties(self, repo_full_name: str, pr_number: int, **kwargs):
        owner, repo = repo_full_name.split("/")
        url = f"repos/{owner}/{repo}/pulls/{pr_number}"
        response = self._request("PATCH", url, json=kwargs)
        return response.json()

    def get_timeline_events(self, repo_full_name: str, pr_number: int):
        owner, repo = repo_full_name.split("/")
        url = f"repos/{owner}/{repo}/issues/{pr_number}/timeline"

        # Retry logic for timeline events
        for i in range(10):  # Try up to 10 times
            try:
                response = self._request("GET", url)
                timeline_events = response.json()
                if timeline_events:  # Check if timeline_events list is not empty
                    return timeline_events
                print(f"Attempt {i+1}: Timeline for PR {pr_number} is empty. Retrying in 3 seconds...")
                time.sleep(3)
            except requests.exceptions.HTTPError as e:
                if e.response.status_code == 404:
                    print(f"Attempt {i+1}: Timeline for PR {pr_number} not found yet. Retrying in 3 seconds...")
                    time.sleep(3)
                else:
                    raise  # Re-raise other HTTP errors
        raise Exception(f"Failed to retrieve timeline for PR {pr_number} after multiple retries.")

    def get_comments(self, repo_full_name: str, pr_number: int):
        owner, repo = repo_full_name.split("/")
        url = f"repos/{owner}/{repo}/issues/{pr_number}/comments"

        # Retry logic for comments
        for i in range(10):  # Try up to 10 times
            try:
                response = self._request("GET", url)
                comments = response.json()
                print(f"Attempt {i+1}: Comments for PR {pr_number} received: {comments}")  # Added debug print
                if comments:  # Check if comments list is not empty
                    return comments
                print(f"Attempt {i+1}: Comments for PR {pr_number} are empty. Retrying in 3 seconds...")
                time.sleep(3)
            except requests.exceptions.HTTPError as e:
                if e.response.status_code == 404:
                    print(f"Attempt {i+1}: Comments for PR {pr_number} not found yet. Retrying in 3 seconds...")
                    time.sleep(3)
                else:
                    raise  # Re-raise other HTTP errors
        raise Exception(f"Failed to retrieve comments for PR {pr_number} after multiple retries.")

    def get_pr_details(self, repo_full_name: str, pr_number: int):
        owner, repo = repo_full_name.split("/")
        url = f"repos/{owner}/{repo}/pulls/{pr_number}"
        response = self._request("GET", url)
        return response.json()
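GiteaAPIClient is normally driven through the gitea_env fixture, but it can also be used directly for a quick manual check against the integration Gitea. A minimal sketch, assuming the compose stack is up and the admin token has been written to ./gitea-data/admin.token as conftest.py expects:

from tests.lib.common_test_utils import GiteaAPIClient

# Read the admin token the gitea setup script writes out.
with open("./gitea-data/admin.token") as f:
    token = f.read().strip()

client = GiteaAPIClient(base_url="http://127.0.0.1:3000", token=token)
diff = "diff --git a/test.txt b/test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
pr = client.create_gitea_pr("pool/pkgA", diff, "Manual smoke-test PR")
print(pr["number"], client.get_pr_details("pool/pkgA", pr["number"])["title"])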
@@ -1,153 +0,0 @@
import pytest
import re
import time
import subprocess
import requests
from pathlib import Path
from tests.lib.common_test_utils import (
    GiteaAPIClient,
    mock_build_result,
)

# =============================================================================
# TEST CASES
# =============================================================================


def test_pr_workflow_succeeded(gitea_env, mock_build_result):
    """End-to-end test for a successful PR workflow."""
    diff = "diff --git a/test.txt b/test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should succeed")
    initial_pr_number = pr["number"]

    compose_dir = Path(__file__).parent.parent

    forwarded_pr_number = None
    print(
        f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event..."
    )
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    forwarded_pr_number = match.group(1)
                    break
        if forwarded_pr_number:
            break
    assert (
        forwarded_pr_number is not None
    ), "Workflow bot did not create a pull_ref event on the timeline."
    print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
    reviewer_added = False
    for _ in range(15):
        time.sleep(1)
        pr_details = gitea_env.get_pr_details("products/SLFO", forwarded_pr_number)
        if any(
            r.get("login") == "autogits_obs_staging_bot"
            for r in pr_details.get("requested_reviewers", [])
        ):
            reviewer_added = True
            break
    assert reviewer_added, "Staging bot was not added as a reviewer."
    print("Staging bot has been added as a reviewer.")

    mock_build_result(package_name="pkgA", code="succeeded")

    print("Restarting obs-staging-bot...")
    subprocess.run(
        ["podman-compose", "restart", "obs-staging-bot"],
        cwd=compose_dir,
        check=True,
        capture_output=True,
    )

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
    status_comment_found = False
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("products/SLFO", forwarded_pr_number)
        for event in timeline_events:
            print(event.get("body", "not a body"))
            if event.get("body") and "successful" in event["body"]:
                status_comment_found = True
                break
        if status_comment_found:
            break
    assert status_comment_found, "Staging bot did not post a 'successful' comment."


def test_pr_workflow_failed(gitea_env, mock_build_result):
    """End-to-end test for a failed PR workflow."""
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR - should fail")
    initial_pr_number = pr["number"]

    compose_dir = Path(__file__).parent.parent

    forwarded_pr_number = None
    print(
        f"Polling pool/pkgA PR #{initial_pr_number} timeline for forwarded PR event..."
    )
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    forwarded_pr_number = match.group(1)
                    break
        if forwarded_pr_number:
            break
    assert (
        forwarded_pr_number is not None
    ), "Workflow bot did not create a pull_ref event on the timeline."
    print(f"Found forwarded PR: products/SLFO #{forwarded_pr_number}")

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for reviewer assignment...")
    reviewer_added = False
    for _ in range(15):
        time.sleep(1)
        pr_details = gitea_env.get_pr_details("products/SLFO", forwarded_pr_number)
        if any(
            r.get("login") == "autogits_obs_staging_bot"
            for r in pr_details.get("requested_reviewers", [])
        ):
            reviewer_added = True
            break
    assert reviewer_added, "Staging bot was not added as a reviewer."
    print("Staging bot has been added as a reviewer.")

    mock_build_result(package_name="pkgA", code="failed")

    print("Restarting obs-staging-bot...")
    subprocess.run(
        ["podman-compose", "restart", "obs-staging-bot"],
        cwd=compose_dir,
        check=True,
        capture_output=True,
    )

    print(f"Polling products/SLFO PR #{forwarded_pr_number} for final status...")
    status_comment_found = False
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("products/SLFO", forwarded_pr_number)
        for event in timeline_events:
            if event.get("body") and "failed" in event["body"]:
                status_comment_found = True
                break
        if status_comment_found:
            break
    assert status_comment_found, "Staging bot did not post a 'failed' comment."
@@ -1,117 +0,0 @@
import pytest
import re
import time
import subprocess
import requests
from pathlib import Path
from tests.lib.common_test_utils import (
    GiteaAPIClient,
)

# =============================================================================
# TEST CASES
# =============================================================================

pytest.pr = None
pytest.pr_details = None
pytest.initial_pr_number = None
pytest.forwarded_pr_number = None


@pytest.mark.dependency()
def test_001_project_pr(gitea_env):
    """Forwarded PR has the correct title"""
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100644\nindex 0000000..e69de29\n"
    pytest.pr = gitea_env.create_gitea_pr("pool/pkgA", diff, "Test PR")
    pytest.initial_pr_number = pytest.pr["number"]
    time.sleep(5)  # Give Gitea some time to process the PR and make the timeline available

    compose_dir = Path(__file__).parent.parent

    pytest.forwarded_pr_number = None
    print(
        f"Polling pool/pkgA PR #{pytest.initial_pr_number} timeline for forwarded PR event..."
    )
    # Poll the timeline until the forwarded PR shows up as a pull_ref event
    for _ in range(20):
        time.sleep(1)
        timeline_events = gitea_env.get_timeline_events("pool/pkgA", pytest.initial_pr_number)
        for event in timeline_events:
            if event.get("type") == "pull_ref":
                if not (ref_issue := event.get("ref_issue")):
                    continue
                url_to_check = ref_issue.get("html_url", "")
                match = re.search(r"products/SLFO/pulls/(\d+)", url_to_check)
                if match:
                    pytest.forwarded_pr_number = match.group(1)
                    break
        if pytest.forwarded_pr_number:
            break
    assert (
        pytest.forwarded_pr_number is not None
    ), "Workflow bot did not create a forwarded PR."
    pytest.pr_details = gitea_env.get_pr_details("products/SLFO", pytest.forwarded_pr_number)
    assert (
        pytest.pr_details["title"] == "Forwarded PRs: pkgA"
    ), "Forwarded PR does not have the expected title"


@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_002_updated_project_pr(gitea_env):
    """Forwarded PR head is updated"""
    diff = "diff --git a/another_test.txt b/another_test.txt\nnew file mode 100444\nindex 0000000..e69de21\n"
    gitea_env.modify_gitea_pr("pool/pkgA", pytest.initial_pr_number, diff, "Tweaks")
    sha_old = pytest.pr_details["head"]["sha"]

    sha_changed = False
    for _ in range(20):
        time.sleep(1)
        new_pr_details = gitea_env.get_pr_details("products/SLFO", pytest.forwarded_pr_number)
        sha_new = new_pr_details["head"]["sha"]
        if sha_new != sha_old:
            print(f"Sha changed from {sha_old} to {sha_new}")
            sha_changed = True
            break

    assert sha_changed, "Forwarded PR head sha was not updated"


@pytest.mark.dependency(depends=["test_001_project_pr"])
def test_003_wip(gitea_env):
    """WIP flag is synchronized to the forwarded PR"""
    # 1. Set the WIP flag on PR pool/pkgA#<initial_pr_number>
    initial_pr_details = gitea_env.get_pr_details("pool/pkgA", pytest.initial_pr_number)
    wip_title = "WIP: " + initial_pr_details["title"]

    gitea_env.update_gitea_pr_properties("pool/pkgA", pytest.initial_pr_number, title=wip_title)
    # 2. In a loop, check whether the WIP flag is set on PR products/SLFO#<forwarded_pr_number>
    wip_flag_set = False
    for _ in range(20):
        time.sleep(1)
        forwarded_pr_details = gitea_env.get_pr_details(
            "products/SLFO", pytest.forwarded_pr_number
        )
        if "WIP: " in forwarded_pr_details["title"]:
            wip_flag_set = True
            break

    assert wip_flag_set, "WIP flag was not set in the forwarded PR."

    # Remove the WIP flag from PR pool/pkgA#<initial_pr_number>
    initial_pr_details = gitea_env.get_pr_details("pool/pkgA", pytest.initial_pr_number)
    non_wip_title = initial_pr_details["title"].replace("WIP: ", "")
    gitea_env.update_gitea_pr_properties(
        "pool/pkgA", pytest.initial_pr_number, title=non_wip_title
    )

    # In a loop, check whether the WIP flag is removed from PR products/SLFO#<forwarded_pr_number>
    wip_flag_removed = False
    for _ in range(20):
        time.sleep(1)
        forwarded_pr_details = gitea_env.get_pr_details(
            "products/SLFO", pytest.forwarded_pr_number
        )
        if "WIP: " not in forwarded_pr_details["title"]:
            wip_flag_removed = True
            break
    assert wip_flag_removed, "WIP flag was not removed from the forwarded PR."
@@ -1 +0,0 @@
Dockerfile.package
@@ -1,17 +0,0 @@
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY integration/rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

# Install git and ssh
RUN zypper -n in git-core openssh-clients binutils

# Copy the pre-built binary into the container
COPY workflow-pr/workflow-pr /usr/local/bin/workflow-pr
COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +4755 /usr/local/bin/entrypoint.sh

# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
@@ -1,18 +0,0 @@
# Use the same base image as the Gitea container
FROM registry.suse.com/bci/bci-base:15.7

# Add the custom CA to the trust store
COPY rabbitmq-config/certs/cert.pem /usr/share/pki/trust/anchors/gitea-rabbitmq-ca.crt
RUN update-ca-certificates

RUN zypper ar -f http://download.opensuse.org/repositories/devel:/Factory:/git-workflow/15.7/devel:Factory:git-workflow.repo
RUN zypper --gpg-auto-import-keys ref

# Install git and ssh
RUN zypper -n in git-core openssh-clients autogits-workflow-pr binutils

COPY integration/workflow-pr/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +4755 /usr/local/bin/entrypoint.sh

# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
@@ -1,66 +0,0 @@
|
||||
#!/bin/bash
|
||||
TOKEN_FILE="/var/lib/gitea/workflow-pr.token"
|
||||
|
||||
# Wait for the token file to be created by the gitea setup script
|
||||
echo "Waiting for $TOKEN_FILE..."
|
||||
while [ ! -s "$TOKEN_FILE" ]; do
|
||||
sleep 2
|
||||
done
|
||||
|
||||
# Read token and trim whitespace/newlines
|
||||
GITEA_TOKEN=$(cat "$TOKEN_FILE" | tr -d '\n\r ' )
|
||||
|
||||
if [ -z "$GITEA_TOKEN" ]; then
|
||||
echo "Error: Token file $TOKEN_FILE is empty after trimming."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export GITEA_TOKEN
|
||||
echo "GITEA_TOKEN exported (length: ${#GITEA_TOKEN})"
|
||||
|
||||
# Wait for the dummy data to be created by the gitea setup script
|
||||
echo "Waiting for workflow.config in products/SLFO..."
|
||||
API_URL="http://gitea-test:3000/api/v1/repos/products/SLFO/contents/workflow.config"
|
||||
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
|
||||
|
||||
while [ "$HTTP_STATUS" != "200" ]; do
|
||||
echo "workflow.config not found yet (HTTP Status: $HTTP_STATUS). Retrying in 5s..."
|
||||
sleep 5
|
||||
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITEA_TOKEN" "$API_URL")
|
||||
done
|
||||
|
||||
# Wait for the shared SSH key to be generated by the gitea setup script
|
||||
echo "Waiting for /var/lib/gitea/ssh-keys/id_ed25519..."
|
||||
while [ ! -f /var/lib/gitea/ssh-keys/id_ed25519 ]; do
|
||||
sleep 2
|
||||
done
|
||||
|
||||
export AUTOGITS_IDENTITY_FILE="/root/.ssh/id_ed25519"
|
||||
|
||||
# Pre-populate known_hosts with Gitea's SSH host key
|
||||
echo "Preparing SSH environment in /root/.ssh..."
|
||||
mkdir -p /root/.ssh
|
||||
chmod 700 /root/.ssh
|
||||
|
||||
# Copy the private key to the standard location and set permissions
|
||||
cp /var/lib/gitea/ssh-keys/id_ed25519 /root/.ssh/id_ed25519
|
||||
chmod 600 /root/.ssh/id_ed25519
|
||||
|
||||
echo "Scanning Gitea SSH host key..."
|
||||
# We try multiple times because Gitea might still be starting its SSH server
|
||||
for i in {1..10}; do
|
||||
ssh-keyscan -p 3022 gitea-test >> /root/.ssh/known_hosts 2>/dev/null && break
|
||||
echo "Retrying ssh-keyscan in 2s..."
|
||||
sleep 2
|
||||
done
|
||||
chmod 644 /root/.ssh/known_hosts
|
||||
|
||||
exe=$(which workflow-pr)
|
||||
exe=${exe:-/usr/local/bin/workflow-pr}
|
||||
|
||||
package=$(rpm -qa | grep autogits-workflow-pr) || :
|
||||
|
||||
echo "!!!!!!!!!!!!!!!! using binary $exe; installed package: $package"
|
||||
which strings > /dev/null 2>&1 && strings "$exe" | grep -A 2 vcs.revision= | head -4 || :
|
||||
|
||||
exec "$exe" "$@"
|
||||
@@ -1,3 +0,0 @@
[
  "products/SLFO#main"
]
@@ -4,15 +4,11 @@ OBS Staging Bot
Build a PR against a ProjectGit, if review is requested.


Main Tasks
----------
Areas of Responsibility
-----------------------

* A build in OBS is initiated when a review for this bot is requested.
* The overall build status is reported:
  * Build successful
  * Build failed
* It checks the build status only for the involved packages compared to the last state of the project for all architectures and all flavors.
* It adds an SVG with detailed build status.
* Monitors Notification API in Gitea for review requests
* Reviews package build results in OBS for all changed packages in ProjectGit PR


Target Usage
@@ -25,56 +21,22 @@ Configuration File
------------------

Bot reads `staging.config` from the project git or the PR to the project git.
It's a JSON file with the following syntax:
It's a JSON file with the following syntax

```json
```
{
    "ObsProject": "SUSE:SLFO:1.2",
    "StagingProject": "SUSE:SLFO:1.2:PullRequest",
    "QA": [
        {
            "Name": "SLES",
            "Origin": "SUSE:SLFO:Products:SLES:16.0",
            "Label": "BootstrapRing",
            "BuildDisableRepos": ["product"]
        }
    ]
    "ObsProject": "home:foo:project",
    "StagingProject": "home:foo:project:staging",
    "QA": [
        {
            "Name": "ProjectBuild",
            "Origin": "home:foo:product:images"
        }
    ]
}
```

| Field name | Details | Mandatory | Type | Allowed Values | Default |
| ----- | ----- | ----- | ----- | ----- | ----- |
| *ObsProject* | Product OBS project. Builds in this project will be used to compare to builds based on sources from the PR. | yes | string | `[a-zA-Z0-9-_:]+` | |
| *StagingProject* | Used both as base project and prefix for all OBS staging projects. Upon being added as a reviewer to a PrjGit PR, this bot automatically generates an OBS project named *StagingProject:<PR_Number>*. It must be a sub-project of the *ObsProject*. | yes | string | `[a-zA-Z0-9-_:]+` | |
| *QA* | Crucial for generating a product build (such as an ISO or FTP tree) that incorporates the packages. | no | array of objects | | |
| *QA > Name* | Suffix for the QA OBS staging project. The project is named *StagingProject:<PR_Number>:Name*. | no | string | | |
| *QA > Origin* | OBS reference project | no | string | | |
| *QA > Label* | Set up the project only when the given Gitea label is set on the pull request | no | string | | |
| *QA > BuildDisableRepos* | The names of OBS repositories to build-disable, if any. | no | array of strings | | [] |
* ObsProject: (**required**) Project where the base project is built. Builds in this project will be used to compare to builds based on sources from the PR
* StagingProject: template project that will be used as template for the staging project. Omitting this will use the ObsProject repositories to create the staging. The staging project will be created under the template, or in the bot's home project if not specified.
* QA: set of projects to build on top of the binaries built in staging. (See the project-naming example below.)
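To make the naming concrete: with the `SUSE:SLFO:1.2:PullRequest` staging project from the example above and a hypothetical PrjGit PR number 123, the generated OBS projects would be named along these lines:

```
SUSE:SLFO:1.2:PullRequest:123        staging project for PrjGit PR #123
SUSE:SLFO:1.2:PullRequest:123:SLES   QA sub-project "SLES" for the same PR
```
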
Details
-------

* **OBS staging projects are deleted** when the relative PrjGit PR is closed or merged.

* **PrjGit PR - staging project**
  * The OBS staging project utilizes an **scmsync** tag, configured with the `onlybuild` flag, to exclusively build packages associated with this specific PrjGit PR (see the example URL below).
  * The **build config** is inherited from the PrjGit PR config file (even if unchanged).
  * The **project meta** creates a standard repository following the StagingProject as a project path.
  * The base *StagingProject* has the macro **FromScratch:** set in its config, which prevents inheriting the configuration from the included project paths.
  * The bot copies the project maintainers from *StagingProject* to the specific staging project (*StagingProject:<PR_Number>*).
  * The bot reports “Build successful” only if the build is successful for all repositories and all architectures.

* **PrjGit PR - QA staging project**
  * The QA staging project is meant for building the product; the relative build config is inherited from the `QA > Origin` project.
  * In this case, the **scmsync** tag is inherited from the `QA > Origin` project.
  * It is desirable in some cases to avoid building some specific build service repositories when not needed. In this case, `QA > BuildDisableRepos` can be specified.
    These repositories would be disabled in the project meta when generating the QA project.
  * QA projects can build on each other. In this case it is important that the setup order in the staging.config file is correct.
  * Based on Label settings, QA projects can get created or removed. The staging bot also checks that these
    projects build successfully.
  * It is possible to include the sources from the staging project also in the QA project. Define a template using
    a project link pointing to the project defined as "StagingProject". You must *not* use scmsync directly in the
    same project then, but you can use it indirectly via a second project link.

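For illustration only, the scmsync URL the bot writes into a staging project's meta is assembled from the PR head clone URL, one `onlybuild` parameter per changed package, and the PR head commit; the repository, package names, and SHA below are made-up placeholders:

```
https://src.opensuse.org/products/SLFO.git?onlybuild=pkgA&onlybuild=pkgB#0123456789abcdef0123456789abcdef01234567
```
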
@@ -19,7 +19,6 @@ package main
|
||||
*/
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/xml"
|
||||
"errors"
|
||||
"flag"
|
||||
@@ -110,110 +109,161 @@ const (
|
||||
BuildStatusSummaryUnknown = 4
|
||||
)
|
||||
|
||||
type DisableFlag struct {
|
||||
XMLName string `xml:"disable"`
|
||||
Name string `xml:"repository,attr"`
|
||||
}
|
||||
func ProcessBuildStatus(project, refProject *common.BuildResultList) BuildStatusSummary {
|
||||
if _, finished := refProject.BuildResultSummary(); !finished {
|
||||
common.LogDebug("refProject not finished building??")
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
|
||||
func ProcessBuildStatus(project *common.BuildResultList) BuildStatusSummary {
|
||||
if _, finished := project.BuildResultSummary(); !finished {
|
||||
common.LogDebug("Still building...")
|
||||
return BuildStatusSummaryBuilding
|
||||
}
|
||||
|
||||
common.LogDebug("build results", len(project.Result))
|
||||
// the repositories should be setup equally between the projects. We
|
||||
// need to verify that packages that are building in `refProject` are not
|
||||
// failing in the `project`
|
||||
BuildResultSorter := func(a, b *common.BuildResult) int {
|
||||
if c := strings.Compare(a.Repository, b.Repository); c != 0 {
|
||||
return c
|
||||
}
|
||||
if c := strings.Compare(a.Arch, b.Arch); c != 0 {
|
||||
return c
|
||||
}
|
||||
|
||||
panic("Should not happen -- BuiltResultSorter equal repos?")
|
||||
}
|
||||
slices.SortFunc(project.Result, BuildResultSorter)
|
||||
if refProject == nil {
|
||||
// just return if build finished and has some successes, since this is a new package
|
||||
common.LogInfo("New package. Only need some success...")
|
||||
SomeSuccess := false
|
||||
for i := 0; i < len(project.Result); i++ {
|
||||
repoRes := project.Result[i]
|
||||
repoResStatus, ok := common.ObsRepoStatusDetails[repoRes.Code]
|
||||
if !ok {
|
||||
common.LogDebug("cannot find code:", repoRes.Code)
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
if !repoResStatus.Finished {
|
||||
return BuildStatusSummaryBuilding
|
||||
}
|
||||
|
||||
for _, pkg := range repoRes.Status {
|
||||
pkgStatus, ok := common.ObsBuildStatusDetails[pkg.Code]
|
||||
if !ok {
|
||||
common.LogInfo("Unknown package build status:", pkg.Code, "for", pkg.Package)
|
||||
common.LogDebug("Details:", pkg.Details)
|
||||
}
|
||||
|
||||
if pkgStatus.Success {
|
||||
SomeSuccess = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if SomeSuccess {
|
||||
return BuildStatusSummarySuccess
|
||||
}
|
||||
return BuildStatusSummaryFailed
|
||||
}
|
||||
|
||||
slices.SortFunc(refProject.Result, BuildResultSorter)
|
||||
|
||||
common.LogDebug("comparing results", len(project.Result), "vs. ref", len(refProject.Result))
|
||||
SomeSuccess := false
|
||||
for i := 0; i < len(project.Result); i++ {
|
||||
common.LogDebug("searching for", project.Result[i].Repository, "/", project.Result[i].Arch)
|
||||
j := 0
|
||||
found:
|
||||
for j := 0; j < len(project.Result); j++ {
|
||||
for ; j < len(refProject.Result); j++ {
|
||||
if project.Result[i].Repository != refProject.Result[j].Repository ||
|
||||
project.Result[i].Arch != refProject.Result[j].Arch {
|
||||
continue
|
||||
}
|
||||
|
||||
common.LogDebug(" found match for @ idx:", j)
|
||||
res := ProcessRepoBuildStatus(project.Result[i].Status)
|
||||
res, success := ProcessRepoBuildStatus(project.Result[i].Status, refProject.Result[j].Status)
|
||||
switch res {
|
||||
case BuildStatusSummarySuccess:
|
||||
SomeSuccess = SomeSuccess || success
|
||||
break found
|
||||
case BuildStatusSummaryFailed:
|
||||
return BuildStatusSummaryFailed
|
||||
default:
|
||||
return res
|
||||
}
|
||||
}
|
||||
|
||||
if j >= len(refProject.Result) {
|
||||
common.LogDebug("Cannot find results...")
|
||||
common.LogDebug(project.Result[i])
|
||||
common.LogDebug(refProject.Result)
|
||||
return BuildStatusSummaryUnknown
|
||||
}
|
||||
}
|
||||
|
||||
return BuildStatusSummarySuccess
|
||||
if SomeSuccess {
|
||||
return BuildStatusSummarySuccess
|
||||
}
|
||||
|
||||
return BuildStatusSummaryFailed
|
||||
}
|
||||
|
||||
func ProcessRepoBuildStatus(results []*common.PackageBuildStatus) (status BuildStatusSummary) {
|
||||
|
||||
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
|
||||
return strings.Compare(a.Package, b.Package)
|
||||
}
|
||||
func ProcessRepoBuildStatus(results, ref []*common.PackageBuildStatus) (status BuildStatusSummary, SomeSuccess bool) {
|
||||
PackageBuildStatusSorter := func(a, b *common.PackageBuildStatus) int {
|
||||
return strings.Compare(a.Package, b.Package)
|
||||
}
|
||||
|
||||
common.LogDebug("******** REF: ")
|
||||
data, _ := xml.MarshalIndent(ref, "", " ")
|
||||
common.LogDebug(string(data))
|
||||
common.LogDebug("******* RESULTS: ")
|
||||
data, _ := xml.MarshalIndent(results, "", " ")
|
||||
data, _ = xml.MarshalIndent(results, "", " ")
|
||||
common.LogDebug(string(data))
|
||||
common.LogDebug("*******")
|
||||
|
||||
// compare build result
|
||||
slices.SortFunc(results, PackageBuildStatusSorter)
|
||||
slices.SortFunc(ref, PackageBuildStatusSorter)
|
||||
|
||||
j := 0
|
||||
SomeSuccess = false
|
||||
for i := 0; i < len(results); i++ {
|
||||
res, ok := common.ObsBuildStatusDetails[results[i].Code]
|
||||
if !ok {
|
||||
common.LogInfo("unknown package result code:", results[i].Code, "for package:", results[i].Package)
|
||||
return BuildStatusSummaryUnknown
|
||||
return BuildStatusSummaryUnknown, SomeSuccess
|
||||
}
|
||||
|
||||
if !res.Finished {
|
||||
return BuildStatusSummaryBuilding
|
||||
return BuildStatusSummaryBuilding, SomeSuccess
|
||||
}
|
||||
|
||||
if !res.Success {
|
||||
return BuildStatusSummaryFailed
|
||||
}
|
||||
}
|
||||
|
||||
return BuildStatusSummarySuccess
|
||||
}
|
||||
|
||||
func GetPackageBuildStatus(project *common.BuildResultList, packageName string) (bool, BuildStatusSummary) {
|
||||
var packageStatuses []*common.PackageBuildStatus
|
||||
|
||||
// Collect all statuses for the package
|
||||
for _, result := range project.Result {
|
||||
for _, pkgStatus := range result.Status {
|
||||
if pkgStatus.Package == packageName {
|
||||
packageStatuses = append(packageStatuses, pkgStatus)
|
||||
// not failed if reference project also failed for same package here
|
||||
for ; j < len(results) && strings.Compare(results[i].Package, ref[j].Package) < 0; j++ {
|
||||
}
|
||||
|
||||
if j < len(results) && results[i].Package == ref[j].Package {
|
||||
refRes, ok := common.ObsBuildStatusDetails[ref[j].Code]
|
||||
if !ok {
|
||||
common.LogInfo("unknown ref package result code:", ref[j].Code, "package:", ref[j].Package)
|
||||
return BuildStatusSummaryUnknown, SomeSuccess
|
||||
}
|
||||
|
||||
if !refRes.Finished {
|
||||
common.LogDebug("Not finished building in reference project?")
|
||||
}
|
||||
|
||||
if refRes.Success {
|
||||
return BuildStatusSummaryFailed, SomeSuccess
|
||||
}
|
||||
}
|
||||
} else {
|
||||
SomeSuccess = true
|
||||
}
|
||||
}
|
||||
|
||||
if len(packageStatuses) == 0 {
|
||||
return true, BuildStatusSummaryUnknown // true for 'missing'
|
||||
}
|
||||
|
||||
// Check for any failures
|
||||
for _, pkgStatus := range packageStatuses {
|
||||
res, ok := common.ObsBuildStatusDetails[pkgStatus.Code]
|
||||
if !ok {
|
||||
common.LogInfo("unknown package result code:", pkgStatus.Code, "for package:", pkgStatus.Package)
|
||||
return false, BuildStatusSummaryUnknown
|
||||
}
|
||||
if !res.Success {
|
||||
return false, BuildStatusSummaryFailed
|
||||
}
|
||||
}
|
||||
|
||||
// Check for any unfinished builds
|
||||
for _, pkgStatus := range packageStatuses {
|
||||
res, _ := common.ObsBuildStatusDetails[pkgStatus.Code]
|
||||
// 'ok' is already checked in the loop above
|
||||
if !res.Finished {
|
||||
return false, BuildStatusSummaryBuilding
|
||||
}
|
||||
}
|
||||
|
||||
// If we got here, all are finished and successful
|
||||
return false, BuildStatusSummarySuccess
|
||||
return BuildStatusSummarySuccess, SomeSuccess
|
||||
}
|
||||
|
||||
func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingPrj, buildPrj string, stagingMasterPrj string) (*common.ProjectMeta, error) {
|
||||
@@ -272,9 +322,9 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
|
||||
common.LogError("error fetching project meta for", buildPrj, ". Err:", err)
|
||||
return nil, err
|
||||
}
|
||||
common.LogInfo("Meta: ", meta)
|
||||
|
||||
// generate new project with paths pointing back to original repos
|
||||
// disable publishing
|
||||
|
||||
meta.Name = stagingPrj
|
||||
meta.Description = fmt.Sprintf(`Pull request build job PR#%d to branch %s of %s/%s`,
|
||||
@@ -289,10 +339,7 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
|
||||
|
||||
urlPkg := make([]string, 0, len(modifiedOrNew))
|
||||
for _, pkg := range modifiedOrNew {
|
||||
// FIXME: skip manifest subdirectories itself
|
||||
// strip any leading directory name and just hand over last directory as package name
|
||||
onlybuilds := strings.Split(pkg, "/")
|
||||
urlPkg = append(urlPkg, "onlybuild="+url.QueryEscape(onlybuilds[len(onlybuilds)-1]))
|
||||
urlPkg = append(urlPkg, "onlybuild="+url.QueryEscape(pkg))
|
||||
}
|
||||
meta.ScmSync = pr.Head.Repo.CloneURL + "?" + strings.Join(urlPkg, "&") + "#" + pr.Head.Sha
|
||||
if len(meta.ScmSync) >= 65535 {
|
||||
@@ -330,97 +377,28 @@ func GenerateObsPrjMeta(git common.Git, gitea common.Gitea, pr *models.PullReque
|
||||
// stagingProject:$buildProject
|
||||
// ^- stagingProject:$buildProject:$subProjectName (based on templateProject)
|
||||
|
||||
func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject, templateProject, subProjectName string, buildDisableRepos []string) error {
|
||||
func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject, templateProject, subProjectName string) error {
|
||||
common.LogDebug("Setup QA sub projects")
|
||||
common.LogDebug("reading templateProject ", templateProject)
|
||||
templateMeta, err := ObsClient.GetProjectMeta(templateProject)
|
||||
if err != nil {
|
||||
common.LogError("error fetching template project meta for", templateProject, ":", err)
|
||||
return err
|
||||
}
|
||||
// patch baseMeta to become the new project
|
||||
common.LogDebug("upcoming project name ", stagingProject, ":", subProjectName)
|
||||
templateMeta.Name = stagingProject + ":" + subProjectName
|
||||
// freeze tag for now
|
||||
if len(templateMeta.ScmSync) > 0 {
|
||||
repository, err := url.Parse(templateMeta.ScmSync)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
common.LogDebug("getting data for ", repository.EscapedPath())
|
||||
split := strings.Split(repository.EscapedPath(), "/")
|
||||
org, repo := split[1], split[2]
|
||||
|
||||
common.LogDebug("getting commit for ", org, " repo ", repo, " fragment ", repository.Fragment)
|
||||
branch, err := gitea.GetCommit(org, repo, repository.Fragment)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// set expanded commit url
|
||||
repository.Fragment = branch.SHA
|
||||
templateMeta.ScmSync = repository.String()
|
||||
common.LogDebug("Setting scmsync url to ", templateMeta.ScmSync)
|
||||
}
|
||||
// Build-disable repositories if asked
|
||||
if len(buildDisableRepos) > 0 {
|
||||
toDisable := make([]DisableFlag, len(buildDisableRepos))
|
||||
for idx, repositoryName := range buildDisableRepos {
|
||||
toDisable[idx] = DisableFlag{Name: repositoryName}
|
||||
}
|
||||
|
||||
output, err := xml.Marshal(toDisable)
|
||||
if err != nil {
|
||||
common.LogError("error while marshalling, skipping BuildDisableRepos: ", err)
|
||||
} else {
|
||||
templateMeta.BuildFlags.Contents += string(output)
|
||||
}
|
||||
}
|
||||
|
||||
// include sources from submission project when link points to staging project
|
||||
for idx, l := range templateMeta.Link {
|
||||
if l.Project == stagingConfig.StagingProject {
|
||||
templateMeta.Link[idx].Project = stagingProject
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup ReleaseTarget and modify affected path entries
|
||||
for idx, r := range templateMeta.Repositories {
|
||||
templateMeta.Repositories[idx].ReleaseTargets = nil
|
||||
|
||||
for pidx, path := range r.Paths {
|
||||
// Check for path building against code stream
|
||||
common.LogDebug(" checking in ", templateMeta.Name)
|
||||
common.LogDebug(" stagingProject ", stagingProject)
|
||||
common.LogDebug(" checking for ", templateMeta.Repositories[idx].Paths[pidx].Project)
|
||||
common.LogDebug(" path.Project ", path.Project)
|
||||
common.LogDebug(" stagingConfig.ObsProject ", stagingConfig.ObsProject)
|
||||
common.LogDebug(" stagingConfig.StagingProject ", stagingConfig.StagingProject)
|
||||
common.LogDebug(" templateProject ", templateProject)
|
||||
if path.Project == stagingConfig.ObsProject {
|
||||
templateMeta.Repositories[idx].Paths[pidx].Project = stagingProject
|
||||
} else
|
||||
}
|
||||
// Check for path building against a repo in template project itself
|
||||
if path.Project == templateProject {
|
||||
templateMeta.Repositories[idx].Paths[pidx].Project = templateMeta.Name
|
||||
} else
|
||||
// Check for path prefixes against a template project inside of template project area
|
||||
if strings.HasPrefix(path.Project, stagingConfig.StagingProject + ":") {
|
||||
newProjectName := stagingProject
|
||||
// find project name
|
||||
for _, setup := range stagingConfig.QA {
|
||||
if setup.Origin == path.Project {
|
||||
common.LogDebug(" Match:", setup.Origin)
|
||||
newProjectName = newProjectName + ":" + setup.Name
|
||||
common.LogDebug(" New:", newProjectName)
|
||||
break
|
||||
}
|
||||
}
|
||||
templateMeta.Repositories[idx].Paths[pidx].Project = newProjectName
|
||||
common.LogDebug(" Matched prefix")
|
||||
}
|
||||
common.LogDebug(" Path using project ", templateMeta.Repositories[idx].Paths[pidx].Project)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -428,8 +406,6 @@ func CreateQASubProject(stagingConfig *common.StagingConfig, git common.Git, git
|
||||
err = ObsClient.SetProjectMeta(templateMeta)
|
||||
if err != nil {
|
||||
common.LogError("cannot create project:", templateMeta.Name, err)
|
||||
x, _ := xml.MarshalIndent(templateMeta, "", " ")
|
||||
common.LogError(string(x))
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
@@ -536,7 +512,7 @@ func FetchOurLatestActionableReview(gitea common.Gitea, org, repo string, id int
|
||||
}
|
||||
|
||||
func ParseNotificationToPR(thread *models.NotificationThread) (org string, repo string, num int64, err error) {
|
||||
rx := regexp.MustCompile(`^.*/api/v\d+/repos/(?<org>[-_a-zA-Z0-9]+)/(?<project>[-_a-zA-Z0-9]+)/issues/(?<num>[0-9]+)$`)
|
||||
rx := regexp.MustCompile(`^https://src\.(?:open)?suse\.(?:org|de)/api/v\d+/repos/(?<org>[-_a-zA-Z0-9]+)/(?<project>[-_a-zA-Z0-9]+)/issues/(?<num>[0-9]+)$`)
|
||||
notification := thread.Subject
|
||||
match := rx.FindStringSubmatch(notification.URL)
|
||||
if match == nil {
|
||||
@@ -685,64 +661,6 @@ func SetStatus(gitea common.Gitea, org, repo, hash string, status *models.Commit
|
||||
return err
|
||||
}
|
||||
|
||||
func commentOnPackagePR(gitea common.Gitea, org string, repo string, prNum int64, msg string) {
|
||||
if IsDryRun {
|
||||
common.LogInfo("Would comment on package PR %s/%s#%d: %s", org, repo, prNum, msg)
|
||||
return
|
||||
}
|
||||
|
||||
pr, err := gitea.GetPullRequest(org, repo, prNum)
|
||||
if err != nil {
|
||||
common.LogError("Failed to get package PR %s/%s#%d: %v", org, repo, prNum, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = gitea.AddComment(pr, msg)
|
||||
if err != nil {
|
||||
common.LogError("Failed to comment on package PR %s/%s#%d: %v", org, repo, prNum, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Create and remove QA projects
|
||||
func ProcessQaProjects(stagingConfig *common.StagingConfig, git common.Git, gitea common.Gitea, pr *models.PullRequest, stagingProject string) []string {
|
||||
usedQAprojects := make([]string, 0)
|
||||
prLabelNames := make(map[string]int)
|
||||
for _, label := range pr.Labels {
|
||||
prLabelNames[label.Name] = 1
|
||||
}
|
||||
msg := ""
|
||||
for _, setup := range stagingConfig.QA {
|
||||
QAproject := stagingProject + ":" + setup.Name
|
||||
if len(setup.Label) > 0 {
|
||||
if _, ok := prLabelNames[setup.Label]; !ok {
|
||||
if !IsDryRun {
|
||||
// blindly remove, will fail when not existing
|
||||
ObsClient.DeleteProject(QAproject)
|
||||
}
|
||||
common.LogInfo("QA project ", setup.Name, "has no matching Label")
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
usedQAprojects = append(usedQAprojects, QAproject)
|
||||
// check for existence first: no error but no meta means a 404
|
||||
if meta, err := ObsClient.GetProjectMeta(QAproject); meta == nil && err == nil {
|
||||
common.LogInfo("Create QA project ", QAproject)
|
||||
CreateQASubProject(stagingConfig, git, gitea, pr,
|
||||
stagingProject,
|
||||
setup.Origin,
|
||||
setup.Name,
|
||||
setup.BuildDisableRepos)
|
||||
msg = msg + "QA Project added: " + ObsWebHost + "/project/show/" +
|
||||
QAproject + "\n"
|
||||
}
|
||||
}
|
||||
if len(msg) > 1 {
|
||||
gitea.AddComment(pr, msg)
|
||||
}
|
||||
return usedQAprojects
|
||||
}
|
||||
|
||||
func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, error) {
|
||||
dir, err := os.MkdirTemp(os.TempDir(), BotName)
|
||||
common.PanicOnError(err)
|
||||
@@ -820,12 +738,11 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
meta, err := ObsClient.GetProjectMeta(stagingConfig.ObsProject)
|
||||
if err != nil || meta == nil {
|
||||
common.LogError("Cannot find reference project meta:", stagingConfig.ObsProject, err)
|
||||
if !IsDryRun && err == nil {
|
||||
common.LogError("Reference project is absent:", stagingConfig.ObsProject, err)
|
||||
if !IsDryRun {
|
||||
_, err := gitea.AddReviewComment(pr, common.ReviewStateRequestChanges, "Cannot fetch reference project meta")
|
||||
return true, err
|
||||
}
|
||||
return true, err
|
||||
return true, nil
|
||||
}
|
||||
|
||||
if metaUrl, err := url.Parse(meta.ScmSync); err != nil {
|
||||
@@ -857,7 +774,6 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
l := len(stagingConfig.ObsProject)
|
||||
if l >= len(stagingConfig.StagingProject) || stagingConfig.ObsProject != stagingConfig.StagingProject[0:l] {
|
||||
common.LogError("StagingProject (", stagingConfig.StagingProject, ") is not child of target project", stagingConfig.ObsProject)
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
@@ -976,49 +892,35 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
msg = "Build is started in " + ObsWebHost + "/project/show/" +
|
||||
stagingProject + " .\n"
|
||||
|
||||
if len(stagingConfig.QA) > 0 {
|
||||
msg = msg + "\nAdditional QA builds: \n"
|
||||
}
|
||||
SetStatus(gitea, org, repo, pr.Head.Sha, status)
|
||||
for _, setup := range stagingConfig.QA {
|
||||
CreateQASubProject(stagingConfig, git, gitea, pr,
|
||||
stagingProject,
|
||||
setup.Origin,
|
||||
setup.Name)
|
||||
msg = msg + ObsWebHost + "/project/show/" +
|
||||
stagingProject + ":" + setup.Name + "\n"
|
||||
}
|
||||
}
|
||||
if change != RequestModificationNoChange && !IsDryRun {
|
||||
gitea.AddComment(pr, msg)
|
||||
}
|
||||
|
||||
baseResult, err := ObsClient.LastBuildResults(stagingConfig.ObsProject, modifiedPackages...)
|
||||
if err != nil {
|
||||
common.LogError("failed fetching ref project status for", stagingConfig.ObsProject, ":", err)
|
||||
}
|
||||
stagingResult, err := ObsClient.BuildStatus(stagingProject)
|
||||
if err != nil {
|
||||
common.LogError("failed fetching stage project status for", stagingProject, ":", err)
|
||||
}
|
||||
|
||||
_, packagePRs := common.ExtractDescriptionAndPRs(bufio.NewScanner(strings.NewReader(pr.Body)))
|
||||
|
||||
// always update QA projects because Labels can change
|
||||
qaProjects := ProcessQaProjects(stagingConfig, git, gitea, pr, stagingProject)
|
||||
buildStatus := ProcessBuildStatus(stagingResult, baseResult)
|
||||
|
||||
done := false
|
||||
overallBuildStatus := ProcessBuildStatus(stagingResult)
|
||||
commentSuffix := ""
|
||||
if len(qaProjects) > 0 && overallBuildStatus == BuildStatusSummarySuccess {
|
||||
seperator := " in "
|
||||
for _, qaProject := range qaProjects {
|
||||
qaResult, err := ObsClient.BuildStatus(qaProject)
|
||||
if err != nil {
|
||||
common.LogError("failed fetching stage project status for", qaProject, ":", err)
|
||||
}
|
||||
qaBuildStatus := ProcessBuildStatus(qaResult)
|
||||
if qaBuildStatus != BuildStatusSummarySuccess {
|
||||
// either still building or in failed state
|
||||
overallBuildStatus = qaBuildStatus
|
||||
commentSuffix = commentSuffix + seperator + qaProject
|
||||
seperator = ", "
|
||||
}
|
||||
if qaBuildStatus == BuildStatusSummaryFailed {
|
||||
// main project was successful, but QA project, adapt the link to QA project
|
||||
// and change commit state to fail
|
||||
status.Status = common.CommitStatus_Fail
|
||||
status.TargetURL = ObsWebHost + "/project/show/" + qaProject
|
||||
SetStatus(gitea, org, repo, pr.Head.Sha, status)
|
||||
}
|
||||
}
|
||||
}
|
||||
switch overallBuildStatus {
|
||||
switch buildStatus {
|
||||
case BuildStatusSummarySuccess:
|
||||
status.Status = common.CommitStatus_Success
|
||||
done = true
|
||||
@@ -1038,44 +940,7 @@ func ProcessPullRequest(gitea common.Gitea, org, repo string, id int64) (bool, e
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if overallBuildStatus == BuildStatusSummarySuccess || overallBuildStatus == BuildStatusSummaryFailed {
|
||||
// avoid commenting while build is in progress
|
||||
missingPkgs := []string{}
|
||||
|
||||
for _, packagePR := range packagePRs {
|
||||
missing, packageBuildStatus := GetPackageBuildStatus(stagingResult, packagePR.Repo)
|
||||
if missing {
|
||||
missingPkgs = append(missingPkgs, packagePR.Repo)
|
||||
continue
|
||||
}
|
||||
var msg string
|
||||
switch packageBuildStatus {
|
||||
case BuildStatusSummarySuccess:
|
||||
msg = fmt.Sprintf("Build successful, for more information go in %s/project/show/%s.\n", ObsWebHost, stagingProject)
|
||||
case BuildStatusSummaryFailed:
|
||||
msg = fmt.Sprintf("Build failed, for more information go in %s/project/show/%s.\n", ObsWebHost, stagingProject)
|
||||
default:
|
||||
continue
|
||||
}
|
||||
commentOnPackagePR(gitea, packagePR.Org, packagePR.Repo, packagePR.Num, msg)
|
||||
}
|
||||
|
||||
if len(missingPkgs) > 0 {
|
||||
overallBuildStatus = BuildStatusSummaryFailed
|
||||
msg := "The following packages were not found in the staging project:\n"
|
||||
for _, pkg := range missingPkgs {
|
||||
msg = msg + " - " + pkg + "\n"
|
||||
}
|
||||
common.LogInfo(msg)
|
||||
err := gitea.AddComment(pr, msg)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
common.LogInfo("Build status:", overallBuildStatus)
|
||||
common.LogInfo("Build status:", buildStatus)
|
||||
if !IsDryRun {
|
||||
if err = SetStatus(gitea, org, repo, pr.Head.Sha, status); err != nil {
|
||||
return false, err
|
||||
@@ -1140,7 +1005,6 @@ func PollWorkNotifications(giteaUrl string) {
|
||||
|
||||
var ListPullNotificationsOnly bool
|
||||
var GiteaUrl string
|
||||
var ObsApiHost string
|
||||
var ObsWebHost string
|
||||
var IsDryRun bool
|
||||
var ProcessPROnly string
|
||||
@@ -1163,8 +1027,8 @@ func main() {
|
||||
flag.BoolVar(&ListPullNotificationsOnly, "list-notifications-only", false, "Only lists notifications without acting on them")
|
||||
ProcessPROnly := flag.String("pr", "", "Process only specific PR and ignore the rest. Use for debugging")
|
||||
buildRoot := flag.String("build-root", "", "Default build location for staging projects. Default is bot's home project")
|
||||
flag.StringVar(&GiteaUrl, "gitea-url", "", "Gitea instance")
|
||||
flag.StringVar(&ObsApiHost, "obs", "", "API for OBS instance")
|
||||
flag.StringVar(&GiteaUrl, "gitea-url", "https://src.opensuse.org", "Gitea instance")
|
||||
obsApiHost := flag.String("obs", "https://api.opensuse.org", "API for OBS instance")
|
||||
flag.StringVar(&ObsWebHost, "obs-web", "", "Web OBS instance, if not derived from the obs config")
|
||||
flag.BoolVar(&IsDryRun, "dry", false, "Dry-run, don't actually create any build projects or review changes")
|
||||
debug := flag.Bool("debug", false, "Turns on debug logging")
|
||||
@@ -1176,34 +1040,18 @@ func main() {
|
||||
common.SetLoggingLevel(common.LogLevelInfo)
|
||||
}
|
||||
|
||||
if len(GiteaUrl) == 0 {
|
||||
GiteaUrl = os.Getenv(common.GiteaHostEnv)
|
||||
}
|
||||
if len(GiteaUrl) == 0 {
|
||||
GiteaUrl = "https://src.opensuse.org"
|
||||
}
|
||||
if len(ObsApiHost) == 0 {
|
||||
ObsApiHost = os.Getenv(common.ObsApiEnv)
|
||||
}
|
||||
if len(ObsApiHost) == 0 {
|
||||
ObsApiHost = "https://api.opensuse.org"
|
||||
}
|
||||
if len(ObsWebHost) == 0 {
|
||||
ObsWebHost = os.Getenv(common.ObsWebEnv)
|
||||
}
|
||||
if len(ObsWebHost) == 0 {
|
||||
ObsWebHost = "https://build.opensuse.org"
|
||||
ObsWebHost = ObsWebHostFromApiHost(*obsApiHost)
|
||||
}
|
||||
|
||||
common.LogDebug("OBS Gitea Host:", GiteaUrl)
|
||||
common.LogDebug("OBS Web Host:", ObsWebHost)
|
||||
common.LogDebug("OBS API Host:", ObsApiHost)
|
||||
common.LogDebug("OBS API Host:", *obsApiHost)
|
||||
|
||||
common.PanicOnErrorWithMsg(common.RequireGiteaSecretToken(), "Cannot find GITEA_TOKEN")
|
||||
common.PanicOnErrorWithMsg(common.RequireObsSecretToken(), "Cannot find OBS_USER and OBS_PASSWORD")
|
||||
|
||||
var err error
|
||||
if ObsClient, err = common.NewObsClient(ObsApiHost); err != nil {
|
||||
if ObsClient, err = common.NewObsClient(*obsApiHost); err != nil {
|
||||
log.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,10 +1,7 @@
OBS Status Service
==================

Reports the build status of an OBS service as an easy-to-produce SVG. Repository
results (build results) are cached for 10 seconds and the repository listing
for the OBS instance is cached for 5 minutes -- new repositories take up to
5 minutes to become visible.
Reports the build status of an OBS service as an easy-to-produce SVG

Requests for individual build results:

@@ -20,31 +17,19 @@ Get requests for / will also return 404 status normally. If the backend Redis
server is not available, it will return 500


By default, SVG output is generated, suitable for inclusion. But JSON and XML
output is also possible by setting the `Accept:` request header (request examples
are shown at the end of this section)

| Accept Request Header | Output format
|------------------------|---------------------
| | SVG image
| application/json | JSON data
| application/obs+xml | XML output


Areas of Responsibility
-----------------------

* Fetch and cache internal data from OBS and present it in a usable format:
  + Generate SVG output for a specific OBS project or package
  + Generate JSON/XML output for automated processing
* Low overhead
* Monitors the RabbitMQ interface for notifications of OBS package and project status
* Produces SVG output based on GET requests
* Caches results (SQLite) and periodically updates results from OBS (in case messages are missed)


Target Usage
------------

* inside README.md of package git or project git
* README.md of package git or project git
* comment section of a Gitea PR
* automated build result processing

Running
-------
@@ -57,4 +42,3 @@ Default parameters can be changed by env variables
| `OBS_STATUS_SERVICE_LISTEN` | [::1]:8080 | Listening address and port
| `OBS_STATUS_SERVICE_CERT` | /run/obs-status-service.pem | Location of certificate file for service
| `OBS_STATUS_SERVICE_KEY` | /run/obs-status-service.pem | Location of key file for service
| `REDIS` | | OBS's Redis instance URL

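For a quick manual check, the service can be queried with different `Accept` headers; the host and URL path below are placeholders, not the service's actual routes (those are documented in the request-format section above):

```sh
curl https://obs-status.example.org/<project>/<package>                                     # SVG badge (default)
curl -H 'Accept: application/json'    https://obs-status.example.org/<project>/<package>    # JSON
curl -H 'Accept: application/obs+xml' https://obs-status.example.org/<project>/<package>    # XML
```
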
Binary file not shown.
Binary file not shown.
@@ -105,10 +105,10 @@ func ProjectStatusSummarySvg(res []*common.BuildResult) []byte {
|
||||
|
||||
func LinkToBuildlog(R *common.BuildResult, S *common.PackageBuildStatus) string {
|
||||
if R != nil && S != nil {
|
||||
//switch S.Code {
|
||||
//case "succeeded", "failed", "building":
|
||||
switch S.Code {
|
||||
case "succeeded", "failed", "building":
|
||||
return "/buildlog/" + url.PathEscape(R.Project) + "/" + url.PathEscape(S.Package) + "/" + url.PathEscape(R.Repository) + "/" + url.PathEscape(R.Arch)
|
||||
//}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
@@ -170,7 +170,6 @@ func BuildStatusSvg(repo *common.BuildResult, status *common.PackageBuildStatus)
|
||||
buildStatus, ok := common.ObsBuildStatusDetails[status.Code]
|
||||
if !ok {
|
||||
buildStatus = common.ObsBuildStatusDetails["error"]
|
||||
common.LogError("Cannot find detail for status.Code", status.Code)
|
||||
}
|
||||
fillColor := "#480" // orange
|
||||
textColor := "#888"
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"compress/bzip2"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
@@ -85,36 +82,3 @@ func TestStatusSvg(t *testing.T) {
|
||||
os.WriteFile("testpackage.svg", PackageStatusSummarySvg("pkg2", data), 0o777)
|
||||
os.WriteFile("testproject.svg", ProjectStatusSummarySvg(data), 0o777)
|
||||
}
|
||||
|
||||
func TestFactoryResults(t *testing.T) {
|
||||
data, err := os.Open("factory.results.json.bz2")
|
||||
if err != nil {
|
||||
t.Fatal("Openning factory.results.json.bz2 failed:", err)
|
||||
}
|
||||
UncompressedData, err := io.ReadAll(bzip2.NewReader(data))
|
||||
if err != nil {
|
||||
t.Fatal("Reading factory.results.json.bz2 failed:", err)
|
||||
}
|
||||
|
||||
var results []*common.BuildResult
|
||||
if err := json.Unmarshal(UncompressedData, &results); err != nil {
|
||||
t.Fatal("Failed parsing test data", err)
|
||||
}
|
||||
|
||||
// add tests here
|
||||
tests := []struct {
|
||||
name string
|
||||
}{
|
||||
// add test data here
|
||||
{
|
||||
name: "First test",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
// and test code here
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"html"
|
||||
"net/url"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type SvgWriter struct {
|
||||
@@ -134,7 +133,7 @@ func (svg *SvgWriter) WritePackageStatus(loglink, arch, status, detail string) {
|
||||
}
|
||||
|
||||
func (svg *SvgWriter) WriteProjectStatus(project, repo, arch, status string, count int) {
|
||||
u, err := url.Parse(*ObsUrl + "/project/monitor/" + url.PathEscape(project) + "?defaults=0&" + url.QueryEscape(status) + "=1&arch_" + url.QueryEscape(arch) + "=1&repo_" + url.QueryEscape(strings.ReplaceAll(repo, ".", "_")) + "=1")
|
||||
u, err := url.Parse(*ObsUrl + "/project/monitor/" + url.PathEscape(project) + "?defaults=0&" + url.QueryEscape(status) + "=1&arch_" + url.QueryEscape(arch) + "=1&repo_" + url.QueryEscape(repo) + "=1")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
[Unit]
|
||||
Description=Group Review bot for %i
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=exec
|
||||
ExecStart=/usr/bin/group-review %i
|
||||
EnvironmentFile=-/etc/default/group-review/%i.env
|
||||
DynamicUser=yes
|
||||
NoNewPrivileges=yes
|
||||
ProtectSystem=strict
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
[Unit]
|
||||
Description=WorkflowDirect git bot for %i
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=exec
|
||||
ExecStart=/usr/bin/workflow-direct
|
||||
EnvironmentFile=-/etc/default/%i/workflow-direct.env
|
||||
#DynamicUser=yes
|
||||
NoNewPrivileges=yes
|
||||
ProtectSystem=strict
|
||||
# DynamicUser does not work as we cannot seem to be able to put SSH keyfiles into the temp home that are readable by SSH
|
||||
# Also, systemd override is needed away to assign User to run this. This should be dependent per instance.
|
||||
ProtectHome=no
|
||||
PrivateTmp=yes
|
||||
# RuntimeDirectory=%i
|
||||
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
|
||||
# BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
|
||||
# WorkingDirectory=%t/%i
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
[Unit]
|
||||
Description=WorkflowPR git bot for %i
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=exec
|
||||
ExecStart=/usr/bin/workflow-pr
|
||||
EnvironmentFile=-/etc/default/%i/workflow-pr.env
|
||||
#DynamicUser=yes
|
||||
NoNewPrivileges=yes
|
||||
ProtectSystem=strict
|
||||
# DynamicUser does not work as we cannot seem to be able to put SSH keyfiles into the temp home that are readable by SSH
|
||||
# Also, systemd override is needed away to assign User to run this. This should be dependent per instance.
|
||||
ProtectHome=no
|
||||
PrivateTmp=yes
|
||||
# RuntimeDirectory=%i
|
||||
# SLES 15 doesn't have HOME set for dynamic users, so we improvise
|
||||
# BindReadOnlyPaths=/etc/default/%i/known_hosts:/etc/ssh/ssh_known_hosts /etc/default/%i/config.json:%t/%i/config.json /etc/default/%i/id_ed25519 /etc/default/%i/id_ed25519.pub
|
||||
# WorkingDirectory=%t/%i
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
9
utils/gitmodules-automerge/README.md
Normal file
@@ -0,0 +1,9 @@
Purpose
-------

Automatically resolve git conflicts in .gitmodules if there's a conflict
due to a merge.

It uses HEAD and MERGE_HEAD to calculate the merge base and passes it to the
conflict resolution.

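A minimal usage sketch (the checkout path is hypothetical; as the source below shows, the tool only touches `.gitmodules` and leaves other conflicts alone):

```sh
cd /path/to/project-git        # a checkout where a merge stopped on a .gitmodules conflict
gitmodules-automerge           # recomputes the merge base and resolves the .gitmodules conflict
git add .gitmodules            # stage the resolved file and finish the merge
git merge --continue
```
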
42
utils/gitmodules-automerge/main.go
Normal file
@@ -0,0 +1,42 @@
package main

import (
	"os"
	"strings"

	"src.opensuse.org/autogits/common"
)

func main() {
	cwd, err := os.Getwd()
	if err != nil {
		common.LogError(err)
		return
	}
	gh, err := common.AllocateGitWorkTree(cwd, "", "")
	if err != nil {
		common.LogError(err)
		return
	}

	git, err := gh.ReadExistingPath("")
	if err != nil {
		common.LogError(err)
		return
	}

	MergeBase := strings.TrimSpace(git.GitExecWithOutputOrPanic("", "merge-base", "HEAD", "MERGE_HEAD"))
	status, err := git.GitStatus("")
	if err != nil {
		common.LogError(err)
		return
	}

	for _, s := range status {
		if s.Path == ".gitmodules" && s.Status == common.GitStatus_Unmerged {
			if err := git.GitResolveSubmoduleFileConflict(s, "", MergeBase, "HEAD", "MERGE_HEAD"); err != nil {
				common.LogError(err)
			}
		}
	}
}
@@ -1,98 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"slices"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func WriteNewMaintainershipFile(m *common.MaintainershipMap, filename string) {
|
||||
f, err := os.Create(filename + ".new")
|
||||
common.PanicOnError(err)
|
||||
common.PanicOnError(m.WriteMaintainershipFile(f))
|
||||
common.PanicOnError(f.Sync())
|
||||
common.PanicOnError(f.Close())
|
||||
common.PanicOnError(os.Rename(filename+".new", filename))
|
||||
}
|
||||
|
||||
func run() error {
|
||||
pkg := flag.String("package", "", "Package to modify")
|
||||
rm := flag.Bool("rm", false, "Remove maintainer from package")
|
||||
add := flag.Bool("add", false, "Add maintainer to package")
|
||||
lint := flag.Bool("lint-only", false, "Reformat entire _maintainership.json only")
|
||||
flag.Parse()
|
||||
|
||||
if (*add == *rm) && !*lint {
|
||||
return fmt.Errorf("Need to either add or remove a maintainer, or lint")
|
||||
}
|
||||
|
||||
filename := common.MaintainershipFile
|
||||
if *lint {
|
||||
if len(flag.Args()) > 0 {
|
||||
filename = flag.Arg(0)
|
||||
}
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(filename)
|
||||
if os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
m, err := common.ParseMaintainershipData(data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to parse JSON: %w", err)
|
||||
}
|
||||
|
||||
if *lint {
|
||||
m.Raw = nil // forces a rewrite
|
||||
} else {
|
||||
users := flag.Args()
|
||||
if len(users) > 0 {
|
||||
maintainers, ok := m.Data[*pkg]
|
||||
if !ok && !*add {
|
||||
return fmt.Errorf("No package %s and not adding one.", *pkg)
|
||||
}
|
||||
|
||||
if *add {
|
||||
for _, u := range users {
|
||||
if !slices.Contains(maintainers, u) {
|
||||
maintainers = append(maintainers, u)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if *rm {
|
||||
newMaintainers := make([]string, 0, len(maintainers))
|
||||
for _, m := range maintainers {
|
||||
if !slices.Contains(users, m) {
|
||||
newMaintainers = append(newMaintainers, m)
|
||||
}
|
||||
}
|
||||
maintainers = newMaintainers
|
||||
}
|
||||
|
||||
if len(maintainers) > 0 {
|
||||
slices.Sort(maintainers)
|
||||
m.Data[*pkg] = maintainers
|
||||
} else {
|
||||
delete(m.Data, *pkg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
WriteNewMaintainershipFile(m, filename)
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
if err := run(); err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
@@ -1,242 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"os"
|
||||
"os/exec"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"src.opensuse.org/autogits/common"
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
if os.Getenv("BE_MAIN") == "1" {
|
||||
main()
|
||||
return
|
||||
}
|
||||
os.Exit(m.Run())
|
||||
}
|
||||
|
||||
func TestRun(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
inData string
|
||||
expectedOut string
|
||||
params []string
|
||||
expectedError string
|
||||
isDir bool
|
||||
}{
|
||||
{
|
||||
name: "add user to existing package",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg1", "-add", "user2"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "add user to new package",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg2", "-add", "user2"},
|
||||
expectedOut: `{"pkg1": ["user1"], "pkg2": ["user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "no-op with no users",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg1", "-add"},
|
||||
expectedOut: `{"pkg1": ["user1"]}`,
|
||||
},
|
||||
{
|
||||
name: "add existing user",
|
||||
inData: `{"pkg1": ["user1", "user2"]}`,
|
||||
params: []string{"-package", "pkg1", "-add", "user2"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "remove user from package",
|
||||
inData: `{"pkg1": ["user1", "user2"]}`,
|
||||
params: []string{"-package", "pkg1", "-rm", "user2"},
|
||||
expectedOut: `{"pkg1": ["user1"]}`,
|
||||
},
|
||||
{
|
||||
name: "remove last user from package",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg1", "-rm", "user1"},
|
||||
expectedOut: `{}`,
|
||||
},
|
||||
{
|
||||
name: "remove non-existent user",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg1", "-rm", "user2"},
|
||||
expectedOut: `{"pkg1": ["user1"]}`,
|
||||
},
|
||||
{
|
||||
name: "lint only unsorted",
|
||||
inData: `{"pkg1": ["user2", "user1"]}`,
|
||||
params: []string{"-lint-only"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "lint only no changes",
|
||||
inData: `{"pkg1": ["user1", "user2"]}`,
|
||||
params: []string{"-lint-only"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "no file",
|
||||
params: []string{"-add"},
|
||||
expectedError: "no such file or directory",
|
||||
},
|
||||
{
|
||||
name: "invalid json",
|
||||
inData: `{"pkg1": ["user1"`,
|
||||
params: []string{"-add"},
|
||||
expectedError: "Failed to parse JSON",
|
||||
},
|
||||
{
|
||||
name: "add",
|
||||
inData: `{"pkg1": ["user1", "user2"]}`,
|
||||
params: []string{"-package", "pkg1", "-add", "user3"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2", "user3"]}`,
|
||||
},
|
||||
{
|
||||
name: "lint specific file",
|
||||
inData: `{"pkg1": ["user2", "user1"]}`,
|
||||
params: []string{"-lint-only", "other.json"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "add user to package when it was not there before",
|
||||
inData: `{}`,
|
||||
params: []string{"-package", "newpkg", "-add", "user1"},
|
||||
expectedOut: `{"newpkg": ["user1"]}`,
|
||||
},
|
||||
{
|
||||
name: "unreadable file (is a directory)",
|
||||
isDir: true,
|
||||
params: []string{"-rm"},
|
||||
expectedError: "is a directory",
|
||||
},
|
||||
{
|
||||
name: "remove user from non-existent package",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg2", "-rm", "user2"},
|
||||
expectedError: "No package pkg2 and not adding one.",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
oldWd, _ := os.Getwd()
|
||||
_ = os.Chdir(dir)
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
targetFile := common.MaintainershipFile
|
||||
if tt.name == "lint specific file" {
|
||||
targetFile = "other.json"
|
||||
}
|
||||
|
||||
if tt.isDir {
|
||||
_ = os.Mkdir(targetFile, 0755)
|
||||
} else if tt.inData != "" {
|
||||
_ = os.WriteFile(targetFile, []byte(tt.inData), 0644)
|
||||
}
|
||||
|
||||
flag.CommandLine = flag.NewFlagSet(os.Args[0], flag.ContinueOnError)
|
||||
|
||||
os.Args = append([]string{"cmd"}, tt.params...)
|
||||
err := run()
|
||||
|
||||
if tt.expectedError != "" {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error containing %q, but got none", tt.expectedError)
|
||||
}
|
||||
if !strings.Contains(err.Error(), tt.expectedError) {
|
||||
t.Fatalf("expected error containing %q, got %q", tt.expectedError, err.Error())
|
||||
}
|
||||
} else if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
if tt.expectedOut != "" {
|
||||
data, _ := os.ReadFile(targetFile)
|
||||
var got, expected map[string][]string
|
||||
_ = json.Unmarshal(data, &got)
|
||||
_ = json.Unmarshal([]byte(tt.expectedOut), &expected)
|
||||
|
||||
if len(got) == 0 && len(expected) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(got, expected) {
|
||||
t.Fatalf("expected %v, got %v", expected, got)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMainRecursive(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
inData string
|
||||
expectedOut string
|
||||
params []string
|
||||
expectExit bool
|
||||
}{
|
||||
{
|
||||
name: "test main() via recursive call",
|
||||
inData: `{"pkg1": ["user1"]}`,
|
||||
params: []string{"-package", "pkg1", "-add", "user2"},
|
||||
expectedOut: `{"pkg1": ["user1", "user2"]}`,
|
||||
},
|
||||
{
|
||||
name: "test main() failure",
|
||||
params: []string{"-package", "pkg1"},
|
||||
expectExit: true,
|
||||
},
|
||||
}
|
||||
|
||||
exe, _ := os.Executable()
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
oldWd, _ := os.Getwd()
|
||||
_ = os.Chdir(dir)
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if tt.inData != "" {
|
||||
_ = os.WriteFile(common.MaintainershipFile, []byte(tt.inData), 0644)
|
||||
}
|
||||
|
||||
cmd := exec.Command(exe, append([]string{"-test.run=None"}, tt.params...)...)
|
||||
cmd.Env = append(os.Environ(), "BE_MAIN=1")
|
||||
out, runErr := cmd.CombinedOutput()
|
||||
|
||||
if tt.expectExit {
|
||||
if runErr == nil {
|
||||
t.Fatalf("expected exit with error, but it succeeded")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if runErr != nil {
|
||||
t.Fatalf("unexpected error: %v: %s", runErr, string(out))
|
||||
}
|
||||
|
||||
if tt.expectedOut != "" {
|
||||
data, _ := os.ReadFile(common.MaintainershipFile)
|
||||
var got, expected map[string][]string
|
||||
_ = json.Unmarshal(data, &got)
|
||||
_ = json.Unmarshal([]byte(tt.expectedOut), &expected)
|
||||
|
||||
if !reflect.DeepEqual(got, expected) {
|
||||
t.Fatalf("expected %v, got %v", expected, got)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,51 +1,33 @@
|
||||
Direct Workflow bot
|
||||
===================
|
||||
|
||||
The project submodule is automatically updated by the direct bot whenever a branch is updated in a package repository.
|
||||
This bot can coexist with the Workflow PR bot, which is instead triggered by a new package PR.
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Devel project, where direct pushes to package git are possible.
|
||||
|
||||
Areas of responsibility
|
||||
-----------------------
|
||||
|
||||
1. Keep ProjectGit in sync with packages in the organization
|
||||
* **On pushes to package**: updates the submodule commit ID to the default branch HEAD (as configured in Gitea).
|
||||
* **On repository adds**: creates a new submodule (if non-empty).
|
||||
* **On repository removal**: removes the submodule.
|
||||
|
||||
**Note:** If you want to revert a change in a package, you need to do that manually in the project git.
|
||||
* on pushes to package, updates the submodule commit id
|
||||
to the default branch HEAD (as configured in Gitea)
|
||||
* on repository adds, creates a new submodule (if non empty)
|
||||
* on repository removal, removes the submodule
|
||||
|
||||
NOTE: reverts (push HEAD^) are not supported as they would step-on the
|
||||
work of the workflow-pr bot. Manual update of the project git is
|
||||
required in this case.
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
Uses `workflow.config` for configuration.
|
||||
Uses `workflow.config` for configuration. Parameters
|
||||
|
||||
| Field name | Details | Mandatory | Type | Allowed Values | Default |
|
||||
| ----- | ----- | ----- | ----- | ----- | ----- |
|
||||
| *Workflows* | Type of workflow | yes | string | “direct” | |
|
||||
| *Organization* | The organization that holds all the packages | yes | string | | |
|
||||
| *Branch* | The designated branch for packages | no | string | | blank (default package branch) |
|
||||
| *GitProjectName* | Repository and branch where the ProjectGit lives. | no | string | **Format**: `org/project_repo#branch` | By default assumes `_ObsPrj` with default branch in the *Organization* |
|
||||
* _Workflows_: ["direct"] -- direct entry enables direct workflow. **Mandatory**
|
||||
* _Organization_: organization that holds all the packages. **Mandatory**
|
||||
* _Branch_: branch updated in repo's, or blank for default package branch
|
||||
* _GitProjectName_: package in above org, or `org/package#branch` for PrjGit. By default assumes `_ObsPrj` with default branch and in the `Organization`
|
||||
|
||||
NOTE: `-rm`, `-removed`, and `-deleted` are suffixes used to mark the current branch as a placeholder for a previously existing package. Such branches are ignored by the bot; if the default branch carries one of these suffixes, the package is removed and is not added to the project.
|
||||
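A minimal `workflow.config` sketch for the direct workflow, mirroring the parameters above (the organization, branch, and project names are placeholders, not taken from a real deployment):

```
[
  {
    "Workflows": ["direct"],
    "Organization": "pool",
    "Branch": "factory",
    "GitProjectName": "_ObsPrj"
  }
]
```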
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Environment Variables
|
||||
-------
|
||||
|
||||
* `GITEA_TOKEN` (required)
|
||||
* `AMQP_USERNAME` (required)
|
||||
* `AMQP_PASSWORD` (required)
|
||||
* `AUTOGITS_CONFIG` (required)
|
||||
* `AUTOGITS_URL` - default: https://src.opensuse.org
|
||||
* `AUTOGITS_RABBITURL` - default: amqps://rabbit.opensuse.org
|
||||
* `AUTOGITS_DEBUG` - disabled by default, set to any value to enable
|
||||
* `AUTOGITS_CHECK_ON_START` - disabled by default, set to any value to enable
|
||||
* `AUTOGITS_REPO_PATH` - default is temporary directory
|
||||
* `AUTOGITS_IDENTITY_FILE` - explicit SSH identity file path, for cases where one needs to be specified
|
||||
Devel project, where direct pushes to package git are possible
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"log"
|
||||
"math/rand"
|
||||
"net/url"
|
||||
"os"
|
||||
@@ -39,7 +40,7 @@ import (
|
||||
const (
|
||||
AppName = "direct_workflow"
|
||||
GitAuthor = "AutoGits prjgit-updater"
|
||||
GitEmail = "autogits-direct@noreply@src.opensuse.org"
|
||||
GitEmail = "adam+autogits-direct@zombino.com"
|
||||
)
|
||||
|
||||
var configuredRepos map[string][]*common.AutogitConfig
|
||||
@@ -52,6 +53,18 @@ func isConfiguredOrg(org *common.Organization) bool {
|
||||
return found
|
||||
}
|
||||
|
||||
func concatenateErrors(err1, err2 error) error {
|
||||
if err1 == nil {
|
||||
return err2
|
||||
}
|
||||
|
||||
if err2 == nil {
|
||||
return err1
|
||||
}
|
||||
|
||||
return fmt.Errorf("%w\n%w", err1, err2)
|
||||
}
|
||||
|
||||
type RepositoryActionProcessor struct{}
|
||||
|
||||
func (*RepositoryActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
@@ -59,90 +72,69 @@ func (*RepositoryActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
configs, configFound := configuredRepos[action.Organization.Username]
|
||||
|
||||
if !configFound {
|
||||
common.LogInfo("Repository event for", action.Organization.Username, ". Not configured. Ignoring.", action.Organization.Username)
|
||||
log.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Organization.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if org, repo, _ := config.GetPrjGit(); org == action.Repository.Owner.Username && repo == action.Repository.Name {
|
||||
common.LogError("+ ignoring repo event for PrjGit repository", config.GitProjectName)
|
||||
log.Println("+ ignoring repo event for PrjGit repository", config.GitProjectName)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
processConfiguredRepositoryAction(action, config)
|
||||
err = concatenateErrors(err, processConfiguredRepositoryAction(action, config))
|
||||
}
|
||||
|
||||
return nil
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, config *common.AutogitConfig) {
|
||||
func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, config *common.AutogitConfig) error {
|
||||
gitOrg, gitPrj, gitBranch := config.GetPrjGit()
|
||||
git, err := gh.CreateGitHandler(config.Organization)
|
||||
common.PanicOnError(err)
|
||||
defer git.Close()
|
||||
|
||||
configBranch := config.Branch
|
||||
if len(configBranch) == 0 {
|
||||
configBranch = action.Repository.Default_Branch
|
||||
if common.IsRemovedBranch(configBranch) {
|
||||
common.LogDebug(" - default branch has deleted suffix. Skipping")
|
||||
return
|
||||
}
|
||||
|
||||
if len(configBranch) == 0 {
|
||||
common.LogDebug("Empty default branch in message. Maybe race-condition?")
|
||||
repo, err := gitea.GetRepository(action.Repository.Owner.Username, action.Repository.Name)
|
||||
if err != nil {
|
||||
common.LogError("Failed to fetch repository we have an event for?", action.Repository.Owner.Username, action.Repository.Name)
|
||||
return
|
||||
}
|
||||
|
||||
if len(repo.DefaultBranch) == 0 {
|
||||
common.LogError("Default branch is somehow empty. We cannot do anything.")
|
||||
return
|
||||
}
|
||||
configBranch = repo.DefaultBranch
|
||||
}
|
||||
if len(config.Branch) == 0 {
|
||||
config.Branch = action.Repository.Default_Branch
|
||||
}
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, gitOrg, gitPrj)
|
||||
if err != nil {
|
||||
common.LogError("Error accessing/creating prjgit:", gitOrg, gitPrj, gitBranch, err)
|
||||
return
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s/%s#%s err: %w", gitOrg, gitPrj, gitBranch, err)
|
||||
}
|
||||
|
||||
remoteName, err := git.GitClone(gitPrj, gitBranch, prjGitRepo.SSHURL)
|
||||
common.PanicOnError(err)
|
||||
git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
|
||||
switch action.Action {
|
||||
case "created":
|
||||
if action.Repository.Object_Format_Name != "sha256" {
|
||||
common.LogError(" - ", action.Repository.Name, "repo is not sha256. Ignoring.")
|
||||
return
|
||||
return fmt.Errorf(" - '%s' repo is not sha256. Ignoring.", action.Repository.Name)
|
||||
}
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name))
|
||||
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name))
|
||||
defer git.GitExecOrPanic(gitPrj, "submodule", "deinit", "--all")
|
||||
|
||||
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, action.Repository.Name), "branch", "--show-current"))
|
||||
if branch != configBranch {
|
||||
if err := git.GitExec(path.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "origin", configBranch+":"+configBranch); err != nil {
|
||||
common.LogError("error fetching branch", configBranch, ". ignoring as non-existent.", err) // no branch? so ignore repo here
|
||||
return
|
||||
if branch != config.Branch {
|
||||
if err := git.GitExec(path.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("error fetching branch %s. ignoring as non-existent. err: %w", config.Branch, err) // no branch? so ignore repo here
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, action.Repository.Name), "checkout", configBranch))
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, action.Repository.Name), "checkout", config.Branch))
|
||||
}
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-m", "Auto-inclusion "+action.Repository.Name))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-m", "Automatic package inclusion via Direct Workflow"))
|
||||
if !noop {
|
||||
common.PanicOnError(git.GitExec(gitPrj, "push"))
|
||||
}
|
||||
|
||||
case "deleted":
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), gitPrj, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
common.LogDebug("delete event for", action.Repository.Name, "-- not in project. Ignoring")
|
||||
return
|
||||
if DebugMode {
|
||||
log.Println("delete event for", action.Repository.Name, "-- not in project. Ignoring")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
common.PanicOnError(git.GitExec(gitPrj, "rm", action.Repository.Name))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-m", "Automatic package removal via Direct Workflow"))
|
||||
@@ -151,9 +143,10 @@ func processConfiguredRepositoryAction(action *common.RepositoryWebhookEvent, co
|
||||
}
|
||||
|
||||
default:
|
||||
common.LogError("Unknown action type:", action.Action)
|
||||
return
|
||||
return fmt.Errorf("%s: %s", "Unknown action type", action.Action)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type PushActionProcessor struct{}
|
||||
@@ -163,83 +156,77 @@ func (*PushActionProcessor) ProcessFunc(request *common.Request) error {
|
||||
configs, configFound := configuredRepos[action.Repository.Owner.Username]
|
||||
|
||||
if !configFound {
|
||||
common.LogDebug("Repository event for", action.Repository.Owner.Username, ". Not configured. Ignoring.")
|
||||
log.Printf("Repository event for %s. Not configured. Ignoring.\n", action.Repository.Owner.Username)
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, config := range configs {
|
||||
if gitOrg, gitPrj, _ := config.GetPrjGit(); gitOrg == action.Repository.Owner.Username && gitPrj == action.Repository.Name {
|
||||
common.LogInfo("+ ignoring push to PrjGit repository", config.GitProjectName)
|
||||
log.Println("+ ignoring push to PrjGit repository", config.GitProjectName)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
for _, config := range configs {
|
||||
processConfiguredPushAction(action, config)
|
||||
err = concatenateErrors(err, processConfiguredPushAction(action, config))
|
||||
}
|
||||
return nil
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func processConfiguredPushAction(action *common.PushWebhookEvent, config *common.AutogitConfig) {
|
||||
func processConfiguredPushAction(action *common.PushWebhookEvent, config *common.AutogitConfig) error {
|
||||
gitOrg, gitPrj, gitBranch := config.GetPrjGit()
|
||||
git, err := gh.CreateGitHandler(config.Organization)
|
||||
common.PanicOnError(err)
|
||||
defer git.Close()
|
||||
|
||||
common.LogDebug("push to:", action.Repository.Owner.Username, action.Repository.Name, "for:", gitOrg, gitPrj, gitBranch)
|
||||
branch := config.Branch
|
||||
if len(branch) == 0 {
|
||||
if common.IsRemovedBranch(branch) {
|
||||
common.LogDebug(" + default branch has removed suffix:", branch, "Skipping.")
|
||||
return
|
||||
}
|
||||
branch = action.Repository.Default_Branch
|
||||
common.LogDebug(" + using default branch", branch)
|
||||
log.Printf("push to: %s/%s for %s/%s#%s", action.Repository.Owner.Username, action.Repository.Name, gitOrg, gitPrj, gitBranch)
|
||||
if len(config.Branch) == 0 {
|
||||
config.Branch = action.Repository.Default_Branch
|
||||
log.Println(" + default branch", action.Repository.Default_Branch)
|
||||
}
|
||||
|
||||
prjGitRepo, err := gitea.CreateRepositoryIfNotExist(git, gitOrg, gitPrj)
|
||||
if err != nil {
|
||||
common.LogError("Error accessing/creating prjgit:", gitOrg, gitPrj, err)
|
||||
return
|
||||
return fmt.Errorf("Error accessing/creating prjgit: %s/%s err: %w", gitOrg, gitPrj, err)
|
||||
}
|
||||
|
||||
remoteName, err := git.GitClone(gitPrj, gitBranch, prjGitRepo.SSHURL)
|
||||
common.PanicOnError(err)
|
||||
git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
headCommitId, err := git.GitRemoteHead(gitPrj, remoteName, gitBranch)
|
||||
common.PanicOnError(err)
|
||||
commit, ok := git.GitSubmoduleCommitId(gitPrj, action.Repository.Name, headCommitId)
|
||||
for ok && action.Head_Commit.Id == commit {
|
||||
common.LogDebug(" -- nothing to do, commit already in ProjectGit")
|
||||
return
|
||||
log.Println(" -- nothing to do, commit already in ProjectGit")
|
||||
return nil
|
||||
}
|
||||
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), gitPrj, action.Repository.Name)); err != nil {
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", action.Repository.Clone_Url, action.Repository.Name)
|
||||
common.LogDebug("Pushed to package that is not part of the project. Re-adding...", err)
|
||||
} else if !stat.IsDir() {
|
||||
common.LogError("Pushed to a package that is not a submodule but exists in the project. Ignoring.")
|
||||
return
|
||||
if stat, err := os.Stat(filepath.Join(git.GetPath(), gitPrj, action.Repository.Name)); err != nil || !stat.IsDir() {
|
||||
if DebugMode {
|
||||
log.Println("Pushed to package that is not part of the project. Ignoring:", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "update", "--init", "--force", "--depth", "1", "--checkout", action.Repository.Name)
|
||||
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "update", "--init", "--depth", "1", "--checkout", action.Repository.Name)
|
||||
defer git.GitExecOrPanic(gitPrj, "submodule", "deinit", "--all")
|
||||
|
||||
if err := git.GitExec(filepath.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "--force", "origin", branch+":"+branch); err != nil {
|
||||
common.LogError("Error fetching branch:", branch, "Ignoring as non-existent.", err)
|
||||
return
|
||||
if err := git.GitExec(filepath.Join(gitPrj, action.Repository.Name), "fetch", "--depth", "1", "--force", remoteName, config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("error fetching branch %s. ignoring as non-existent. err: %w", config.Branch, err) // no branch? so ignore repo here
|
||||
}
|
||||
id, err := git.GitBranchHead(filepath.Join(gitPrj, action.Repository.Name), branch)
|
||||
id, err := git.GitRemoteHead(filepath.Join(gitPrj, action.Repository.Name), remoteName, config.Branch)
|
||||
common.PanicOnError(err)
|
||||
if action.Head_Commit.Id == id {
|
||||
git.GitExecOrPanic(filepath.Join(gitPrj, action.Repository.Name), "checkout", id)
|
||||
git.GitExecOrPanic(gitPrj, "commit", "-a", "-m", fmt.Sprintf("'%s' update via Direct Workflow", action.Repository.Name))
|
||||
git.GitExecOrPanic(gitPrj, "commit", "-a", "-m", "Automatic update via push via Direct Workflow")
|
||||
if !noop {
|
||||
git.GitExecOrPanic(gitPrj, "push", remoteName)
|
||||
}
|
||||
return
|
||||
return nil
|
||||
}
|
||||
|
||||
common.LogDebug("push of refs not on the configured branch", branch, ". ignoring.")
|
||||
log.Println("push of refs not on the configured branch", config.Branch, ". ignoring.")
|
||||
return nil
|
||||
}
|
||||
|
||||
func verifyProjectState(git common.Git, org string, config *common.AutogitConfig, configs []*common.AutogitConfig) (err error) {
|
||||
@@ -261,65 +248,51 @@ func verifyProjectState(git common.Git, org string, config *common.AutogitConfig
|
||||
|
||||
remoteName, err := git.GitClone(gitPrj, gitBranch, repo.SSHURL)
|
||||
common.PanicOnError(err)
|
||||
git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
defer git.GitExecQuietOrPanic(gitPrj, "submodule", "deinit", "--all", "-f")
|
||||
defer git.GitExecOrPanic(gitPrj, "submodule", "deinit", "--all")
|
||||
|
||||
common.LogDebug(" * Getting submodule list")
|
||||
log.Println(" * Getting submodule list")
|
||||
sub, err := git.GitSubmoduleList(gitPrj, "HEAD")
|
||||
common.PanicOnError(err)
|
||||
|
||||
common.LogDebug(" * Getting package links")
|
||||
log.Println(" * Getting package links")
|
||||
var pkgLinks []*PackageRebaseLink
|
||||
if f, err := fs.Stat(os.DirFS(path.Join(git.GetPath(), gitPrj)), common.PrjLinksFile); err == nil && (f.Mode()&fs.ModeType == 0) && f.Size() < 1000000 {
|
||||
if data, err := os.ReadFile(path.Join(git.GetPath(), gitPrj, common.PrjLinksFile)); err == nil {
|
||||
pkgLinks, err = parseProjectLinks(data)
|
||||
if err != nil {
|
||||
common.LogError("Cannot parse project links file:", err.Error())
|
||||
log.Println("Cannot parse project links file:", err.Error())
|
||||
pkgLinks = nil
|
||||
} else {
|
||||
ResolveLinks(org, pkgLinks, gitea)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
common.LogInfo(" - No package links defined")
|
||||
log.Println(" - No package links defined")
|
||||
}
|
||||
|
||||
/* Check existing submodule that they are updated */
|
||||
|
||||
isGitUpdated := false
|
||||
next_package:
|
||||
for filename, commitId := range sub {
|
||||
// ignore project gits
|
||||
//for _, c := range configs {
|
||||
if gitPrj == filename {
|
||||
common.LogDebug(" prjgit as package? ignoring project git:", filename)
|
||||
log.Println(" prjgit as package? ignoring project git:", filename)
|
||||
continue next_package
|
||||
}
|
||||
//}
|
||||
|
||||
branch := config.Branch
|
||||
common.LogDebug(" verifying package:", commitId, "->", filename, "@", branch)
|
||||
if repo, err := gitea.GetRepository(org, filename); repo == nil && err == nil {
|
||||
common.LogDebug(" repository removed...")
|
||||
git.GitExecOrPanic(gitPrj, "rm", filename)
|
||||
isGitUpdated = true
|
||||
continue
|
||||
} else if err != nil {
|
||||
common.LogError("failed fetching repo data", org, filename, err)
|
||||
continue
|
||||
} else if len(branch) == 0 {
|
||||
branch = repo.DefaultBranch
|
||||
common.LogDebug(" -> using default branch", branch)
|
||||
if common.IsRemovedBranch(branch) {
|
||||
common.LogDebug(" Default branch for", filename, "is excluded")
|
||||
log.Printf(" verifying package: %s -> %s(%s)", commitId, filename, config.Branch)
|
||||
commits, err := gitea.GetRecentCommits(org, filename, config.Branch, 10)
|
||||
if len(commits) == 0 {
|
||||
if repo, err := gitea.GetRepository(org, filename); repo == nil && err == nil {
|
||||
git.GitExecOrPanic(gitPrj, "rm", filename)
|
||||
isGitUpdated = true
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
commits, err := gitea.GetRecentCommits(org, filename, branch, 10)
|
||||
if err != nil {
|
||||
common.LogDebug(" -> failed to fetch recent commits for package:", filename, " Err:", err)
|
||||
log.Println(" -> failed to fetch recent commits for package:", filename, " Err:", err)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -336,7 +309,7 @@ next_package:
|
||||
if l.Pkg == filename {
|
||||
link = l
|
||||
|
||||
common.LogDebug(" -> linked package")
|
||||
log.Println(" -> linked package")
|
||||
// so, we need to rebase here. Can't really optimize, so clone entire package tree and remote
|
||||
pkgPath := path.Join(gitPrj, filename)
|
||||
git.GitExecOrPanic(gitPrj, "submodule", "update", "--init", "--checkout", filename)
|
||||
@@ -350,7 +323,7 @@ next_package:
|
||||
nCommits := len(common.SplitStringNoEmpty(git.GitExecWithOutputOrPanic(pkgPath, "rev-list", "^NOW", "HEAD"), "\n"))
|
||||
if nCommits > 0 {
|
||||
if !noop {
|
||||
git.GitExecOrPanic(pkgPath, "push", "-f", "origin", "HEAD:"+branch)
|
||||
git.GitExecOrPanic(pkgPath, "push", "-f", "origin", "HEAD:"+config.Branch)
|
||||
}
|
||||
isGitUpdated = true
|
||||
}
|
||||
@@ -367,27 +340,42 @@ next_package:
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "update", "--init", "--depth", "1", "--checkout", filename))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(gitPrj, filename), "fetch", "--depth", "1", "origin", commits[0].SHA))
|
||||
common.PanicOnError(git.GitExec(filepath.Join(gitPrj, filename), "checkout", commits[0].SHA))
|
||||
common.LogDebug(" -> updated to", commits[0].SHA)
|
||||
log.Println(" -> updated to", commits[0].SHA)
|
||||
isGitUpdated = true
|
||||
} else {
|
||||
// probably need `merge-base` or `rev-list` here instead, or the project updated already
|
||||
common.LogInfo(" *** Cannot find SHA of last matching update for package:", filename, " Ignoring")
|
||||
log.Println(" *** Cannot find SHA of last matching update for package:", filename, " Ignoring")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// find all missing repositories, and add them
|
||||
common.LogDebug("checking for missing repositories...")
|
||||
if DebugMode {
|
||||
log.Println("checking for missing repositories...")
|
||||
}
|
||||
repos, err := gitea.GetOrganizationRepositories(org)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
common.LogDebug(" nRepos:", len(repos))
|
||||
if DebugMode {
|
||||
log.Println(" nRepos:", len(repos))
|
||||
}
|
||||
|
||||
/* Check repositories in org to make sure they are included in project git */
|
||||
next_repo:
|
||||
for _, r := range repos {
|
||||
if DebugMode {
|
||||
log.Println(" -- checking", r.Name)
|
||||
}
|
||||
|
||||
if r.ObjectFormatName != "sha256" {
|
||||
if DebugMode {
|
||||
log.Println(" + ", r.ObjectFormatName, ". Needs to be sha256. Ignoring")
|
||||
}
|
||||
continue next_repo
|
||||
}
|
||||
|
||||
// for _, c := range configs {
|
||||
if gitPrj == r.Name {
|
||||
// ignore project gits
|
||||
@@ -402,45 +390,43 @@ next_repo:
|
||||
}
|
||||
}
|
||||
|
||||
common.LogDebug(" -- checking repository:", r.Name)
|
||||
|
||||
branch := config.Branch
|
||||
if len(branch) == 0 {
|
||||
branch = r.DefaultBranch
|
||||
if common.IsRemovedBranch(branch) {
|
||||
continue
|
||||
}
|
||||
if DebugMode {
|
||||
log.Println(" -- checking repository:", r.Name)
|
||||
}
|
||||
if commits, err := gitea.GetRecentCommits(org, r.Name, branch, 1); err != nil || len(commits) == 0 {
|
||||
|
||||
if _, err := gitea.GetRecentCommits(org, r.Name, config.Branch, 1); err != nil {
|
||||
// assumption that package does not exist, so not part of project
|
||||
// https://github.com/go-gitea/gitea/issues/31976
|
||||
|
||||
// or, we do not have commits here
|
||||
continue
|
||||
}
|
||||
|
||||
// add repository to git project
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--force", "--depth", "1", r.CloneURL, r.Name))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "submodule", "--quiet", "add", "--depth", "1", r.CloneURL, r.Name))
|
||||
|
||||
curBranch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, r.Name), "branch", "--show-current"))
|
||||
if branch != curBranch {
|
||||
if err := git.GitExec(path.Join(gitPrj, r.Name), "fetch", "--depth", "1", "origin", branch+":"+branch); err != nil {
|
||||
return fmt.Errorf("Fetching branch %s for %s/%s failed. Ignoring.", branch, repo.Owner.UserName, r.Name)
|
||||
if len(config.Branch) > 0 {
|
||||
branch := strings.TrimSpace(git.GitExecWithOutputOrPanic(path.Join(gitPrj, r.Name), "branch", "--show-current"))
|
||||
if branch != config.Branch {
|
||||
if err := git.GitExec(path.Join(gitPrj, r.Name), "fetch", "--depth", "1", "origin", config.Branch+":"+config.Branch); err != nil {
|
||||
return fmt.Errorf("Fetching branch %s for %s/%s failed. Ignoring.", config.Branch, repo.Owner.UserName, r.Name)
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, r.Name), "checkout", config.Branch))
|
||||
}
|
||||
common.PanicOnError(git.GitExec(path.Join(gitPrj, r.Name), "checkout", branch))
|
||||
}
|
||||
|
||||
isGitUpdated = true
|
||||
}
|
||||
|
||||
if isGitUpdated {
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-a", "-m", "Periodic SYNC in Direct Workflow"))
|
||||
common.PanicOnError(git.GitExec(gitPrj, "commit", "-a", "-m", "Automatic update via push via Direct Workflow -- SYNC"))
|
||||
if !noop {
|
||||
git.GitExecOrPanic(gitPrj, "push", remoteName)
|
||||
}
|
||||
}
|
||||
|
||||
common.LogInfo("Verification finished for ", org, ", prjgit:", config.GitProjectName)
|
||||
if DebugMode {
|
||||
log.Println("Verification finished for ", org, ", prjgit:", config.GitProjectName)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -451,17 +437,17 @@ var checkInterval time.Duration
|
||||
func checkOrg(org string, configs []*common.AutogitConfig) {
|
||||
git, err := gh.CreateGitHandler(org)
|
||||
if err != nil {
|
||||
common.LogError("Failed to allocate GitHandler:", err)
|
||||
log.Println("Faield to allocate GitHandler:", err)
|
||||
return
|
||||
}
|
||||
defer git.Close()
|
||||
|
||||
for _, config := range configs {
|
||||
common.LogInfo(" ++ starting verification, org:", org, "config:", config.GitProjectName)
|
||||
log.Printf(" ++ starting verification, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
if err := verifyProjectState(git, org, config, configs); err != nil {
|
||||
common.LogError(" *** verification failed, org:", org, err)
|
||||
log.Printf(" *** verification failed, org: `%s`, err: %#v\n", org, err)
|
||||
} else {
|
||||
common.LogError(" ++ verification complete, org:", org, config.GitProjectName)
|
||||
log.Printf(" ++ verification complete, org: `%s` config: `%s`\n", org, config.GitProjectName)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -470,7 +456,7 @@ func checkRepos() {
|
||||
for org, configs := range configuredRepos {
|
||||
if checkInterval > 0 {
|
||||
sleepInterval := checkInterval - checkInterval/2 + time.Duration(rand.Int63n(int64(checkInterval)))
|
||||
common.LogInfo(" - sleep interval", sleepInterval, "until next check")
|
||||
log.Println(" - sleep interval", sleepInterval, "until next check")
|
||||
time.Sleep(sleepInterval)
|
||||
}
|
||||
|
||||
@@ -482,9 +468,9 @@ func consistencyCheckProcess() {
|
||||
if checkOnStart {
|
||||
savedCheckInterval := checkInterval
|
||||
checkInterval = 0
|
||||
common.LogInfo("== Startup consistency check begin...")
|
||||
log.Println("== Startup consistency check begin...")
|
||||
checkRepos()
|
||||
common.LogInfo("== Startup consistency check done...")
|
||||
log.Println("== Startup consistency check done...")
|
||||
checkInterval = savedCheckInterval
|
||||
}
|
||||
|
||||
@@ -499,8 +485,7 @@ var gh common.GitHandlerGenerator
|
||||
func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
configFile, err := common.ReadConfigFile(configFilename)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(4)
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
configs, _ := common.ResolveWorkflowConfigs(gitea, configFile)
|
||||
@@ -508,7 +493,9 @@ func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
*orgs = make([]string, 0, 1)
|
||||
for _, c := range configs {
|
||||
if slices.Contains(c.Workflows, "direct") {
|
||||
common.LogDebug(" + adding org:", c.Organization, ", branch:", c.Branch, ", prjgit:", c.GitProjectName)
|
||||
if DebugMode {
|
||||
log.Printf(" + adding org: '%s', branch: '%s', prjgit: '%s'\n", c.Organization, c.Branch, c.GitProjectName)
|
||||
}
|
||||
configs := configuredRepos[c.Organization]
|
||||
if configs == nil {
|
||||
configs = make([]*common.AutogitConfig, 0, 1)
|
||||
@@ -522,7 +509,7 @@ func updateConfiguration(configFilename string, orgs *[]string) {
|
||||
}
|
||||
|
||||
func main() {
|
||||
configFilename := flag.String("config", "config.json", "List of PrjGit")
|
||||
configFilename := flag.String("config", "", "List of PrjGit")
|
||||
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
flag.BoolVar(&DebugMode, "debug", false, "Extra debugging information")
|
||||
@@ -533,35 +520,10 @@ func main() {
|
||||
flag.Parse()
|
||||
|
||||
if err := common.RequireGiteaSecretToken(); err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := common.RequireRabbitSecrets(); err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if cf := os.Getenv("AUTOGITS_CONFIG"); len(cf) > 0 {
|
||||
*configFilename = cf
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_URL"); len(url) > 0 {
|
||||
*giteaUrl = url
|
||||
}
|
||||
if url := os.Getenv("AUTOGITS_RABBITURL"); len(url) > 0 {
|
||||
*rabbitUrl = url
|
||||
}
|
||||
if debug := os.Getenv("AUTOGITS_DEBUG"); len(debug) > 0 {
|
||||
DebugMode = true
|
||||
}
|
||||
if check := os.Getenv("AUTOGITS_CHECK_ON_START"); len(check) > 0 {
|
||||
checkOnStart = true
|
||||
}
|
||||
if p := os.Getenv("AUTOGITS_REPO_PATH"); len(p) > 0 {
|
||||
*basePath = p
|
||||
}
|
||||
|
||||
if DebugMode {
|
||||
common.SetLoggingLevel(common.LogLevelDebug)
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
defs := &common.RabbitMQGiteaEventsProcessor{}
|
||||
@@ -570,14 +532,12 @@ func main() {
|
||||
if len(*basePath) == 0 {
|
||||
*basePath, err = os.MkdirTemp(os.TempDir(), AppName)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
gh, err = common.AllocateGitWorkTree(*basePath, GitAuthor, GitEmail)
|
||||
if err != nil {
|
||||
common.LogError(err)
|
||||
os.Exit(1)
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// handle reconfiguration
|
||||
@@ -592,10 +552,10 @@ func main() {
|
||||
}
|
||||
|
||||
if sig != syscall.SIGHUP {
|
||||
common.LogError("Unexpected signal received:", sig)
|
||||
log.Println("Unexpected signal received:", sig)
|
||||
continue
|
||||
}
|
||||
common.LogError("*** Reconfiguring ***")
|
||||
log.Println("*** Reconfiguring ***")
|
||||
updateConfiguration(*configFilename, &defs.Orgs)
|
||||
defs.Connection().UpdateTopics(defs)
|
||||
}
|
||||
@@ -607,25 +567,23 @@ func main() {
|
||||
gitea = common.AllocateGiteaTransport(*giteaUrl)
|
||||
CurrentUser, err := gitea.GetCurrentUser()
|
||||
if err != nil {
|
||||
common.LogError("Cannot fetch current user:", err)
|
||||
os.Exit(2)
|
||||
log.Fatalln("Cannot fetch current user:", err)
|
||||
}
|
||||
common.LogInfo("Current User:", CurrentUser.UserName)
|
||||
log.Println("Current User:", CurrentUser.UserName)
|
||||
|
||||
updateConfiguration(*configFilename, &defs.Orgs)
|
||||
|
||||
defs.Connection().RabbitURL, err = url.Parse(*rabbitUrl)
|
||||
if err != nil {
|
||||
common.LogError("cannot parse server URL. Err:", err)
|
||||
os.Exit(3)
|
||||
log.Panicf("cannot parse server URL. Err: %#v\n", err)
|
||||
}
|
||||
|
||||
go consistencyCheckProcess()
|
||||
common.LogInfo("defs:", *defs)
|
||||
log.Println("defs:", *defs)
|
||||
|
||||
defs.Handlers = make(map[string]common.RequestProcessor)
|
||||
defs.Handlers[common.RequestType_Push] = &PushActionProcessor{}
|
||||
defs.Handlers[common.RequestType_Repository] = &RepositoryActionProcessor{}
|
||||
|
||||
common.LogError(common.ProcessRabbitMQEvents(defs))
|
||||
log.Fatal(common.ProcessRabbitMQEvents(defs))
|
||||
}
|
||||
|
||||
@@ -1,65 +1,54 @@
|
||||
Workflow-PR bot
|
||||
===============
|
||||
|
||||
Keeps ProjectGit PRs in sync with the corresponding PackageGit PRs.
|
||||
Keeps ProjectGit PR in-sync with a PackageGit PR
|
||||
|
||||
|
||||
Areas of Responsibility
|
||||
-----------------------
|
||||
|
||||
* Detects a PackageGit PR creation against a package and creates a corresponding PR against the ProjectGit
|
||||
* When a PackageGit PR is updated, the corresponding PR against the ProjectGit is updated
|
||||
* Stores a reference to the PackageGit PR in the ProjectGit PR comment headers, for later use
|
||||
* this allows the ProjectGit PR to be merged separately later (via another tool, for example)
|
||||
* Initiates all staging workflows via review requests
|
||||
|
||||
|
||||
Target Usage
|
||||
------------
|
||||
|
||||
Any project (devel, codestream, product, etc.) that accepts PRs.
|
||||
Any project (devel, etc) that accepts PR
|
||||
|
||||
|
||||
Main Tasks
|
||||
----------
|
||||
|
||||
* **Synchronization**:
|
||||
* When a **PackageGit PR** is created for a package on a specific project branch, a corresponding PR is automatically generated in **ProjectGit**.
|
||||
* When a PackageGit PR is updated, the corresponding PR against the ProjectGit is updated.
|
||||
* A link to the PackageGit PR is stored in the body of the ProjectGit PR comments in the following format:
|
||||
* `PR: organization/package_name!pull_request_number`
|
||||
* Example: `PR: pool/curl!4`
|
||||
* It closes an empty ProjectGit PR (e.g., if a PR was initially created for a single package but later integrated into a larger ProjectGit PR).
|
||||
* It forwards the Work In Progress (WIP) flag to the ProjectGit PR. If the ProjectGit PR references multiple Package PRs, triggering the WIP flag on the ProjectGit PR side only requires a single WIP package PR.
|
||||
|
||||
* **Reviewer Management**:
|
||||
* It adds required reviewers in the ProjectGit PR.
|
||||
* It adds required reviewers in the PackageGit PR.
|
||||
* If new commits are added to a PackageGit PR, reviewers who have already approved it will be re-added.
|
||||
|
||||
* **Merge Management**:
|
||||
* Manages PR merges based on configuration flags (`ManualMergeOnly`, `ManualMergeProject`).
|
||||
* In general, merge only happens if all mandatory reviews are completed.
|
||||
* **ManualMergeProject** is stricter than **ManualMergeOnly** and has higher priority.
|
||||
|
||||
| Flag | Value | Behavior |
|
||||
| ----- | ----- | ----- |
|
||||
| ManualMergeProject | true | Both ProjectGit and PackageGit PRs are merged upon an allowed project maintainer commenting "merge ok” in the ProjectGit PR. |
|
||||
| ManualMergeOnly | true | Both PackageGit PR and ProjectGit PR are merged upon an allowed package maintainer or project maintainer commenting “merge ok” in the PackageGit PR. |
|
||||
| ManualMergeOnly and ManualMergeProject | false | Both ProjectGit and PackageGit PRs are merged as soon as all reviews are completed in both PrjGit and PkgGit PRs. |
|
||||
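Putting the Synchronization rules above together: for a single PackageGit PR `pool/curl!4`, the generated ProjectGit PR would look roughly like the sketch below (title and body follow the formats described above; the exact values are illustrative):

```
Title: Forwarded PRs: curl

This is a forwarded pull request by AutoGits PR Review Bot
referencing the following pull request(s):

PR: pool/curl!4
```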
|
||||
Config file
|
||||
-----------
|
||||
JSON
|
||||
* _Workflows_: ["pr"] -- pr entry enables pr workflow. **Mandatory**
|
||||
* _Organization_: organization that holds all the packages **Mandatory**
|
||||
* _Branch_: branch updated in repo's **Mandatory**
|
||||
* _GitProjectName_: package in above org, or `org/package#branch` for PrjGit. By default assumes `_ObsPrj` with default branch and in the `Organization`
|
||||
* _Reviewers_: accounts associated with mandatory reviews for PrjGit. Can trigger additional
|
||||
review requests for PrjGit or associated PkgGit repos. Only when all reviews are
|
||||
satisfied, will the PrjGit PR be merged. See Reviewers below.
|
||||
* _ManualMergeOnly_: (true, false) only merge if "merge ok" comment/review by package or project maintainers or reviewers
|
||||
* _ManualMergeProject_: (true, false) only merge if "merge ok" by project maintainers or reviewers
|
||||
* _ReviewRequired_: (true, false) ignores that the submitter is a maintainer and requires a review from another maintainer, if one is available
|
||||
* _NoProjectGitPR_: (true, false) do not create PrjGit PRs, but still process reviews, etc.
|
||||
* _Permissions_: permissions and associated accounts/groups. See below.
|
||||
|
||||
* Filename: `workflow.config`
|
||||
* Location: ProjectGit
|
||||
* Format: non-standard JSON (comments allowed)
|
||||
|
||||
| Field name | Details | Mandatory | Type | Allowed Values | Default |
|
||||
| ----- | ----- | ----- | ----- | ----- | ----- |
|
||||
| *Workflows* | Type of workflow | yes | string | “pr” | |
|
||||
| *Organization* | The organization where PackageGit PRs are expected to occur | yes | string | | |
|
||||
| *Branch* | The designated branch for PackageGit PRs | yes | string | | |
|
||||
| *GitProjectName* | Repository and branch where the ProjectGit lives. | no | string | **Format**: `org/project_repo#branch` | By default assumes `_ObsPrj` with default branch in the *Organization* |
|
||||
| *ManualMergeOnly* | Merges are permitted only upon receiving a "merge ok" comment from designated maintainers in the PkgGit PR. | no | bool | true, false | false |
|
||||
| *ManualMergeProject* | Merges are permitted only upon receiving a "merge ok" comment in the ProjectGit PR from project maintainers. | no | bool | true, false | false |
|
||||
| *ReviewRequired* | If submitter is a maintainer, require review from another maintainer if available. | no | bool | true, false | false |
|
||||
| *NoProjectGitPR* | Do not create PrjGit PR, but still perform other tasks. | no | bool | true, false | false |
|
||||
| *Reviewers* | PrjGit reviewers. Additional review requests are triggered for associated PkgGit PRs. PrjGit PR is merged only when all reviews are complete. | no | array of strings | | `[]` |
|
||||
| *ReviewGroups* | If a group is specified in Reviewers, its members are listed here. | no | array of objects | | `[]` |
|
||||
| *ReviewGroups > Name* | Name of the group | no | string | | |
|
||||
| *ReviewGroups > Reviewers* | Members of the group | no | array of strings | | |
|
||||
| *ReviewGroups > Silent* | Add members for notifications. If true, members are not explicitly requested to review. If one member approves, others are removed. | no | bool | true, false | false |
|
||||
NOTE: `-rm`, `-removed`, and `-deleted` are suffixes used to mark the current branch as a placeholder for a previously existing package. Such branches are ignored by the bot; if the default branch carries one of these suffixes, the package is removed and is not added to the project.
|
||||
example:
|
||||
|
||||
[
|
||||
{
|
||||
"Workflows": ["pr", "direct"],
|
||||
"Organization": "autogits",
|
||||
"GitProjectName": "HiddenPrj",
|
||||
"Branch": "hidden",
|
||||
"Reviewers": []
|
||||
},
|
||||
...
|
||||
]
|
||||
|
||||
Reviewers
|
||||
---------
|
||||
@@ -68,72 +57,39 @@ Reviews is a list of accounts that need to review package and/or project. They h
|
||||
|
||||
[~][*|-|+]username
|
||||
|
||||
A tilde (`~`) before a prefix signifies an advisory reviewer. Their input is requested, but their review status will not otherwise affect the process.
|
||||
General prefix of ~ indicates advisory reviewer. They will be requested, but ignored otherwise.
|
||||
|
||||
Other prefixes indicate project or package association of the reviewer:
|
||||
|
||||
* `*` indicates project *and* package
|
||||
* `-` indicates project-only reviewer
|
||||
* `+` indicates package-only reviewer
|
||||
|
||||
`+` is implied.
|
||||
`+` is implied. For example
|
||||
|
||||
For example: `[foo, -bar, ~*moo]` results in:
|
||||
* foo: package reviews
|
||||
* bar: project reviews
|
||||
* moo: package and project reviews, but ignored
|
||||
`[foo, -bar, ~*moo]`
|
||||
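The same reviewer list expressed as a `workflow.config` fragment (a sketch only; this assumes the prefixes are stored verbatim in the _Reviewers_ array of strings):

```
{
  "Reviewers": ["foo", "-bar", "~*moo"]
}
```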
|
||||
Package Deletion Requests
|
||||
-------------------------
|
||||
(NOT YET IMPLEMENTED)
|
||||
|
||||
* **Removing a Package:**
|
||||
To remove a package from a project, submit a ProjectGit Pull Request (PR) that removes the corresponding submodule. The bot will then rename the project branch in the pool by appending "-removed" to its name.
|
||||
|
||||
* **Adding a Package Again:**
|
||||
If you wish to re-add a package, create a new PrjGit PR that re-adds the submodule from the branch carrying the "-removed" suffix. The bot will automatically remove this suffix from the project branch in the pool.
|
||||
|
||||
|
||||
Labels
|
||||
------
|
||||
|
||||
The following labels are used, when defined in Repo/Org.
|
||||
|
||||
| Label Config Entry | Default label | Description
|
||||
|--------------------|----------------|----------------------------------------
|
||||
| StagingAuto | staging/Auto | Assigned to Project Git PRs when first staged
|
||||
| ReviewPending | review/Pending | Assigned to Project Git PR when package reviews are still pending
|
||||
| ReviewDone | review/Done | Assigned to Project Git PR when reviews are complete on all package PRs
|
||||
results in
|
||||
* foo -> package reviews
|
||||
* bar -> project reviews
|
||||
* moo -> package and project reviews, but ignored
|
||||
|
||||
|
||||
Maintainership
|
||||
--------------
|
||||
|
||||
Filename: \_maintainership.json
|
||||
Location: ProjectGit
|
||||
Format: JSON
|
||||
Fields:
|
||||
Maintainership information is defined per project. For reviews, package maintainers are coalesced
|
||||
with project maintainers. A review by any of the maintainers is acceptable.
|
||||
|
||||
| Key | Value | Notes |
|
||||
| ----- | ----- | ----- |
|
||||
| package name | array of strings representing the package maintainers | List of package maintainers |
|
||||
| “” (empty string) | array of strings representing the project maintainers | List of project maintainers |
|
||||
example:
|
||||
|
||||
Maintainership information is defined per project. For PackageGit PR reviews, package maintainers are combined with project maintainers. A review by any of these maintainers is acceptable.
|
||||
{
|
||||
"package1": [ "reviewer", "reviewer2"],
|
||||
"package2": [],
|
||||
|
||||
If the submitter is a maintainer, no review will be requested from them.
|
||||
// "project" maintainer
|
||||
"": ["reviewer3", "reviewer4"]
|
||||
}
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
{
|
||||
"package1": [ "reviewer", "reviewer2"],
|
||||
"package2": [],
|
||||
|
||||
// "project" maintainer
|
||||
"": ["reviewer3", "reviewer4"]
|
||||
}
|
||||
```
|
||||
|
||||
Permissions
|
||||
-----------
|
||||
@@ -153,11 +109,3 @@ the `workflow.config`.
|
||||
|
||||
NOTE: Project Maintainers have these permissions automatically.
|
||||
|
||||
Server configuration
|
||||
--------------------------
|
||||
|
||||
**Configuration file:**
|
||||
|
||||
| Field | Type | Notes |
|
||||
| ----- | ----- | ----- |
|
||||
| root | Array of string | Format **org/repo\#branch** |
|
||||
|
||||
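A minimal sketch of this server configuration (the surrounding file name and any other fields are assumptions; only the `root` field and its `org/repo#branch` format come from the table above):

```
{
  "root": ["pool/_ObsPrj#factory"]
}
```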
18
workflow-pr/interfaces/state_checker.go
Normal file
@@ -0,0 +1,18 @@
|
||||
package interfaces
|
||||
|
||||
import "src.opensuse.org/autogits/common"
|
||||
|
||||
//go:generate mockgen -source=state_checker.go -destination=../mock/state_checker.go -typed -package mock_main
|
||||
|
||||
|
||||
type StateChecker interface {
|
||||
VerifyProjectState(configs *common.AutogitConfig) ([]*PRToProcess, error)
|
||||
CheckRepos() error
|
||||
ConsistencyCheckProcess() error
|
||||
}
|
||||
|
||||
type PRToProcess struct {
|
||||
Org, Repo, Branch string
|
||||
}
|
||||
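As an illustration only (not part of this change): with the mock produced by the `go:generate` directive above, a test could stub `StateChecker` roughly as below. The import paths for the `interfaces` and `mock` packages are assumptions based on the directory layout in this PR.

```
package main

import (
	"testing"

	"go.uber.org/mock/gomock"

	"src.opensuse.org/autogits/common"
	"src.opensuse.org/autogits/workflow-pr/interfaces"
	mock_main "src.opensuse.org/autogits/workflow-pr/mock"
)

// Sketch: stub VerifyProjectState and check the stubbed result is returned.
func TestStateCheckerMockSketch(t *testing.T) {
	ctrl := gomock.NewController(t)
	checker := mock_main.NewMockStateChecker(ctrl)

	checker.EXPECT().
		VerifyProjectState(gomock.Any()).
		Return([]*interfaces.PRToProcess{{Org: "autogits", Repo: "foo", Branch: "testing"}}, nil)

	prs, err := checker.VerifyProjectState(&common.AutogitConfig{})
	if err != nil || len(prs) != 1 {
		t.Fatal("unexpected mock result:", prs, err)
	}
}
```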
|
||||
|
||||
@@ -44,35 +44,19 @@ var CurrentUser *models.User
|
||||
var GitHandler common.GitHandlerGenerator
|
||||
var Gitea common.Gitea
|
||||
|
||||
func getEnvOverrideString(env, def string) string {
|
||||
if envValue := os.Getenv(env); len(envValue) != 0 {
|
||||
return envValue
|
||||
}
|
||||
return def
|
||||
}
|
||||
|
||||
func getEnvOverrideBool(env string, def bool) bool {
|
||||
if envValue := os.Getenv(env); len(envValue) != 0 {
|
||||
if value, err := strconv.Atoi(envValue); err == nil && value > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return def
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.StringVar(&GitAuthor, "git-author", "AutoGits PR Review Bot", "Git commit author")
|
||||
flag.StringVar(&GitEmail, "git-email", "amajer+devel-git@suse.de", "Git commit email")
|
||||
|
||||
workflowConfig := flag.String("config", getEnvOverrideString("AUTOGITS_CONFIG", ""), "Repository and workflow definition file")
|
||||
workflowConfig := flag.String("config", "", "Repository and workflow definition file")
|
||||
giteaUrl := flag.String("gitea-url", "https://src.opensuse.org", "Gitea instance")
|
||||
rabbitUrl := flag.String("url", "amqps://rabbit.opensuse.org", "URL for RabbitMQ instance")
|
||||
debugMode := flag.Bool("debug", getEnvOverrideBool("AUTOGITS_DEBUG", false), "Extra debugging information")
|
||||
checkOnStart := flag.Bool("check-on-start", getEnvOverrideBool("AUTOGITS_CHECK_ON_START", false), "Check all repositories for consistency on start, without delays")
|
||||
debugMode := flag.Bool("debug", false, "Extra debugging information")
|
||||
checkOnStart := flag.Bool("check-on-start", false, "Check all repositories for consistency on start, without delays")
|
||||
checkIntervalHours := flag.Float64("check-interval", 5, "Check interval (+-random delay) for repositories for consistency, in hours")
|
||||
flag.BoolVar(&ListPROnly, "list-prs-only", false, "Only lists PRs without acting on them")
|
||||
flag.Int64Var(&PRID, "id", -1, "Process only the specific ID and ignore the rest. Use for debugging")
|
||||
basePath := flag.String("repo-path", getEnvOverrideString("AUTOGITS_REPO_PATH", ""), "Repository path. Default is temporary directory")
|
||||
basePath := flag.String("repo-path", "", "Repository path. Default is temporary directory")
|
||||
pr := flag.String("only-pr", "", "Only specific PR to process. For debugging")
|
||||
flag.BoolVar(&common.IsDryRun, "dry", false, "Dry mode. Do not push changes to remote repo.")
|
||||
flag.Parse()
|
||||
@@ -186,7 +170,7 @@ func main() {
|
||||
common.RequestType_PRSync: req,
|
||||
common.RequestType_PRReviewAccepted: req,
|
||||
common.RequestType_PRReviewRejected: req,
|
||||
common.RequestType_PRComment: req,
|
||||
common.RequestType_IssueComment: req,
|
||||
},
|
||||
}
|
||||
listenDefs.Connection().RabbitURL, _ = url.Parse(*rabbitUrl)
|
||||
|
||||
@@ -2,8 +2,10 @@ package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -20,6 +22,83 @@ func TestProjectBranchName(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
const LocalCMD = "---"
|
||||
|
||||
func gitExecs(t *testing.T, git *common.GitHandlerImpl, cmds [][]string) {
|
||||
for _, cmd := range cmds {
|
||||
if cmd[0] == LocalCMD {
|
||||
command := exec.Command(cmd[2], cmd[3:]...)
|
||||
command.Dir = filepath.Join(git.GitPath, cmd[1])
|
||||
command.Stdin = nil
|
||||
command.Env = append([]string{"GIT_CONFIG_COUNT=1", "GIT_CONFIG_KEY_1=protocol.file.allow", "GIT_CONFIG_VALUE_1=always"}, common.ExtraGitParams...)
|
||||
_, err := command.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Errorf(" *** error: %v\n", err)
|
||||
}
|
||||
} else {
|
||||
git.GitExecOrPanic(cmd[0], cmd[1:]...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func commandsForPackages(dir, prefix string, startN, endN int) [][]string {
|
||||
commands := make([][]string, (endN-startN+2)*6)
|
||||
|
||||
if dir == "" {
|
||||
dir = "."
|
||||
}
|
||||
cmdIdx := 0
|
||||
for idx := startN; idx <= endN; idx++ {
|
||||
pkgDir := fmt.Sprintf("%s%d", prefix, idx)
|
||||
|
||||
commands[cmdIdx+0] = []string{"", "init", "-q", "--object-format", "sha256", "-b", "testing", pkgDir}
|
||||
commands[cmdIdx+1] = []string{LocalCMD, pkgDir, "/usr/bin/touch", "testFile"}
|
||||
commands[cmdIdx+2] = []string{pkgDir, "add", "testFile"}
|
||||
commands[cmdIdx+3] = []string{pkgDir, "commit", "-m", "added testFile"}
|
||||
commands[cmdIdx+4] = []string{pkgDir, "config", "receive.denyCurrentBranch", "ignore"}
|
||||
commands[cmdIdx+5] = []string{"prj", "submodule", "add", filepath.Join("..", pkgDir), filepath.Join(dir, pkgDir)}
|
||||
|
||||
cmdIdx += 6
|
||||
}
|
||||
|
||||
// add all the submodules to the prj
|
||||
commands[cmdIdx+0] = []string{"prj", "commit", "-a", "-m", "adding subpackages"}
|
||||
|
||||
return commands
|
||||
}
|
||||
|
||||
func setupGitForTests(t *testing.T, git *common.GitHandlerImpl) {
|
||||
common.ExtraGitParams = []string{
|
||||
"GIT_CONFIG_COUNT=1",
|
||||
"GIT_CONFIG_KEY_0=protocol.file.allow",
|
||||
"GIT_CONFIG_VALUE_0=always",
|
||||
|
||||
"GIT_AUTHOR_NAME=testname",
|
||||
"GIT_AUTHOR_EMAIL=test@suse.com",
|
||||
"GIT_AUTHOR_DATE='2005-04-07T22:13:13'",
|
||||
"GIT_COMMITTER_NAME=testname",
|
||||
"GIT_COMMITTER_EMAIL=test@suse.com",
|
||||
"GIT_COMMITTER_DATE='2005-04-07T22:13:13'",
|
||||
}
|
||||
|
||||
gitExecs(t, git, [][]string{
|
||||
{"", "init", "-q", "--object-format", "sha256", "-b", "testing", "prj"},
|
||||
{"", "init", "-q", "--object-format", "sha256", "-b", "testing", "foo"},
|
||||
{LocalCMD, "foo", "/usr/bin/touch", "file1"},
|
||||
{"foo", "add", "file1"},
|
||||
{"foo", "commit", "-m", "first commit"},
|
||||
{"prj", "config", "receive.denyCurrentBranch", "ignore"},
|
||||
{"prj", "submodule", "init"},
|
||||
{"prj", "submodule", "add", "../foo", "testRepo"},
|
||||
{"prj", "add", ".gitmodules", "testRepo"},
|
||||
{"prj", "commit", "-m", "First instance"},
|
||||
{"prj", "submodule", "deinit", "testRepo"},
|
||||
{LocalCMD, "foo", "/usr/bin/touch", "file2"},
|
||||
{"foo", "add", "file2"},
|
||||
{"foo", "commit", "-m", "added file2"},
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpdatePrBranch(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
origLogger := log.Writer()
|
||||
@@ -46,7 +125,7 @@ func TestUpdatePrBranch(t *testing.T) {
|
||||
req.Pull_Request.Base.Sha = strings.TrimSpace(revs[1])
|
||||
req.Pull_Request.Head.Sha = strings.TrimSpace(revs[0])
|
||||
|
||||
updateSubmoduleInPR("testRepo", revs[0], git)
|
||||
updateSubmoduleInPR("mainRepo", revs[0], git)
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "commit", "-a", "-m", "created commit"))
|
||||
common.PanicOnError(git.GitExec(common.DefaultGitPrj, "push", "origin", "+HEAD:+testing"))
|
||||
git.GitExecOrPanic("prj", "reset", "--hard", "testing")
|
||||
|
||||
10
workflow-pr/mock/pr_processor.go
Normal file
@@ -0,0 +1,10 @@
|
||||
// Code generated by MockGen. DO NOT EDIT.
|
||||
// Source: pr_processor.go
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -source=pr_processor.go -destination=mock/pr_processor.go -typed
|
||||
//
|
||||
|
||||
// Package mock_main is a generated GoMock package.
|
||||
package mock_main
|
||||
@@ -3,18 +3,18 @@
|
||||
//
|
||||
// Generated by this command:
|
||||
//
|
||||
// mockgen -source=state_checker.go -destination=mock_state_checker.go -typed -package main
|
||||
// mockgen -source=state_checker.go -destination=../mock/state_checker.go -typed -package mock_main
|
||||
//
|
||||
|
||||
// Package main is a generated GoMock package.
|
||||
package main
|
||||
// Package mock_main is a generated GoMock package.
|
||||
package mock_main
|
||||
|
||||
import (
|
||||
reflect "reflect"
|
||||
|
||||
gomock "go.uber.org/mock/gomock"
|
||||
common "src.opensuse.org/autogits/common"
|
||||
models "src.opensuse.org/autogits/common/gitea-generated/models"
|
||||
interfaces "src.opensuse.org/autogits/workflow-pr/interfaces"
|
||||
)
|
||||
|
||||
// MockStateChecker is a mock of StateChecker interface.
|
||||
@@ -42,9 +42,11 @@ func (m *MockStateChecker) EXPECT() *MockStateCheckerMockRecorder {
|
||||
}
|
||||
|
||||
// CheckRepos mocks base method.
|
||||
func (m *MockStateChecker) CheckRepos() {
|
||||
func (m *MockStateChecker) CheckRepos() error {
|
||||
m.ctrl.T.Helper()
|
||||
m.ctrl.Call(m, "CheckRepos")
|
||||
ret := m.ctrl.Call(m, "CheckRepos")
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// CheckRepos indicates an expected call of CheckRepos.
|
||||
@@ -60,19 +62,19 @@ type MockStateCheckerCheckReposCall struct {
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockStateCheckerCheckReposCall) Return() *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.Return()
|
||||
func (c *MockStateCheckerCheckReposCall) Return(arg0 error) *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockStateCheckerCheckReposCall) Do(f func()) *MockStateCheckerCheckReposCall {
|
||||
func (c *MockStateCheckerCheckReposCall) Do(f func() error) *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockStateCheckerCheckReposCall) DoAndReturn(f func()) *MockStateCheckerCheckReposCall {
|
||||
func (c *MockStateCheckerCheckReposCall) DoAndReturn(f func() error) *MockStateCheckerCheckReposCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
@@ -116,10 +118,10 @@ func (c *MockStateCheckerConsistencyCheckProcessCall) DoAndReturn(f func() error
|
||||
}
|
||||
|
||||
// VerifyProjectState mocks base method.
|
||||
func (m *MockStateChecker) VerifyProjectState(configs *common.AutogitConfig) ([]*PRToProcess, error) {
|
||||
func (m *MockStateChecker) VerifyProjectState(configs *common.AutogitConfig) ([]*interfaces.PRToProcess, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "VerifyProjectState", configs)
|
||||
ret0, _ := ret[0].([]*PRToProcess)
|
||||
ret0, _ := ret[0].([]*interfaces.PRToProcess)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
@@ -137,81 +139,19 @@ type MockStateCheckerVerifyProjectStateCall struct {
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) Return(arg0 []*PRToProcess, arg1 error) *MockStateCheckerVerifyProjectStateCall {
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) Return(arg0 []*interfaces.PRToProcess, arg1 error) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.Return(arg0, arg1)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) Do(f func(*common.AutogitConfig) ([]*PRToProcess, error)) *MockStateCheckerVerifyProjectStateCall {
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) Do(f func(*common.AutogitConfig) ([]*interfaces.PRToProcess, error)) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) DoAndReturn(f func(*common.AutogitConfig) ([]*PRToProcess, error)) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// MockPullRequestProcessor is a mock of PullRequestProcessor interface.
|
||||
type MockPullRequestProcessor struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *MockPullRequestProcessorMockRecorder
|
||||
isgomock struct{}
|
||||
}
|
||||
|
||||
// MockPullRequestProcessorMockRecorder is the mock recorder for MockPullRequestProcessor.
|
||||
type MockPullRequestProcessorMockRecorder struct {
|
||||
mock *MockPullRequestProcessor
|
||||
}
|
||||
|
||||
// NewMockPullRequestProcessor creates a new mock instance.
|
||||
func NewMockPullRequestProcessor(ctrl *gomock.Controller) *MockPullRequestProcessor {
|
||||
mock := &MockPullRequestProcessor{ctrl: ctrl}
|
||||
mock.recorder = &MockPullRequestProcessorMockRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
// EXPECT returns an object that allows the caller to indicate expected use.
|
||||
func (m *MockPullRequestProcessor) EXPECT() *MockPullRequestProcessorMockRecorder {
|
||||
return m.recorder
|
||||
}
|
||||
|
||||
// Process mocks base method.
|
||||
func (m *MockPullRequestProcessor) Process(req *models.PullRequest) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "Process", req)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// Process indicates an expected call of Process.
|
||||
func (mr *MockPullRequestProcessorMockRecorder) Process(req any) *MockPullRequestProcessorProcessCall {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
call := mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Process", reflect.TypeOf((*MockPullRequestProcessor)(nil).Process), req)
|
||||
return &MockPullRequestProcessorProcessCall{Call: call}
|
||||
}
|
||||
|
||||
// MockPullRequestProcessorProcessCall wrap *gomock.Call
|
||||
type MockPullRequestProcessorProcessCall struct {
|
||||
*gomock.Call
|
||||
}
|
||||
|
||||
// Return rewrite *gomock.Call.Return
|
||||
func (c *MockPullRequestProcessorProcessCall) Return(arg0 error) *MockPullRequestProcessorProcessCall {
|
||||
c.Call = c.Call.Return(arg0)
|
||||
return c
|
||||
}
|
||||
|
||||
// Do rewrite *gomock.Call.Do
|
||||
func (c *MockPullRequestProcessorProcessCall) Do(f func(*models.PullRequest) error) *MockPullRequestProcessorProcessCall {
|
||||
c.Call = c.Call.Do(f)
|
||||
return c
|
||||
}
|
||||
|
||||
// DoAndReturn rewrite *gomock.Call.DoAndReturn
|
||||
func (c *MockPullRequestProcessorProcessCall) DoAndReturn(f func(*models.PullRequest) error) *MockPullRequestProcessorProcessCall {
|
||||
func (c *MockStateCheckerVerifyProjectStateCall) DoAndReturn(f func(*common.AutogitConfig) ([]*interfaces.PRToProcess, error)) *MockStateCheckerVerifyProjectStateCall {
|
||||
c.Call = c.Call.DoAndReturn(f)
|
||||
return c
|
||||
}
@@ -1,5 +1,7 @@
package main

//go:generate mockgen -source=pr_processor.go -destination=mock/pr_processor.go -typed

import (
"encoding/json"
"errors"
@@ -24,7 +26,6 @@ func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
title_refs := make([]string, 0, len(prset.PRs)-1)
refs := make([]string, 0, len(prset.PRs)-1)

prefix := ""
for _, pr := range prset.PRs {
if prset.IsPrjGitPR(pr.PR) {
continue
@@ -33,9 +34,6 @@ func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
// remove PRs that are not open from description
continue
}
if strings.HasPrefix(pr.PR.Title, "WIP:") {
prefix = "WIP: "
}
org, repo, idx := pr.PRComponents()

title_refs = append(title_refs, repo)
@@ -43,10 +41,7 @@ func PrjGitDescription(prset *common.PRSet) (title string, desc string) {
refs = append(refs, ref)
}

slices.Sort(title_refs)
slices.Sort(refs)

title = prefix + "Forwarded PRs: " + strings.Join(title_refs, ", ")
title = "Forwarded PRs: " + strings.Join(title_refs, ", ")
desc = fmt.Sprintf("This is a forwarded pull request by %s\nreferencing the following pull request(s):\n\n", GitAuthor) + strings.Join(refs, "\n") + "\n"

if prset.Config.ManualMergeOnly {
@@ -232,18 +227,12 @@ func (pr *PRProcessor) CreatePRjGitPR(prjGitPRbranch string, prset *common.PRSet
}

title, desc := PrjGitDescription(prset)
pr, err, isNew := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
pr, err := Gitea.CreatePullRequestIfNotExist(PrjGit, prjGitPRbranch, PrjGitBranch, title, desc)
if err != nil {
common.LogError("Error creating PrjGit PR:", err)
return err
}
org := PrjGit.Owner.UserName
repo := PrjGit.Name
idx := pr.Index
if isNew {
Gitea.SetLabels(org, repo, idx, []string{prset.Config.Label(common.Label_StagingAuto)})
}
Gitea.UpdatePullRequest(org, repo, idx, &models.EditPullRequestOption{
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, pr.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
})

@@ -280,7 +269,6 @@ func (pr *PRProcessor) RebaseAndSkipSubmoduleCommits(prset *common.PRSet, branch
}

var updatePrjGitError_requeue error = errors.New("Commits do not match. Requeing after 5 seconds.")

func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
_, _, PrjGitBranch := prset.Config.GetPrjGit()
PrjGitPR, err := prset.GetPrjGitPR()
@@ -291,9 +279,6 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {

git := pr.git
if len(prset.PRs) == 1 {
if len(PrjGitPR.RemoteName) == 0 {
PrjGitPR.RemoteName, _ = git.GitClone(common.DefaultGitPrj, "", PrjGitPR.PR.Base.Repo.SSHURL)
}
git.GitExecOrPanic(common.DefaultGitPrj, "fetch", PrjGitPR.RemoteName, PrjGitPR.PR.Head.Sha)
common.LogDebug("Only project git in PR. Nothing to update.")
return nil
@@ -360,20 +345,7 @@ func (pr *PRProcessor) UpdatePrjGitPR(prset *common.PRSet) error {
}

// update PR
isPrTitleSame := func(CurrentTitle, NewTitle string) bool {
ctlen := len(CurrentTitle)
for _, suffix := range []string{"...", "…"} {
slen := len(suffix)
if ctlen > 250 && strings.HasSuffix(CurrentTitle, suffix) && len(NewTitle) > ctlen {
NewTitle = NewTitle[0:ctlen-slen] + suffix
if CurrentTitle == NewTitle {
return true
}
}
}
return CurrentTitle == NewTitle
}
if PrjGitPR.PR.User.UserName == CurrentUser.UserName && (PrjGitPR.PR.Body != PrjGitBody || !isPrTitleSame(PrjGitPR.PR.Title, PrjGitTitle)) {
if PrjGitPR.PR.Body != PrjGitBody || PrjGitPR.PR.Title != PrjGitTitle {
Gitea.UpdatePullRequest(PrjGit.Owner.UserName, PrjGit.Name, PrjGitPR.PR.Index, &models.EditPullRequestOption{
RemoveDeadline: true,
Title: PrjGitTitle,
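The isPrTitleSame closure above tolerates forge-side title truncation when deciding whether an update is needed: if the stored title is longer than 250 bytes and ends in "..." or "…", the freshly generated title is cut to the stored length and given the same suffix before comparing, presumably because very long PR titles are truncated with an ellipsis by the forge. A self-contained restatement of that comparison, illustrative only and not the project's API:

package main

import (
	"fmt"
	"strings"
)

// titlesEquivalent mirrors the idea of the isPrTitleSame closure: treat a
// regenerated full title as equal to a stored title that was truncated and
// suffixed with an ellipsis. The function name and example values are illustrative.
func titlesEquivalent(current, generated string) bool {
	for _, suffix := range []string{"...", "…"} {
		if len(current) > 250 && strings.HasSuffix(current, suffix) && len(generated) > len(current) {
			trimmed := generated[:len(current)-len(suffix)] + suffix
			if current == trimmed {
				return true
			}
		}
	}
	return current == generated
}

func main() {
	stored := strings.Repeat("a", 252) + "…" // 255 bytes, as a forge might keep it
	regenerated := strings.Repeat("a", 300)  // full title rebuilt from the PR set
	fmt.Println(titlesEquivalent(stored, regenerated)) // true: no title update needed
}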
@@ -409,10 +381,6 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
prjGitPRbranch := prGitBranchNameForPR(prRepo, prNo)
prjGitPR, err := prset.GetPrjGitPR()
if err == common.PRSet_PrjGitMissing {
if req.State != "open" {
common.LogDebug("This PR is closed and no ProjectGit PR. Ignoring.")
return nil
}
common.LogDebug("Missing PrjGit. Need to create one under branch", prjGitPRbranch)

if err = pr.CreatePRjGitPR(prjGitPRbranch, prset); err != nil {
@@ -505,7 +473,7 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
// make sure that prjgit is consistent and only submodules that are to be *updated*
// reset anything that changed that is not part of the prset
// package removals/additions are *not* counted here

org, repo, branch := config.GetPrjGit()
// TODO: this is broken...
if pr, err := prset.GetPrjGitPR(); err == nil && false {
common.LogDebug("Submodule parse begin")
@@ -554,19 +522,11 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
} else {
common.LogInfo("* No prjgit")
}
maintainers, err := common.FetchProjectMaintainershipData(Gitea, config)
maintainers, err := common.FetchProjectMaintainershipData(Gitea, org, repo, branch)
if err != nil {
return err
}

// update prset if we should build it or not
if prjGitPR != nil {
if file, err := git.GitCatFile(common.DefaultGitPrj, prjGitPR.PR.Head.Sha, "staging.config"); err == nil {
prset.HasAutoStaging = (file != nil)
common.LogDebug(" -> automatic staging enabled?:", prset.HasAutoStaging)
}
}

// handle case where PrjGit PR is only one left and there are no changes, then we can just close the PR
if len(prset.PRs) == 1 && prjGitPR != nil && prset.PRs[0] == prjGitPR && prjGitPR.PR.User.UserName == prset.BotUser {
common.LogDebug(" --> checking if superflous PR")
@@ -605,14 +565,14 @@ func (pr *PRProcessor) Process(req *models.PullRequest) error {
common.LogError("merge error:", err)
}
} else {
err = prset.AssignReviewers(Gitea, maintainers)
prset.AssignReviewers(Gitea, maintainers)
}
return err
}

type RequestProcessor struct {
configuredRepos map[string][]*common.AutogitConfig
recursive int
recursive int
}

func ProcesPullRequest(pr *models.PullRequest, configs []*common.AutogitConfig) error {
@@ -631,15 +591,6 @@ func ProcesPullRequest(pr *models.PullRequest, configs []*common.AutogitConfig)
return PRProcessor.Process(pr)
}

func (w *RequestProcessor) Process(pr *models.PullRequest) error {
configs, ok := w.configuredRepos[pr.Base.Repo.Owner.UserName]
if !ok {
common.LogError("*** Cannot find config for org:", pr.Base.Repo.Owner.UserName)
return fmt.Errorf("*** Cannot find config for org: %s", pr.Base.Repo.Owner.UserName)
}
return ProcesPullRequest(pr, configs)
}

func (w *RequestProcessor) ProcessFunc(request *common.Request) (err error) {
defer func() {
if r := recover(); r != nil {
@@ -662,7 +613,7 @@ func (w *RequestProcessor) ProcessFunc(request *common.Request) (err error) {
common.LogError("Cannot find PR for issue:", req.Pull_Request.Base.Repo.Owner.Username, req.Pull_Request.Base.Repo.Name, req.Pull_Request.Number)
return err
}
} else if req, ok := request.Data.(*common.IssueCommentWebhookEvent); ok {
} else if req, ok := request.Data.(*common.IssueWebhookEvent); ok {
pr, err = Gitea.GetPullRequest(req.Repository.Owner.Username, req.Repository.Name, int64(req.Issue.Number))
if err != nil {
common.LogError("Cannot find PR for issue:", req.Repository.Owner.Username, req.Repository.Name, int64(req.Issue.Number))
@@ -6,6 +6,7 @@ import (

"go.uber.org/mock/gomock"
"src.opensuse.org/autogits/common"
"src.opensuse.org/autogits/common/gitea-generated/client/repository"
"src.opensuse.org/autogits/common/gitea-generated/models"
mock_common "src.opensuse.org/autogits/common/mock"
)
@@ -16,7 +17,7 @@ func TestOpenPR(t *testing.T) {
Reviewers: []string{"reviewer1", "reviewer2"},
Branch: "branch",
Organization: "test",
GitProjectName: "prj#testing",
GitProjectName: "prj",
},
}

@@ -25,7 +26,6 @@ func TestOpenPR(t *testing.T) {
Number: 1,
Pull_Request: &common.PullRequest{
Id: 1,
Number: 1,
Base: common.Head{
Ref: "branch",
Sha: "testing",
@@ -53,56 +53,6 @@ func TestOpenPR(t *testing.T) {
},
}

modelPR := &models.PullRequest{
ID: 1,
Index: 1,
State: "open",
User: &models.User{UserName: "testuser"},
RequestedReviewers: []*models.User{},
Base: &models.PRBranchInfo{
Ref: "branch",
Sha: "testing",
Repo: &models.Repository{
Name: "testRepo",
Owner: &models.User{
UserName: "test",
},
},
},
Head: &models.PRBranchInfo{
Ref: "branch",
Sha: "testing",
Repo: &models.Repository{
Name: "testRepo",
Owner: &models.User{
UserName: "test",
},
},
},
}

mockCreatePR := &models.PullRequest{
ID: 2,
Index: 2,
Body: "Forwarded PRs: testRepo\n\nPR: test/testRepo!1",
User: &models.User{UserName: "testuser"},
RequestedReviewers: []*models.User{},
Base: &models.PRBranchInfo{
Name: "testing",
Repo: &models.Repository{
Name: "prjcopy",
Owner: &models.User{UserName: "test"},
},
},
Head: &models.PRBranchInfo{
Sha: "head",
},
}

CurrentUser = &models.User{
UserName: "testuser",
}

git := &common.GitHandlerImpl{
GitCommiter: "tester",
GitEmail: "test@suse.com",
@@ -110,47 +60,14 @@

t.Run("PR git opened request against PrjGit == no action", func(t *testing.T) {
ctl := gomock.NewController(t)
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
Gitea = gitea
Gitea = mock_common.NewMockGitea(ctl)

git.GitPath = t.TempDir()

pr.config.GitProjectName = "testRepo#testing"
pr.config.GitProjectName = "testRepo"
event.Repository.Name = "testRepo"
mockGit := mock_common.NewMockGit(ctl)
pr.git = mockGit

mockGitGen := mock_common.NewMockGitHandlerGenerator(ctl)
GitHandler = mockGitGen
mockGitGen.EXPECT().CreateGitHandler(gomock.Any()).Return(mockGit, nil).AnyTimes()

mockGit.EXPECT().GitClone(gomock.Any(), gomock.Any(), gomock.Any()).Return("origin", nil).AnyTimes()
mockGit.EXPECT().GitBranchHead(gomock.Any(), gomock.Any()).Return("head", nil).AnyTimes()
mockGit.EXPECT().GitSubmoduleList(gomock.Any(), gomock.Any()).Return(map[string]string{"testRepo": "testing"}, nil).AnyTimes()
mockGit.EXPECT().GitExecOrPanic(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
mockGit.EXPECT().GitCatFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
mockGit.EXPECT().Close().Return(nil).AnyTimes()

gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().UpdatePullRequest(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().SetLabels(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.Label{}, nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().GetRepository(gomock.Any(), gomock.Any()).Return(&models.Repository{
Owner: &models.User{UserName: "test"},
Name: "prjcopy",
SSHURL: "git@src.opensuse.org:test/prj.git",
}, nil).AnyTimes()

gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().CreatePullRequestIfNotExist(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(mockCreatePR, nil, true).AnyTimes()
gitea.EXPECT().RequestReviews(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()

if err := pr.Process(modelPR); err != nil {
if err := pr.Process(event); err != nil {
t.Error("Error PrjGit opened request. Should be no error.", err)
}
})
@@ -158,52 +75,43 @@
t.Run("Open PrjGit PR", func(t *testing.T) {
ctl := gomock.NewController(t)
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()

Gitea = gitea

event.Repository.Name = "testRepo"
pr.config.GitProjectName = "prjcopy#testing"
pr.config.GitProjectName = "prjcopy"
git.GitPath = t.TempDir()

setupGitForTests(t, git)

gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().CreatePullRequestIfNotExist(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(mockCreatePR, nil, true).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().RequestReviews(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
prjgit := &models.Repository{
SSHURL: "./prj",
DefaultBranch: "testing",
}
giteaPR := &models.PullRequest{
Base: &models.PRBranchInfo{
Repo: &models.Repository{
Owner: &models.User{
UserName: "test",
},
Name: "testRepo",
},
},
User: &models.User{
UserName: "test",
},
}
// gitea.EXPECT().GetAssociatedPrjGitPR("test", "prjcopy", "test", "testRepo", int64(1)).Return(nil, nil)
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil)
gitea.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(giteaPR, nil)
gitea.EXPECT().RequestReviews(giteaPR, "reviewer1", "reviewer2").Return(nil, nil)
gitea.EXPECT().GetPullRequestReviews("test", "testRepo", int64(0)).Return([]*models.PullReview{}, nil)

gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipDirFile("test", "prjcopy", "branch", "_project").Return(nil, "", repository.NewRepoGetRawFileNotFound())
gitea.EXPECT().FetchMaintainershipFile("test", "prjcopy", "branch").Return(nil, "", repository.NewRepoGetRawFileNotFound())

mockGit := mock_common.NewMockGit(ctl)
pr.git = mockGit

mockGitGen := mock_common.NewMockGitHandlerGenerator(ctl)
GitHandler = mockGitGen
mockGitGen.EXPECT().CreateGitHandler(gomock.Any()).Return(mockGit, nil).AnyTimes()

mockGit.EXPECT().GitClone(gomock.Any(), gomock.Any(), gomock.Any()).Return("origin", nil).AnyTimes()
mockGit.EXPECT().GitBranchHead(gomock.Any(), gomock.Any()).Return("head", nil).AnyTimes()
mockGit.EXPECT().GitSubmoduleList(gomock.Any(), gomock.Any()).Return(map[string]string{"testRepo": "testing"}, nil).AnyTimes()
mockGit.EXPECT().GitExecOrPanic(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
mockGit.EXPECT().GitCatFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
mockGit.EXPECT().Close().Return(nil).AnyTimes()

gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().UpdatePullRequest(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().SetLabels(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.Label{}, nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().GetRepository(gomock.Any(), gomock.Any()).Return(&models.Repository{
Owner: &models.User{UserName: "test"},
Name: "prjcopy",
SSHURL: "git@src.opensuse.org:test/prj.git",
}, nil).AnyTimes()

err := pr.Process(modelPR)
err := pr.Process(event)
if err != nil {
t.Error("error:", err)
}
@@ -212,61 +120,30 @@ func TestOpenPR(t *testing.T) {
t.Run("Cannot create prjgit repository", func(t *testing.T) {
ctl := gomock.NewController(t)
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()

Gitea = gitea

event.Repository.Name = "testRepo"
pr.config.GitProjectName = "prjcopy#testing"
pr.config.GitProjectName = "prjcopy"
git.GitPath = t.TempDir()

setupGitForTests(t, git)
failedErr := errors.New("Returned error here")
gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, failedErr).AnyTimes()
gitea.EXPECT().CreatePullRequestIfNotExist(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(mockCreatePR, nil, true).AnyTimes()
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(nil, failedErr)

mockGit := mock_common.NewMockGit(ctl)
pr.git = mockGit

gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
mockGitGen := mock_common.NewMockGitHandlerGenerator(ctl)
GitHandler = mockGitGen
mockGitGen.EXPECT().CreateGitHandler(gomock.Any()).Return(mockGit, nil).AnyTimes()

mockGit.EXPECT().GitClone(gomock.Any(), gomock.Any(), gomock.Any()).Return("origin", nil).AnyTimes()
mockGit.EXPECT().GitBranchHead(gomock.Any(), gomock.Any()).Return("head", nil).AnyTimes()
mockGit.EXPECT().GitSubmoduleList(gomock.Any(), gomock.Any()).Return(map[string]string{"testRepo": "testing"}, nil).AnyTimes()
mockGit.EXPECT().GitExecOrPanic(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
mockGit.EXPECT().GitCatFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
gitea.EXPECT().RequestReviews(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
mockGit.EXPECT().Close().Return(nil).AnyTimes()

gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().UpdatePullRequest(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().SetLabels(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.Label{}, nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().GetRepository(gomock.Any(), gomock.Any()).Return(&models.Repository{
Owner: &models.User{UserName: "test"},
Name: "prjcopy",
SSHURL: "git@src.opensuse.org:test/prj.git",
}, nil).AnyTimes()

err := pr.Process(modelPR)
if err != nil {
err := pr.Process(event)
if err != failedErr {
t.Error("error:", err)
}
})
t.Run("Cannot create PR", func(t *testing.T) {
ctl := gomock.NewController(t)
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()

Gitea = gitea

event.Repository.Name = "testRepo"
pr.config.GitProjectName = "prjcopy#testing"
pr.config.GitProjectName = "prjcopy"
git.GitPath = t.TempDir()

setupGitForTests(t, git)
@@ -275,37 +152,10 @@ func TestOpenPR(t *testing.T) {
DefaultBranch: "testing",
}
failedErr := errors.New("Returned error here")
gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), gomock.Any()).Return(prjgit, nil).AnyTimes()
gitea.EXPECT().CreatePullRequestIfNotExist(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, failedErr, false)
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, failedErr)

mockGit := mock_common.NewMockGit(ctl)
pr.git = mockGit

gitea.EXPECT().RequestReviews(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
mockGitGen := mock_common.NewMockGitHandlerGenerator(ctl)
GitHandler = mockGitGen
mockGitGen.EXPECT().CreateGitHandler(gomock.Any()).Return(mockGit, nil).AnyTimes()

mockGit.EXPECT().GitClone(gomock.Any(), gomock.Any(), gomock.Any()).Return("origin", nil).AnyTimes()
mockGit.EXPECT().GitBranchHead(gomock.Any(), gomock.Any()).Return("head", nil).AnyTimes()
mockGit.EXPECT().GitSubmoduleList(gomock.Any(), gomock.Any()).Return(map[string]string{"testRepo": "testing"}, nil).AnyTimes()
mockGit.EXPECT().GitExecOrPanic(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
mockGit.EXPECT().GitCatFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
mockGit.EXPECT().Close().Return(nil).AnyTimes()

gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().UpdatePullRequest(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().SetLabels(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.Label{}, nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().GetRepository(gomock.Any(), gomock.Any()).Return(&models.Repository{
Owner: &models.User{UserName: "test"},
Name: "prjcopy",
SSHURL: "git@src.opensuse.org:test/prj.git",
}, nil).AnyTimes()

err := pr.Process(modelPR)
err := pr.Process(event)
if err != failedErr {
t.Error("error:", err)
}
@@ -313,54 +163,44 @@
t.Run("Open PrjGit PR", func(t *testing.T) {
ctl := gomock.NewController(t)
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()

Gitea = gitea

event.Repository.Name = "testRepo"
pr.config.GitProjectName = "prjcopy#testing"
pr.config.GitProjectName = "prjcopy"
git.GitPath = t.TempDir()

setupGitForTests(t, git)
prjgit := &models.Repository{
Name: "SomeRepo",
Owner: &models.User{
UserName: "org",
},
SSHURL: "./prj",
DefaultBranch: "testing",
}
giteaPR := &models.PullRequest{
Base: &models.PRBranchInfo{
Repo: prjgit,
},
Index: 13,
User: &models.User{
UserName: "test",
},
}
failedErr := errors.New("Returned error here")
// gitea.EXPECT().GetAssociatedPrjGitPR("test", "prjcopy", "test", "testRepo", int64(1)).Return(nil, nil)
gitea.EXPECT().CreateRepositoryIfNotExist(git, "test", "prjcopy").Return(prjgit, nil)
gitea.EXPECT().GetPullRequest("test", "testRepo", int64(1)).Return(giteaPR, nil)
gitea.EXPECT().GetPullRequestReviews("org", "SomeRepo", int64(13)).Return([]*models.PullReview{}, nil)
gitea.EXPECT().CreatePullRequestIfNotExist(prjgit, gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(giteaPR, nil)
gitea.EXPECT().RequestReviews(giteaPR, "reviewer1", "reviewer2").Return(nil, failedErr)

gitea.EXPECT().CreateRepositoryIfNotExist(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().CreatePullRequestIfNotExist(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(mockCreatePR, nil, true).AnyTimes()
gitea.EXPECT().RequestReviews(gomock.Any(), gomock.Any()).Return(nil, failedErr).AnyTimes()
gitea.EXPECT().FetchMaintainershipDirFile("test", "prjcopy", "branch", "_project").Return(nil, "", repository.NewRepoGetRawFileNotFound())
gitea.EXPECT().FetchMaintainershipFile("test", "prjcopy", "branch").Return(nil, "", repository.NewRepoGetRawFileNotFound())

gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()

mockGit := mock_common.NewMockGit(ctl)
pr.git = mockGit

mockGitGen := mock_common.NewMockGitHandlerGenerator(ctl)
GitHandler = mockGitGen
mockGitGen.EXPECT().CreateGitHandler(gomock.Any()).Return(mockGit, nil).AnyTimes()

mockGit.EXPECT().GitClone(gomock.Any(), gomock.Any(), gomock.Any()).Return("origin", nil).AnyTimes()
mockGit.EXPECT().GitBranchHead(gomock.Any(), gomock.Any()).Return("head", nil).AnyTimes()
mockGit.EXPECT().GitSubmoduleList(gomock.Any(), gomock.Any()).Return(map[string]string{"testRepo": "testing"}, nil).AnyTimes()
mockGit.EXPECT().GitExecOrPanic(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
mockGit.EXPECT().GitCatFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
mockGit.EXPECT().Close().Return(nil).AnyTimes()

gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().UpdatePullRequest(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(modelPR, nil).AnyTimes()
gitea.EXPECT().SetLabels(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.Label{}, nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(&models.Repository{SSHURL: "git@src.opensuse.org:test/prj.git"}, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().GetRepository(gomock.Any(), gomock.Any()).Return(&models.Repository{
Owner: &models.User{UserName: "test"},
Name: "prjcopy",
SSHURL: "git@src.opensuse.org:test/prj.git",
}, nil).AnyTimes()

err := pr.Process(modelPR)
if err != nil {
err := pr.Process(event)
if errors.Unwrap(err) != failedErr {
t.Error("error:", err)
}
})
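The rewritten subtests above all share one setup shape: plumbing calls on the mocked Gitea and Git handlers are stubbed loosely with AnyTimes(), the call each subtest actually cares about gets a strict, counted expectation, and Process is then driven directly with a *models.PullRequest. A hedged sketch of a helper that would capture the loose part (the helper name is an assumption; the calls and signatures are the ones already used in the tests above):

package main

import (
	"go.uber.org/mock/gomock"
	"src.opensuse.org/autogits/common/gitea-generated/models"
	mock_common "src.opensuse.org/autogits/common/mock"
)

// newLooseGitea bundles the uninteresting AnyTimes() stubs so each subtest
// only spells out the strict expectation it is really testing.
func newLooseGitea(ctl *gomock.Controller) *mock_common.MockGitea {
	gitea := mock_common.NewMockGitea(ctl)
	gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
	gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
	gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
	gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
	gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
	return gitea
}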
@@ -1,7 +1,12 @@
package main

/*
import (
"bytes"
"errors"
"log"
"os"
"path"
"strings"
"testing"

"go.uber.org/mock/gomock"
@@ -11,147 +16,217 @@ import (
)

func TestSyncPR(t *testing.T) {
config := &common.AutogitConfig{
Reviewers: []string{"reviewer1", "reviewer2"},
Branch: "testing",
Organization: "test-org",
GitProjectName: "test-prj#testing",
pr := PRProcessor{
config: &common.AutogitConfig{
Reviewers: []string{"reviewer1", "reviewer2"},
Branch: "testing",
Organization: "test",
GitProjectName: "prj",
},
}

git := &common.GitHandlerImpl{
GitCommiter: "tester",
GitEmail: "test@suse.com",
GitPath: t.TempDir(),
}

processor := &PRProcessor{
config: config,
git: git,
event := &common.PullRequestWebhookEvent{
Action: "syncronized",
Number: 42,
Pull_Request: &common.PullRequest{
Number: 42,
Base: common.Head{
Ref: "branch",
Sha: "8a6a69a4232cabda04a4d9563030aa888ff5482f75aa4c6519da32a951a072e2",
Repo: &common.Repository{
Name: "testRepo",
Owner: &common.Organization{
Username: pr.config.Organization,
},
Default_Branch: "main1",
},
},
Head: common.Head{
Ref: "branch",
Sha: "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985",
Repo: &common.Repository{
Name: "testRepo",
Default_Branch: "main1",
},
},
},
Repository: &common.Repository{
Owner: &common.Organization{
Username: pr.config.Organization,
},
},
}

modelPR := &models.PullRequest{
Index: 42,
Body: "PR: test-org/test-prj#24",
Body: "PR: test/prj#24",
Base: &models.PRBranchInfo{
Ref: "main",
Ref: "branch",
Sha: "8a6a69a4232cabda04a4d9563030aa888ff5482f75aa4c6519da32a951a072e2",
Repo: &models.Repository{
Name: "test-repo",
Owner: &models.User{UserName: "test-org"},
DefaultBranch: "main",
Name: "testRepo",
Owner: &models.User{
UserName: "test",
},
DefaultBranch: "main1",
},
},
Head: &models.PRBranchInfo{
Ref: "branch",
Sha: "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985",
Repo: &models.Repository{
Name: "test-repo",
Owner: &models.User{UserName: "test-org"},
DefaultBranch: "main",
Name: "testRepo",
Owner: &models.User{
UserName: "test",
},
DefaultBranch: "main1",
},
},
}

PrjGitPR := &models.PullRequest{
Title: "some pull request",
Body: "PR: test-org/test-repo#42",
Body: "PR: test/testRepo#42",
Index: 24,
Base: &models.PRBranchInfo{
Ref: "testing",
Repo: &models.Repository{
Name: "test-prj",
Owner: &models.User{UserName: "test-org"},
SSHURL: "url",
},
},
Head: &models.PRBranchInfo{
Name: "PR_test-repo#42",
Name: "testing",
Sha: "db8adab91edb476b9762097d10c6379aa71efd6b60933a1c0e355ddacf419a95",
Repo: &models.Repository{
SSHURL: "url",
SSHURL: "./prj",
},
},
}

t.Run("PR_sync_request_against_PrjGit_==_no_action", func(t *testing.T) {
git := &common.GitHandlerImpl{
GitCommiter: "tester",
GitEmail: "test@suse.com",
}

t.Run("PR sync request against PrjGit == no action", func(t *testing.T) {
ctl := gomock.NewController(t)
defer ctl.Finish()
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
Gitea = gitea
Gitea = mock_common.NewMockGitea(ctl)

// Common expectations for FetchPRSet and downstream checks
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
git.GitPath = t.TempDir()

prjGitRepoPR := &models.PullRequest{
Index: 100,
Base: &models.PRBranchInfo{
Ref: "testing",
Repo: &models.Repository{
Name: "test-prj",
Owner: &models.User{UserName: "test-org"},
},
},
Head: &models.PRBranchInfo{
Ref: "branch",
},
pr.config.GitProjectName = "testRepo"
event.Repository.Name = "testRepo"

if err := pr.Process(event); err != nil {
t.Error("Error PrjGit sync request. Should be no error.", err)
}
})

gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(prjGitRepoPR, nil).AnyTimes()
gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
t.Run("Missing submodule in prjgit", func(t *testing.T) {
ctl := gomock.NewController(t)
mock := mock_common.NewMockGitea(ctl)

if err := processor.Process(prjGitRepoPR); err != nil {
t.Errorf("Expected nil error for PrjGit sync request, got %v", err)
pr.gitea = mock
git.GitPath = t.TempDir()

pr.config.GitProjectName = "prjGit"
event.Repository.Name = "testRepo"

setupGitForTests(t, git)

oldSha := PrjGitPR.Head.Sha
defer func() { PrjGitPR.Head.Sha = oldSha }()
PrjGitPR.Head.Sha = "ab8adab91edb476b9762097d10c6379aa71efd6b60933a1c0e355ddacf419a95"

mock.EXPECT().GetPullRequest(pr.config.Organization, "testRepo", event.Pull_Request.Number).Return(modelPR, nil)
mock.EXPECT().GetPullRequest(pr.config.Organization, "prj", int64(24)).Return(PrjGitPR, nil)

err := pr.Process(event)

if err == nil || err.Error() != "Cannot fetch submodule commit id in prjgit for 'testRepo'" {
t.Error("Invalid error received.", err)
}
})

t.Run("Missing PrjGit PR for the sync", func(t *testing.T) {
ctl := gomock.NewController(t)
defer ctl.Finish()
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
Gitea = gitea
mock := mock_common.NewMockGitea(ctl)

gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
pr.gitea = mock
git.GitPath = t.TempDir()

gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, errors.New("not found")).AnyTimes()
pr.config.GitProjectName = "prjGit"
event.Repository.Name = "tester"

err := processor.Process(modelPR)
// It should fail because it can't find the project PR linked in body
if err == nil {
t.Errorf("Expected error for missing project PR, got nil")
setupGitForTests(t, git)

expectedErr := errors.New("Missing PR should throw error")
mock.EXPECT().GetPullRequest(config.Organization, "tester", event.Pull_Request.Number).Return(modelPR, expectedErr)

err := pr.Process(event, git, config)

if err == nil || errors.Unwrap(err) != expectedErr {
t.Error("Invalid error received.", err)
}
})

t.Run("PR sync", func(t *testing.T) {
var b bytes.Buffer
w := log.Writer()
log.SetOutput(&b)
defer log.SetOutput(w)

ctl := gomock.NewController(t)
defer ctl.Finish()
gitea := mock_common.NewMockGitea(ctl)
gitea.EXPECT().ResetTimelineCache(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes()
Gitea = gitea
mock := mock_common.NewMockGitea(ctl)

Gitea = mock
git.GitPath = t.TempDir()

pr.config.GitProjectName = "prjGit"
event.Repository.Name = "testRepo"

setupGitForTests(t, git)

gitea.EXPECT().GetPullRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(PrjGitPR, nil).AnyTimes()
gitea.EXPECT().GetTimeline(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.TimelineComment{}, nil).AnyTimes()
gitea.EXPECT().GetPullRequestReviews(gomock.Any(), gomock.Any(), gomock.Any()).Return([]*models.PullReview{}, nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipFile(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().FetchMaintainershipDirFile(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, "", nil).AnyTimes()
gitea.EXPECT().SetRepoOptions(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
// mock.EXPECT().GetAssociatedPrjGitPR(event).Return(PrjGitPR, nil)
mock.EXPECT().GetPullRequest(pr.config.Organization, "testRepo", event.Pull_Request.Number).Return(modelPR, nil)
mock.EXPECT().GetPullRequest(pr.config.Organization, "prj", int64(24)).Return(PrjGitPR, nil)

// For UpdatePrjGitPR
gitea.EXPECT().UpdatePullRequest(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes()
err := pr.Process(event)

err := processor.Process(modelPR)
if err != nil {
t.Errorf("Unexpected error: %v", err)
t.Error("Invalid error received.", err)
t.Error(b.String())
}

// check that we actually created the branch in the prjgit
id, ok := git.GitSubmoduleCommitId("prj", "testRepo", "c097b9d1d69892d0ef2afa66d4e8abf0a1612c6f95d271a6e15d6aff1ad2854c")
if id != "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985" || !ok {
t.Error("Failed creating PR")
t.Error(b.String())
}

// does nothing on next sync of already synced data -- PR is updated
os.RemoveAll(path.Join(git.GitPath, common.DefaultGitPrj))

mock.EXPECT().GetPullRequest(pr.config.Organization, "testRepo", event.Pull_Request.Number).Return(modelPR, nil)
mock.EXPECT().GetPullRequest(pr.config.Organization, "prj", int64(24)).Return(PrjGitPR, nil)
err = pr.Process(event)

if err != nil {
t.Error("Invalid error received.", err)
t.Error(b.String())
}

// check that we actually created the branch in the prjgit
id, ok = git.GitSubmoduleCommitId("prj", "testRepo", "c097b9d1d69892d0ef2afa66d4e8abf0a1612c6f95d271a6e15d6aff1ad2854c")
if id != "11eb36d5a58d7bb376cac59ac729a1986c6a7bfc63e7818e14382f545ccda985" || !ok {
t.Error("Failed creating PR")
t.Error(b.String())
}

if id, err := git.GitBranchHead("prj", "PR_testRepo#42"); id != "c097b9d1d69892d0ef2afa66d4e8abf0a1612c6f95d271a6e15d6aff1ad2854c" || err != nil {
t.Error("no branch?", err)
t.Error(b.String())
}

if !strings.Contains(b.String(), "commitID already match - nothing to do") {
// os.CopyFS("/tmp/test", os.DirFS(git.GitPath))
t.Log(b.String())
}
})
}

*/
Some files were not shown because too many files have changed in this diff.